hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3c05f46d1103a46be34acca8a814f4ccb0fb05e6
| 92
|
py
|
Python
|
aws_clutter/clutter/__init__.py
|
cloudkeep-io/detached-ebs-cost-monitor
|
7f5bf1f6527169ac9590c2910a1fd63484d681a7
|
[
"MIT"
] | 17
|
2021-12-11T21:32:59.000Z
|
2022-03-08T03:46:55.000Z
|
aws_clutter/clutter/__init__.py
|
cloudkeep-io/detached-ebs-cost-monitor
|
7f5bf1f6527169ac9590c2910a1fd63484d681a7
|
[
"MIT"
] | null | null | null |
aws_clutter/clutter/__init__.py
|
cloudkeep-io/detached-ebs-cost-monitor
|
7f5bf1f6527169ac9590c2910a1fd63484d681a7
|
[
"MIT"
] | 1
|
2022-02-11T03:57:24.000Z
|
2022-02-11T03:57:24.000Z
|
from .debs import query, summarize, aggregate
from .ulbs import query, summarize, aggregate
| 30.666667
| 45
| 0.804348
| 12
| 92
| 6.166667
| 0.583333
| 0.297297
| 0.540541
| 0.783784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 92
| 2
| 46
| 46
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b1dc8579c3c529bfc665fa11339ca1426648b030
| 2,524
|
py
|
Python
|
findy/database/schema/misc/money_flow.py
|
doncat99/FinanceDataCenter
|
1538c8347ed5bff9a99a3cca07507a7605108124
|
[
"MIT"
] | null | null | null |
findy/database/schema/misc/money_flow.py
|
doncat99/FinanceDataCenter
|
1538c8347ed5bff9a99a3cca07507a7605108124
|
[
"MIT"
] | null | null | null |
findy/database/schema/misc/money_flow.py
|
doncat99/FinanceDataCenter
|
1538c8347ed5bff9a99a3cca07507a7605108124
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from sqlalchemy import Column, String, Float
from sqlalchemy.ext.declarative import declarative_base
from findy.database.schema.datatype import Mixin
MoneyFlowBase = declarative_base()
# 板块资金流向
class BlockMoneyFlow(MoneyFlowBase, Mixin):
__tablename__ = 'block_money_flow'
code = Column(String(length=32))
name = Column(String(length=256))
# 收盘价
close = Column(Float)
change_pct = Column(Float)
turnover_rate = Column(Float)
# 净流入
net_inflows = Column(Float)
# 净流入率
net_inflow_rate = Column(Float)
# 主力=超大单+大单
net_main_inflows = Column(Float)
net_main_inflow_rate = Column(Float)
# 超大单
net_huge_inflows = Column(Float)
net_huge_inflow_rate = Column(Float)
# 大单
net_big_inflows = Column(Float)
net_big_inflow_rate = Column(Float)
# 中单
net_medium_inflows = Column(Float)
net_medium_inflow_rate = Column(Float)
# 小单
net_small_inflows = Column(Float)
net_small_inflow_rate = Column(Float)
class StockMoneyFlow(MoneyFlowBase, Mixin):
__tablename__ = 'stock_money_flow'
code = Column(String(length=32))
name = Column(String(length=256))
# 收盘价
close = Column(Float)
change_pct = Column(Float)
turnover_rate = Column(Float)
# 净流入
net_inflows = Column(Float)
# 净流入率
net_inflow_rate = Column(Float)
# 主力=超大单+大单
net_main_inflows = Column(Float)
net_main_inflow_rate = Column(Float)
# 超大单
net_huge_inflows = Column(Float)
net_huge_inflow_rate = Column(Float)
# 大单
net_big_inflows = Column(Float)
net_big_inflow_rate = Column(Float)
# 中单
net_medium_inflows = Column(Float)
net_medium_inflow_rate = Column(Float)
# 小单
net_small_inflows = Column(Float)
net_small_inflow_rate = Column(Float)
class IndexMoneyFlow(MoneyFlowBase, Mixin):
__tablename__ = 'index_money_flow'
code = Column(String(length=32))
name = Column(String(length=256))
# 净流入
net_inflows = Column(Float)
# 净流入率
net_inflow_rate = Column(Float)
# 主力=超大单+大单
net_main_inflows = Column(Float)
net_main_inflow_rate = Column(Float)
# 超大单
net_huge_inflows = Column(Float)
net_huge_inflow_rate = Column(Float)
# 大单
net_big_inflows = Column(Float)
net_big_inflow_rate = Column(Float)
# 中单
net_medium_inflows = Column(Float)
net_medium_inflow_rate = Column(Float)
# 小单
net_small_inflows = Column(Float)
net_small_inflow_rate = Column(Float)
| 23.811321
| 55
| 0.690174
| 324
| 2,524
| 5.04321
| 0.169753
| 0.282742
| 0.183599
| 0.231334
| 0.807222
| 0.807222
| 0.807222
| 0.807222
| 0.807222
| 0.807222
| 0
| 0.008105
| 0.217908
| 2,524
| 105
| 56
| 24.038095
| 0.819656
| 0.052298
| 0
| 0.827586
| 0
| 0
| 0.020287
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.051724
| 0
| 0.982759
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b1f41b25e79ca92fdc9630773c351102eed135e3
| 24,003
|
py
|
Python
|
cogs/stations.py
|
akarealemil/trainbot
|
c58c191bcb5e0f576ea70b7d7d5a40595636311c
|
[
"BSD-3-Clause"
] | null | null | null |
cogs/stations.py
|
akarealemil/trainbot
|
c58c191bcb5e0f576ea70b7d7d5a40595636311c
|
[
"BSD-3-Clause"
] | 1
|
2021-10-02T20:22:00.000Z
|
2021-10-02T20:22:00.000Z
|
cogs/stations.py
|
akarealemil/trainbot
|
c58c191bcb5e0f576ea70b7d7d5a40595636311c
|
[
"BSD-3-Clause"
] | null | null | null |
import io
import time
import typing
import base64
import binascii
import re
import datetime
from datetime import datetime
from urllib.parse import quote as urlquote
import psutil
import os
import asyncio
import csv
import requests
import sys
sys.dont_write_bytecode = True
from utils import *
import discord
from discord.ext import commands
from discord.ext.commands.cooldowns import BucketType
class Stations(commands.Cog):
def __init__(self, bot):
self.bot = bot
guilds = len(list(bot.guilds))
@commands.command(name='station', aliases=['stations', 'stationz'])
@commands.cooldown(1,5,type=BucketType.user)
async def station_command(self, ctx, *, stationinput = None):
if ctx.author == self.bot.user:
return
now = datetime.now()
dt_string = now.strftime("%H:%M")
auth = ('rttapi_ayyitsemil', 'bf27d67e6f7057bc9e89c42c7d72cf1051ad4a06')
def check50(m):
return ctx.author == m.author
if stationinput is None:
embedmain1 = discord.Embed(title="Welcome", description="""Welcome to the Station Information Control. You selected this to display information regarding a given station. Firstly, provide me with a station. Make sure it's not closed!""", colour=discord.Colour.from_rgb(255,20,147))
embedmain1.add_field(name="Notice", value="Please enter a station name, for example 'London Euston' or the CRS Code, for example EUS. If you enter it incorrectly, it may give an error!")
mainembed1 = await ctx.channel.send(embed=embedmain1)
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=check50)
msg = msg.content.lower()
with open('TS.csv', "r") as tlist:
stlist = csv.reader(tlist)
for row in stlist:
found = False
if msg in (row[0].lower(), row[1].lower()):
found = True
dept = f'https://api.rtt.io/api/v1/json/search/{row[1]}'
arri = f'https://api.rtt.io/api/v1/json/search/{row[1]}/arrivals'
embedmain2 = discord.Embed(title="Welcome", description="""Thank you for providing the Station Name. I've retrieved the station, please select one of these options""", colour=discord.Colour.from_rgb(255,20,147))
embedmain2.add_field(name=":watch: ARRIVALS", value="Select this if you want ARRIVALS to display", inline=False)
embedmain2.add_field(name=":timer: DEPARTURES", value="Select this if you want DEPARTURES to display", inline=False)
embedmain2.add_field(name=":stopwatch: BOTH", value="Select this if you want BOTH (depatures and arrivals) to display", inline=False)
embedmain2.add_field(name="WARNING", value="There may be a maximum of 5 depatures OR arrivals posted (Maximum of 10 embeds)", inline=False)
embedmain2.add_field(name="Notice", value="For smaller stations, Arrivals and Departures may show the SAME information. For larger stations it may be different as the trains arriving may be terminating there, whilst departing trains may be going on further!")
mainembed = await ctx.send(embed=embedmain2)
emojis = ['⌚', '⏲️', '⏱️']
for each_emoji in emojis:
await mainembed.add_reaction(each_emoji)
def check2(reaction, user):
return reaction.message.id == mainembed.id and user == ctx.author
try:
reaction, user = await self.bot.wait_for('reaction_add', timeout=60.0, check=check2)
except asyncio.TimeoutError:
embederror = discord.Embed(title="CANCELLED", description=f"Your attention please. We are sorry that the {dt_string}, Train Information Bot service to Destination/Arrival Input has been cancelled. This is due to your lack of emoji choice. The Developer apologises for the inconvenience this may cause you.", colour=discord.Colour.from_rgb(255,20,147))
await ctx.channel.send(embed=embederror)
else:
try:
with requests.Session() as s:
depar = s.get(dept, auth=auth)
obtaindata = depar.json()
arriv = s.get(arri, auth=auth)
arrivdata = arriv.json()
for i in range(1,4):
toc = obtaindata["services"][i]['atocName']
deptime = obtaindata["services"][i]['locationDetail']['gbttBookedDeparture']
accdeptime = obtaindata["services"][i]['locationDetail']['realtimeDeparture']
destin = obtaindata["services"][i]['locationDetail']['destination'][0]["description"]
destatime = obtaindata["services"][i]['locationDetail']['destination'][0]['publicTime']
try:
status = obtaindata["services"][i]['locationDetail']['serviceLocation']
except (KeyError):
status = "Not Confirmed"
toc2 = arrivdata["services"][i]['atocName']
origin = arrivdata["services"][i]['locationDetail']['origin'][0]["description"]
odeptime = arrivdata["services"][i]['locationDetail']['origin'][0]["publicTime"]
destatime2 = arrivdata["services"][i]['locationDetail']['destination'][0]['publicTime']
if str(reaction.emoji) == '⌚':
aembed2=discord.Embed(title="ARRIVALS", description=f"", colour=discord.Colour.from_rgb(255,20,147))
aembed2.set_author(name=f"STATION ARRIVALS | Station {msg}")
aembed2.add_field(name="TOC (Train Operating Company)", value=f"{toc2}", inline=True)
aembed2.add_field(name="Arrival Time", value=f"{destatime2}", inline=True)
aembed2.add_field(name="Origin", value=f"{origin}", inline=True)
aembed2.add_field(name="Origin Departure Time", value=f"{odeptime}", inline=True)
aembed2.set_footer(text='Bot developed by Emil#0581')
await ctx.send(embed=aembed2)
elif str(reaction.emoji) == '⏲️':
embed2=discord.Embed(title="DEPATURES", description=f"", colour=discord.Colour.from_rgb(255,20,147))
embed2.set_author(name=f"STATION DEPARTURES | Station {msg}")
embed2.add_field(name="TOC (Train Operating Company)", value=f"{toc}", inline=True)
embed2.add_field(name="Departure Time", value=f"{deptime}", inline=True)
embed2.add_field(name="Destination", value=f"{destin}", inline=True)
embed2.add_field(name="Destination Arrival Time", value=f"{destatime}", inline=True)
embed2.add_field(name="Actual Departure Time", value=f"{accdeptime}", inline=True)
embed2.add_field(name="Status", value=f"{status}", inline=True)
embed2.set_footer(text='Bot developed by Emil#0581')
await ctx.send(embed=embed2)
elif str(reaction.emoji) == '⏱️':
aembed2=discord.Embed(title="ARRIVALS", description=f"", colour=discord.Colour.from_rgb(255,20,147))
aembed2.set_author(name=f"STATION ARRIVALS | Station {msg}")
aembed2.add_field(name="TOC (Train Operating Company)", value=f"{toc2}", inline=True)
aembed2.add_field(name="Arrival Time", value=f"{destatime2}", inline=True)
aembed2.add_field(name="Origin", value=f"{origin}", inline=True)
aembed2.add_field(name="Origin Departure Time", value=f"{odeptime}", inline=True)
aembed2.set_footer(text='Bot developed by Emil#0581')
await ctx.send(embed=aembed2)
dembed2=discord.Embed(title="DEPATURES", description=f"", colour=discord.Colour.from_rgb(255,20,147))
dembed2.set_author(name=f"STATION DEPARTURES | Station {msg}")
dembed2.add_field(name="TOC (Train Operating Company)", value=f"{toc}", inline=True)
dembed2.add_field(name="Departure Time", value=f"{deptime}", inline=True)
dembed2.add_field(name="Destination", value=f"{destin}", inline=True)
dembed2.add_field(name="Destination Arrival Time", value=f"{destatime}", inline=True)
dembed2.add_field(name="Actual Departure Time", value=f"{accdeptime}", inline=True)
dembed2.add_field(name="Status", value=f"{status}", inline=True)
dembed2.set_footer(text='Bot developed by Emil#0581')
await ctx.send(embed=dembed2)
except (ConnectionError, TimeoutError):
await ctx.send("If you are seeing this error, either one of two things has happened: API is down on RealTrainTimes, NetworkRail or similar. OR there's no more trains on this day and you'd need to wait until morning for it to update. Information is obtained from NR API, so it's whenever they update it")
time.sleep(600)
except Exception as e:
await ctx.send(e)
await ctx.send("Please report this to the owner using tb!suggestion error, and include your discord invite link!")
break
else:
await ctx.send("This station could not be found. Retry the command.")
except asyncio.TimeoutError:
embederror2 = discord.Embed(title="CANCELLED", description=f"Your attention please. We are sorry that the {dt_string}, Train Information Bot service to Station Name Input has been cancelled. This is due to your lack of response. The Developer apologises for the inconvenience this may cause you.", colour=discord.Colour.from_rgb(255,20,147))
await ctx.send(embed=embederror2)
else:
stationinput = stationinput.lower()
with open('TS.csv', "r") as tlist:
stlist = csv.reader(tlist)
for row in stlist:
if stationinput in (row[0].lower(), row[1].lower()):
dept = f'https://api.rtt.io/api/v1/json/search/{row[1]}'
arri = f'https://api.rtt.io/api/v1/json/search/{row[1]}/arrivals'
embedmain2 = discord.Embed(title="Welcome", description="""Thank you for providing the Station Name. I've retrieved the station, please select one of these options""", colour=discord.Colour.from_rgb(255,20,147))
embedmain2.add_field(name=":watch: ARRIVALS", value="Select this if you want ARRIVALS to display", inline=False)
embedmain2.add_field(name=":timer: DEPARTURES", value="Select this if you want DEPARTURES to display", inline=False)
embedmain2.add_field(name=":stopwatch: BOTH", value="Select this if you want BOTH (depatures and arrivals) to display", inline=False)
embedmain2.add_field(name="WARNING", value="There may be a maximum of 5 depatures OR arrivals posted (Maximum of 10 embeds)", inline=False)
embedmain2.add_field(name="Notice", value="For smaller stations, Arrivals and Departures may show the SAME information. For larger stations it may be different as the trains arriving may be terminating there, whilst departing trains may be going on further!")
mainembed = await ctx.send(embed=embedmain2)
emojis = ['⌚', '⏲️', '⏱️']
for each_emoji in emojis:
await mainembed.add_reaction(each_emoji)
def check2(reaction, user):
return reaction.message.id == mainembed.id and user == ctx.author
try:
reaction, user = await self.bot.wait_for('reaction_add', timeout=60.0, check=check2)
except asyncio.TimeoutError:
embederror = discord.Embed(title="CANCELLED", description=f"Your attention please. We are sorry that the {dt_string}, Train Information Bot service to Destination/Arrival Input has been cancelled. This is due to your lack of emoji choice. The Developer apologises for the inconvenience this may cause you.", colour=discord.Colour.from_rgb(255,20,147))
await ctx.channel.send(embed=embederror)
else:
try:
with requests.Session() as s:
depar = s.get(dept, auth=auth)
obtaindata = depar.json()
arriv = s.get(arri, auth=auth)
arrivdata = arriv.json()
for i in range(1,4):
toc = obtaindata["services"][i]['atocName']
deptime = obtaindata["services"][i]['locationDetail']['gbttBookedDeparture']
accdeptime = obtaindata["services"][i]['locationDetail']['realtimeDeparture']
destin = obtaindata["services"][i]['locationDetail']['destination'][0]["description"]
destatime = obtaindata["services"][i]['locationDetail']['destination'][0]['publicTime']
try:
status = obtaindata["services"][i]['locationDetail']['serviceLocation']
except (KeyError):
status = "Not Confirmed"
toc2 = arrivdata["services"][i]['atocName']
origin = arrivdata["services"][i]['locationDetail']['origin'][0]["description"]
odeptime = arrivdata["services"][i]['locationDetail']['origin'][0]["publicTime"]
destatime2 = arrivdata["services"][i]['locationDetail']['destination'][0]['publicTime']
if str(reaction.emoji) == '⌚':
aembed2=discord.Embed(title="ARRIVALS", description=f"", colour=discord.Colour.from_rgb(255,20,147))
aembed2.set_author(name=f"STATION ARRIVALS | Station {stationinput}")
aembed2.add_field(name="TOC (Train Operating Company)", value=f"{toc2}", inline=True)
aembed2.add_field(name="Arrival Time", value=f"{destatime2}", inline=True)
aembed2.add_field(name="Origin", value=f"{origin}", inline=True)
aembed2.add_field(name="Origin Departure Time", value=f"{odeptime}", inline=True)
aembed2.set_footer(text='Bot developed by Emil#0581')
await ctx.send(embed=aembed2)
elif str(reaction.emoji) == '⏲️':
embed2=discord.Embed(title="DEPATURES", description=f"", colour=discord.Colour.from_rgb(255,20,147))
embed2.set_author(name=f"STATION DEPARTURES | Station {stationinput}")
embed2.add_field(name="TOC (Train Operating Company)", value=f"{toc}", inline=True)
embed2.add_field(name="Departure Time", value=f"{deptime}", inline=True)
embed2.add_field(name="Destination", value=f"{destin}", inline=True)
embed2.add_field(name="Destination Arrival Time", value=f"{destatime}", inline=True)
embed2.add_field(name="Actual Departure Time", value=f"{accdeptime}", inline=True)
embed2.add_field(name="Status", value=f"{status}", inline=True)
embed2.set_footer(text='Bot developed by Emil#0581')
await ctx.send(embed=embed2)
elif str(reaction.emoji) == '⏱️':
aembed2=discord.Embed(title="ARRIVALS", description=f"", colour=discord.Colour.from_rgb(255,20,147))
aembed2.set_author(name=f"STATION ARRIVALS | Station {stationinput}")
aembed2.add_field(name="TOC (Train Operating Company)", value=f"{toc2}", inline=True)
aembed2.add_field(name="Arrival Time", value=f"{destatime2}", inline=True)
aembed2.add_field(name="Origin", value=f"{origin}", inline=True)
aembed2.add_field(name="Origin Departure Time", value=f"{odeptime}", inline=True)
aembed2.set_footer(text='Bot developed by Emil#0581')
await ctx.send(embed=aembed2)
dembed2=discord.Embed(title="DEPATURES", description=f"", colour=discord.Colour.from_rgb(255,20,147))
dembed2.set_author(name=f"STATION DEPARTURES | Station {stationinput}")
dembed2.add_field(name="TOC (Train Operating Company)", value=f"{toc}", inline=True)
dembed2.add_field(name="Departure Time", value=f"{deptime}", inline=True)
dembed2.add_field(name="Destination", value=f"{destin}", inline=True)
dembed2.add_field(name="Destination Arrival Time", value=f"{destatime}", inline=True)
dembed2.add_field(name="Actual Departure Time", value=f"{accdeptime}", inline=True)
dembed2.add_field(name="Status", value=f"{status}", inline=True)
dembed2.set_footer(text='Bot developed by Emil#0581')
await ctx.send(embed=dembed2)
except (ConnectionError, TimeoutError):
await ctx.send("If you are seeing this error, either one of two things has happened: API is down on RealTrainTimes, NetworkRail or similar. OR there's no more trains on this day and you'd need to wait until morning for it to update. Information is obtained from NR API, so it's whenever they update it")
time.sleep(600)
except Exception as e:
print(e)
await ctx.send("If you are seeing this error, the API is down.")
break
else:
await ctx.send("This station could not be found. Retry the command.")
@commands.command(name='name', aliases=['stationconvert', 'stationcrs', 'CRS', 'crscode', 'crsstation'])
@commands.cooldown(1,5,type=BucketType.user)
async def name_command(self, ctx, *, station = None):
if ctx.author == self.bot.user:
return
now = datetime.now()
dt_string = now.strftime("%H:%M")
if station is None:
embedmain1 = discord.Embed(title="Welcome", description="""Welcome to the Station <-> CRS. You selected this to obtain a CRS code from a station, or a station from a CRS code""", colour=discord.Colour.from_rgb(255,20,147))
mainembed1 = await ctx.channel.send(embed=embedmain1)
def check50(m):
return ctx.author == m.author
msg = await self.bot.wait_for('message', timeout=60.0, check=check50)
msg = msg.content.lower()
with open('TS.csv', "r") as tlist:
stlist = csv.reader(tlist)
for row in stlist:
if msg in (row[0].lower(), row[1].lower()):
crsembed = discord.Embed(title=f"Station CRS - {row[1]}", description=f"**{row[0]}** is the corresponding station to the inputted CRS Code")
await ctx.send(embed=crsembed)
break
else:
await ctx.send('This station is not apart of the list.')
else:
station = station.lower()
with open('TS.csv', "r") as tlist:
stlist = csv.reader(tlist)
for row in stlist:
if msg in (row[0].lower(), row[1].lower()):
crsembed = discord.Embed(title=f"Station CRS - {row[1]}", description=f"**{row[0]}** is the corresponding station to the inputted CRS Code")
await ctx.send(embed=crsembed)
break
else:
await ctx.send('This station is not apart of the list.')
def setup(bot):
bot.add_cog(Stations(bot))
| 69.172911
| 383
| 0.500271
| 2,376
| 24,003
| 5.016414
| 0.135522
| 0.034231
| 0.051347
| 0.025506
| 0.892273
| 0.890343
| 0.890343
| 0.881282
| 0.867522
| 0.855105
| 0
| 0.024927
| 0.399992
| 24,003
| 346
| 384
| 69.372832
| 0.801278
| 0
| 0
| 0.809701
| 0
| 0.052239
| 0.262176
| 0.001666
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022388
| false
| 0
| 0.070896
| 0.014925
| 0.119403
| 0.003731
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5923f600fb5384c6d60b88c7c62be3f2a8a75036
| 28,039
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/network_peering.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/compute/network_peering.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/compute/network_peering.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['NetworkPeeringArgs', 'NetworkPeering']
@pulumi.input_type
class NetworkPeeringArgs:
def __init__(__self__, *,
network: pulumi.Input[str],
peer_network: pulumi.Input[str],
export_custom_routes: Optional[pulumi.Input[bool]] = None,
export_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
import_custom_routes: Optional[pulumi.Input[bool]] = None,
import_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a NetworkPeering resource.
:param pulumi.Input[str] network: The primary network of the peering.
:param pulumi.Input[str] peer_network: The peer network in the peering. The peer network
may belong to a different project.
:param pulumi.Input[bool] export_custom_routes: Whether to export the custom routes to the peer network. Defaults to `false`.
:param pulumi.Input[bool] export_subnet_routes_with_public_ip: Whether subnet routes with public IP range are exported. The default value is true, all subnet routes are exported. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always exported to peers and are not controlled by this field.
:param pulumi.Input[bool] import_custom_routes: Whether to import the custom routes from the peer network. Defaults to `false`.
:param pulumi.Input[bool] import_subnet_routes_with_public_ip: Whether subnet routes with public IP range are imported. The default value is false. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always imported from peers and are not controlled by this field.
:param pulumi.Input[str] name: Name of the peering.
"""
pulumi.set(__self__, "network", network)
pulumi.set(__self__, "peer_network", peer_network)
if export_custom_routes is not None:
pulumi.set(__self__, "export_custom_routes", export_custom_routes)
if export_subnet_routes_with_public_ip is not None:
pulumi.set(__self__, "export_subnet_routes_with_public_ip", export_subnet_routes_with_public_ip)
if import_custom_routes is not None:
pulumi.set(__self__, "import_custom_routes", import_custom_routes)
if import_subnet_routes_with_public_ip is not None:
pulumi.set(__self__, "import_subnet_routes_with_public_ip", import_subnet_routes_with_public_ip)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def network(self) -> pulumi.Input[str]:
"""
The primary network of the peering.
"""
return pulumi.get(self, "network")
@network.setter
def network(self, value: pulumi.Input[str]):
pulumi.set(self, "network", value)
@property
@pulumi.getter(name="peerNetwork")
def peer_network(self) -> pulumi.Input[str]:
"""
The peer network in the peering. The peer network
may belong to a different project.
"""
return pulumi.get(self, "peer_network")
@peer_network.setter
def peer_network(self, value: pulumi.Input[str]):
pulumi.set(self, "peer_network", value)
@property
@pulumi.getter(name="exportCustomRoutes")
def export_custom_routes(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to export the custom routes to the peer network. Defaults to `false`.
"""
return pulumi.get(self, "export_custom_routes")
@export_custom_routes.setter
def export_custom_routes(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "export_custom_routes", value)
@property
@pulumi.getter(name="exportSubnetRoutesWithPublicIp")
def export_subnet_routes_with_public_ip(self) -> Optional[pulumi.Input[bool]]:
"""
Whether subnet routes with public IP range are exported. The default value is true, all subnet routes are exported. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always exported to peers and are not controlled by this field.
"""
return pulumi.get(self, "export_subnet_routes_with_public_ip")
@export_subnet_routes_with_public_ip.setter
def export_subnet_routes_with_public_ip(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "export_subnet_routes_with_public_ip", value)
@property
@pulumi.getter(name="importCustomRoutes")
def import_custom_routes(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to import the custom routes from the peer network. Defaults to `false`.
"""
return pulumi.get(self, "import_custom_routes")
@import_custom_routes.setter
def import_custom_routes(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "import_custom_routes", value)
@property
@pulumi.getter(name="importSubnetRoutesWithPublicIp")
def import_subnet_routes_with_public_ip(self) -> Optional[pulumi.Input[bool]]:
    """
    Whether subnet routes with public IP range are imported. The default value is false. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always imported from peers and are not controlled by this field.
    """
    # Maps to the camelCase provider-schema property "importSubnetRoutesWithPublicIp".
    return pulumi.get(self, "import_subnet_routes_with_public_ip")

@import_subnet_routes_with_public_ip.setter
def import_subnet_routes_with_public_ip(self, value: Optional[pulumi.Input[bool]]):
    # Route through pulumi.set so the value is tracked as a resource input.
    pulumi.set(self, "import_subnet_routes_with_public_ip", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
    """
    Name of the peering.
    """
    # No `name=` argument needed: the Python and schema property names match.
    return pulumi.get(self, "name")

@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
    # Route through pulumi.set so the value is tracked as a resource input.
    pulumi.set(self, "name", value)
@pulumi.input_type
class _NetworkPeeringState:
    # State container consumed by NetworkPeering.get(). Every field is
    # optional because a lookup may be qualified by any subset of
    # properties; it also carries the provider-populated fields
    # `state` and `state_details`, which callers cannot set on create.
    def __init__(__self__, *,
                 export_custom_routes: Optional[pulumi.Input[bool]] = None,
                 export_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
                 import_custom_routes: Optional[pulumi.Input[bool]] = None,
                 import_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 network: Optional[pulumi.Input[str]] = None,
                 peer_network: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 state_details: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering NetworkPeering resources.
        :param pulumi.Input[bool] export_custom_routes: Whether to export the custom routes to the peer network. Defaults to `false`.
        :param pulumi.Input[bool] export_subnet_routes_with_public_ip: Whether subnet routes with public IP range are exported. The default value is true, all subnet routes are exported. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always exported to peers and are not controlled by this field.
        :param pulumi.Input[bool] import_custom_routes: Whether to import the custom routes from the peer network. Defaults to `false`.
        :param pulumi.Input[bool] import_subnet_routes_with_public_ip: Whether subnet routes with public IP range are imported. The default value is false. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always imported from peers and are not controlled by this field.
        :param pulumi.Input[str] name: Name of the peering.
        :param pulumi.Input[str] network: The primary network of the peering.
        :param pulumi.Input[str] peer_network: The peer network in the peering. The peer network
               may belong to a different project.
        :param pulumi.Input[str] state: State for the peering, either `ACTIVE` or `INACTIVE`. The peering is
               `ACTIVE` when there's a matching configuration in the peer network.
        :param pulumi.Input[str] state_details: Details about the current state of the peering.
        """
        # Only record properties that were explicitly supplied, so unset
        # fields remain absent rather than being stored as None.
        if export_custom_routes is not None:
            pulumi.set(__self__, "export_custom_routes", export_custom_routes)
        if export_subnet_routes_with_public_ip is not None:
            pulumi.set(__self__, "export_subnet_routes_with_public_ip", export_subnet_routes_with_public_ip)
        if import_custom_routes is not None:
            pulumi.set(__self__, "import_custom_routes", import_custom_routes)
        if import_subnet_routes_with_public_ip is not None:
            pulumi.set(__self__, "import_subnet_routes_with_public_ip", import_subnet_routes_with_public_ip)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if network is not None:
            pulumi.set(__self__, "network", network)
        if peer_network is not None:
            pulumi.set(__self__, "peer_network", peer_network)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if state_details is not None:
            pulumi.set(__self__, "state_details", state_details)

    # Each property below delegates to pulumi.get/pulumi.set; the `name=`
    # arguments map snake_case Python names to camelCase schema names.
    @property
    @pulumi.getter(name="exportCustomRoutes")
    def export_custom_routes(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to export the custom routes to the peer network. Defaults to `false`.
        """
        return pulumi.get(self, "export_custom_routes")

    @export_custom_routes.setter
    def export_custom_routes(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "export_custom_routes", value)

    @property
    @pulumi.getter(name="exportSubnetRoutesWithPublicIp")
    def export_subnet_routes_with_public_ip(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether subnet routes with public IP range are exported. The default value is true, all subnet routes are exported. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always exported to peers and are not controlled by this field.
        """
        return pulumi.get(self, "export_subnet_routes_with_public_ip")

    @export_subnet_routes_with_public_ip.setter
    def export_subnet_routes_with_public_ip(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "export_subnet_routes_with_public_ip", value)

    @property
    @pulumi.getter(name="importCustomRoutes")
    def import_custom_routes(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to import the custom routes from the peer network. Defaults to `false`.
        """
        return pulumi.get(self, "import_custom_routes")

    @import_custom_routes.setter
    def import_custom_routes(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "import_custom_routes", value)

    @property
    @pulumi.getter(name="importSubnetRoutesWithPublicIp")
    def import_subnet_routes_with_public_ip(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether subnet routes with public IP range are imported. The default value is false. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always imported from peers and are not controlled by this field.
        """
        return pulumi.get(self, "import_subnet_routes_with_public_ip")

    @import_subnet_routes_with_public_ip.setter
    def import_subnet_routes_with_public_ip(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "import_subnet_routes_with_public_ip", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the peering.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def network(self) -> Optional[pulumi.Input[str]]:
        """
        The primary network of the peering.
        """
        return pulumi.get(self, "network")

    @network.setter
    def network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "network", value)

    @property
    @pulumi.getter(name="peerNetwork")
    def peer_network(self) -> Optional[pulumi.Input[str]]:
        """
        The peer network in the peering. The peer network
        may belong to a different project.
        """
        return pulumi.get(self, "peer_network")

    @peer_network.setter
    def peer_network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "peer_network", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        State for the peering, either `ACTIVE` or `INACTIVE`. The peering is
        `ACTIVE` when there's a matching configuration in the peer network.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="stateDetails")
    def state_details(self) -> Optional[pulumi.Input[str]]:
        """
        Details about the current state of the peering.
        """
        return pulumi.get(self, "state_details")

    @state_details.setter
    def state_details(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state_details", value)
class NetworkPeering(pulumi.CustomResource):
    # Resource class for a GCE VPC network peering. Construction is
    # overloaded: callers pass either keyword properties or a single
    # NetworkPeeringArgs object; __init__ dispatches to _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 export_custom_routes: Optional[pulumi.Input[bool]] = None,
                 export_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
                 import_custom_routes: Optional[pulumi.Input[bool]] = None,
                 import_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 network: Optional[pulumi.Input[str]] = None,
                 peer_network: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a network peering within GCE. For more information see
        [the official documentation](https://cloud.google.com/compute/docs/vpc/vpc-peering)
        and
        [API](https://cloud.google.com/compute/docs/reference/latest/networks).
        > Both network must create a peering with each other for the peering
        to be functional.
        > Subnets IP ranges across peered VPC networks cannot overlap.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_gcp as gcp
        default = gcp.compute.Network("default", auto_create_subnetworks=False)
        other = gcp.compute.Network("other", auto_create_subnetworks=False)
        peering1 = gcp.compute.NetworkPeering("peering1",
            network=default.id,
            peer_network=other.id)
        peering2 = gcp.compute.NetworkPeering("peering2",
            network=other.id,
            peer_network=default.id)
        ```
        ## Import
        VPC network peerings can be imported using the name and project of the primary network the peering exists in and the name of the network peering
        ```sh
        $ pulumi import gcp:compute/networkPeering:NetworkPeering peering_network project-name/network-name/peering-name
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] export_custom_routes: Whether to export the custom routes to the peer network. Defaults to `false`.
        :param pulumi.Input[bool] export_subnet_routes_with_public_ip: Whether subnet routes with public IP range are exported. The default value is true, all subnet routes are exported. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always exported to peers and are not controlled by this field.
        :param pulumi.Input[bool] import_custom_routes: Whether to import the custom routes from the peer network. Defaults to `false`.
        :param pulumi.Input[bool] import_subnet_routes_with_public_ip: Whether subnet routes with public IP range are imported. The default value is false. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always imported from peers and are not controlled by this field.
        :param pulumi.Input[str] name: Name of the peering.
        :param pulumi.Input[str] network: The primary network of the peering.
        :param pulumi.Input[str] peer_network: The peer network in the peering. The peer network
               may belong to a different project.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: NetworkPeeringArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a network peering within GCE. For more information see
        [the official documentation](https://cloud.google.com/compute/docs/vpc/vpc-peering)
        and
        [API](https://cloud.google.com/compute/docs/reference/latest/networks).
        > Both network must create a peering with each other for the peering
        to be functional.
        > Subnets IP ranges across peered VPC networks cannot overlap.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_gcp as gcp
        default = gcp.compute.Network("default", auto_create_subnetworks=False)
        other = gcp.compute.Network("other", auto_create_subnetworks=False)
        peering1 = gcp.compute.NetworkPeering("peering1",
            network=default.id,
            peer_network=other.id)
        peering2 = gcp.compute.NetworkPeering("peering2",
            network=other.id,
            peer_network=default.id)
        ```
        ## Import
        VPC network peerings can be imported using the name and project of the primary network the peering exists in and the name of the network peering
        ```sh
        $ pulumi import gcp:compute/networkPeering:NetworkPeering peering_network project-name/network-name/peering-name
        ```
        :param str resource_name: The name of the resource.
        :param NetworkPeeringArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: if an args object
        # was supplied, unpack its fields into keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(NetworkPeeringArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 export_custom_routes: Optional[pulumi.Input[bool]] = None,
                 export_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
                 import_custom_routes: Optional[pulumi.Input[bool]] = None,
                 import_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 network: Optional[pulumi.Input[str]] = None,
                 peer_network: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Normalize and validate resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no opts.id): __props__ is reserved
            # for the get() path, so reject it here.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = NetworkPeeringArgs.__new__(NetworkPeeringArgs)
            __props__.__dict__["export_custom_routes"] = export_custom_routes
            __props__.__dict__["export_subnet_routes_with_public_ip"] = export_subnet_routes_with_public_ip
            __props__.__dict__["import_custom_routes"] = import_custom_routes
            __props__.__dict__["import_subnet_routes_with_public_ip"] = import_subnet_routes_with_public_ip
            __props__.__dict__["name"] = name
            # network and peer_network are required unless rehydrating from
            # an existing URN, where the engine supplies stored state.
            if network is None and not opts.urn:
                raise TypeError("Missing required property 'network'")
            __props__.__dict__["network"] = network
            if peer_network is None and not opts.urn:
                raise TypeError("Missing required property 'peer_network'")
            __props__.__dict__["peer_network"] = peer_network
            # Output-only fields; populated by the provider after creation.
            __props__.__dict__["state"] = None
            __props__.__dict__["state_details"] = None
        super(NetworkPeering, __self__).__init__(
            'gcp:compute/networkPeering:NetworkPeering',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            export_custom_routes: Optional[pulumi.Input[bool]] = None,
            export_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
            import_custom_routes: Optional[pulumi.Input[bool]] = None,
            import_subnet_routes_with_public_ip: Optional[pulumi.Input[bool]] = None,
            name: Optional[pulumi.Input[str]] = None,
            network: Optional[pulumi.Input[str]] = None,
            peer_network: Optional[pulumi.Input[str]] = None,
            state: Optional[pulumi.Input[str]] = None,
            state_details: Optional[pulumi.Input[str]] = None) -> 'NetworkPeering':
        """
        Get an existing NetworkPeering resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] export_custom_routes: Whether to export the custom routes to the peer network. Defaults to `false`.
        :param pulumi.Input[bool] export_subnet_routes_with_public_ip: Whether subnet routes with public IP range are exported. The default value is true, all subnet routes are exported. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always exported to peers and are not controlled by this field.
        :param pulumi.Input[bool] import_custom_routes: Whether to import the custom routes from the peer network. Defaults to `false`.
        :param pulumi.Input[bool] import_subnet_routes_with_public_ip: Whether subnet routes with public IP range are imported. The default value is false. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always imported from peers and are not controlled by this field.
        :param pulumi.Input[str] name: Name of the peering.
        :param pulumi.Input[str] network: The primary network of the peering.
        :param pulumi.Input[str] peer_network: The peer network in the peering. The peer network
               may belong to a different project.
        :param pulumi.Input[str] state: State for the peering, either `ACTIVE` or `INACTIVE`. The peering is
               `ACTIVE` when there's a matching configuration in the peer network.
        :param pulumi.Input[str] state_details: Details about the current state of the peering.
        """
        # Attach the provider ID so the constructor takes the "lookup"
        # path (opts.id set) rather than creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _NetworkPeeringState.__new__(_NetworkPeeringState)
        __props__.__dict__["export_custom_routes"] = export_custom_routes
        __props__.__dict__["export_subnet_routes_with_public_ip"] = export_subnet_routes_with_public_ip
        __props__.__dict__["import_custom_routes"] = import_custom_routes
        __props__.__dict__["import_subnet_routes_with_public_ip"] = import_subnet_routes_with_public_ip
        __props__.__dict__["name"] = name
        __props__.__dict__["network"] = network
        __props__.__dict__["peer_network"] = peer_network
        __props__.__dict__["state"] = state
        __props__.__dict__["state_details"] = state_details
        return NetworkPeering(resource_name, opts=opts, __props__=__props__)

    # Read-only output accessors; values resolve once the provider has
    # created or refreshed the resource.
    @property
    @pulumi.getter(name="exportCustomRoutes")
    def export_custom_routes(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether to export the custom routes to the peer network. Defaults to `false`.
        """
        return pulumi.get(self, "export_custom_routes")

    @property
    @pulumi.getter(name="exportSubnetRoutesWithPublicIp")
    def export_subnet_routes_with_public_ip(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether subnet routes with public IP range are exported. The default value is true, all subnet routes are exported. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always exported to peers and are not controlled by this field.
        """
        return pulumi.get(self, "export_subnet_routes_with_public_ip")

    @property
    @pulumi.getter(name="importCustomRoutes")
    def import_custom_routes(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether to import the custom routes from the peer network. Defaults to `false`.
        """
        return pulumi.get(self, "import_custom_routes")

    @property
    @pulumi.getter(name="importSubnetRoutesWithPublicIp")
    def import_subnet_routes_with_public_ip(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether subnet routes with public IP range are imported. The default value is false. The IPv4 special-use ranges (https://en.wikipedia.org/wiki/IPv4#Special_addresses) are always imported from peers and are not controlled by this field.
        """
        return pulumi.get(self, "import_subnet_routes_with_public_ip")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the peering.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def network(self) -> pulumi.Output[str]:
        """
        The primary network of the peering.
        """
        return pulumi.get(self, "network")

    @property
    @pulumi.getter(name="peerNetwork")
    def peer_network(self) -> pulumi.Output[str]:
        """
        The peer network in the peering. The peer network
        may belong to a different project.
        """
        return pulumi.get(self, "peer_network")

    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        State for the peering, either `ACTIVE` or `INACTIVE`. The peering is
        `ACTIVE` when there's a matching configuration in the peer network.
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter(name="stateDetails")
    def state_details(self) -> pulumi.Output[str]:
        """
        Details about the current state of the peering.
        """
        return pulumi.get(self, "state_details")
| 49.802842
| 336
| 0.681586
| 3,517
| 28,039
| 5.18766
| 0.0617
| 0.064511
| 0.066648
| 0.091642
| 0.898657
| 0.883091
| 0.875966
| 0.86473
| 0.852836
| 0.844615
| 0
| 0.001708
| 0.227505
| 28,039
| 562
| 337
| 49.891459
| 0.840628
| 0.391597
| 0
| 0.719595
| 1
| 0
| 0.126502
| 0.054674
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162162
| false
| 0.003378
| 0.192568
| 0
| 0.452703
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3ca0e31e84e68cba6105c7fb06b6fd3dc62e6a8d
| 29,261
|
py
|
Python
|
Python/color_palette.py
|
JCVenterInstitute/DAFi-gating
|
b8e9eb77e648fd6c31b2f63cd0cd502a6a161714
|
[
"MIT"
] | 2
|
2018-04-20T15:01:52.000Z
|
2019-03-25T22:44:56.000Z
|
Python/color_palette.py
|
JCVenterInstitute/DAFi-gating
|
b8e9eb77e648fd6c31b2f63cd0cd502a6a161714
|
[
"MIT"
] | null | null | null |
Python/color_palette.py
|
JCVenterInstitute/DAFi-gating
|
b8e9eb77e648fd6c31b2f63cd0cd502a6a161714
|
[
"MIT"
] | 3
|
2018-06-18T00:47:00.000Z
|
2022-03-10T23:49:58.000Z
|
color_palette = [
'#FFFFFF', # White 0
'#000000', # Black
'#FF0000', # Red 1
'#000080', # Navy Blue 11
'#008000', # Dark Green 3
'#FFA500', # Orange 5
'#8A2BE2', # BlueViolet 6
'#808000', # Olive 7
'#00FFFF', # Cyan 8
'#FF00FF', # Magenta 9
'#0000FF', # Blue 4
'#00FF00', # Green 10
'#FFFF00', # Yellow 2
'#000080', # Navy Blue 11
'#F08080', # Light Coral 12
'#800080', # Purple 13
'#F0E68C', # Khaki 14
'#8FBC8F', # Dark Sea Green 15
'#2F4F4F', # Dark Slate Grey 16
'#008080', # Teal 17
'#9932CC', # Dark Orchid 18
'#FF7F50', # Coral 19
'#FFD700', # Gold 20
'#008B8B', # Cyan 4 21
'#800000', # Maroon 22
'#5F9EA0', # Cadet Blue 23
'#FFC0CB', # Pink 24
'#545454', # Grey 25
'#7FFFD4', # Aquamarine 26
'#ADD8E6', # Light Blue 27
'#DB7093', # Medium Violet Red 28
'#CD853F', # Tan 3 29
'#4169E1', # Royal Blue 30
'#708090', # Slate Grey 31
'#4682B4', # Steel Blue 32
'#D8BFD8', # Thistle 33
'#F5DEB3', # Wheat 34
'#9ACD32', # Yellow Green 35
'#BDB76B', # Dark Khaki 36
'#8B008B', # Magenta 4 37
'#556B2F', # Dark Olive Green 38
'#00CED1', # Dark Turquoise 39
'#FF1493', # Deep Pink 40
'#FF0031', # Red 1
'#FFFF31', # Yellow 2
'#008031', # Dark Green 3
'#00113F', # Blue 4
'#FFA531', # Orange 5
'#8A1132', # BlueViolet 6
'#808031', # Olive 7
'#00FF31', # Cyan 8
'#FF0031', # Magenta 9
'#00FF31', # Green 10
'#000031', # Navy Blue 11
'#F08031', # Light Coral 12
'#800031', # Purple 13
'#F0E631', # Khaki 14
'#8FBC31', # Dark Sea Green 15
'#2F4F31', # Dark Slate Grey 16
'#008031', # Teal 17
'#993231', # Dark Orchid 18
'#FF7F31', # Coral 19
'#FFD731', # Gold 20
'#008B31', # Cyan 4 21
'#800031', # Maroon 22
'#5F9E31', # Cadet Blue 23
'#FFC031', # Pink 24
'#545431', # Grey 25
'#7FFF31', # Aquamarine 26
'#ADD831', # Light Blue 27
'#DB7031', # Medium Violet Red 28
'#CD8531', # Tan 3 29
'#416931', # Royal Blue 30
'#708031', # Slate Grey 31
'#468231', # Steel Blue 32
'#D8BF31', # Thistle 33
'#F5DE31', # Wheat 34
'#9ACD31', # Yellow Green 35
'#BDB731', # Dark Khaki 36
'#8B0031', # Magenta 4 37
'#556B31', # Dark Olive Green 38
'#00C131', # Dark Turquoise 39
'#111431', # Deep Pink 40
'#140000', # Red 1
'#14FF00', # Yellow 2
'#148000', # Dark Green 3
'#1400FF', # Blue 4
'#14A500', # Orange 5
'#142BE2', # BlueViolet 6
'#148000', # Olive 7
'#14FFFF', # Cyan 8
'#1400FF', # Magenta 9
'#14FF00', # Green 10
'#140080', # Navy Blue 11
'#1480A0', # Light Coral 12
'#640040', # Purple 13
'#14E68C', # Khaki 14
'#14BC8F', # Dark Sea Green 15
'#144F4F', # Dark Slate Grey 16
'#148080', # Teal 17
'#1432CC', # Dark Orchid 18
'#147F50', # Coral 19
'#14D700', # Gold 20
'#141B8B', # Cyan 4 21
'#140000', # Maroon 22
'#149EA0', # Cadet Blue 23
'#14C0CB', # Pink 24
'#145454', # Grey 25
'#14FFD4', # Aquamarine 26
'#14D8E6', # Light Blue 27
'#147093', # Medium Violet Red 28
'#14853F', # Tan 3 29
'#1419E1', # Royal Blue 30
'#148090', # Slate Grey 31
'#1482B4', # Steel Blue 32
'#14BFD8', # Thistle 33
'#14DEB3', # Wheat 34
'#14CD32', # Yellow Green 35
'#14B76B', # Dark Khaki 36
'#14008B', # Magenta 4 37
'#146B2F', # Dark Olive Green 38
'#141ED1', # Dark Turquoise 39
'#141493', # Deep Pink 40
'#F0FBA0', # Red 1
'#ABFA20', # Yellow 2
'#008F0F', # Dark Green 3
'#0A30FF', # Blue 4
'#F3A544', # Orange 5
'#AA2B22', # BlueViolet 6
'#8C8C90', # Olive 7
'#05CFCF', # Cyan 8
'#F206AF', # Magenta 9
'#12FF0B', # Green 10
'#460B80', # Navy Blue 11
'#F0DBFC', # Light Coral 12
'#80DDA4', # Purple 13
'#D5C98F', # Khaki 14
'#80CCCA', # Dark Sea Green 15
'#7ACD45', # Dark Slate Grey 16
'#443032', # Teal 17
'#34266C', # Dark Orchid 18
'#817F50', # Coral 19
'#81D700', # Gold 20
'#811B8B', # Cyan 4 21
'#810000', # Maroon 22
'#819EA0', # Cadet Blue 23
'#81C0CB', # Pink 24
'#815454', # Grey 25
'#81FFD4', # Aquamarine 26
'#81D8E6', # Light Blue 27
'#817093', # Medium Violet Red 28
'#81853F', # Tan 3 29
'#8119E1', # Royal Blue 30
'#818090', # Slate Grey 31
'#8182B4', # Steel Blue 32
'#81BFD8', # Thistle 33
'#81DEB3', # Wheat 34
'#81CD32', # Yellow Green 35
'#81B76B', # Dark Khaki 36
'#81008B', # Magenta 4 37
'#816B2F', # Dark Olive Green 38
'#811ED1', # Dark Turquoise 39
'#811493', # Deep Pink 40
'#249000', # Red 1
'#249F00', # Yellow 2
'#2490B0', # Dark Green 3
'#1490AF', # Blue 4
'#149500', # Orange 5
'#149BE2', # BlueViolet 6
'#149000', # Olive 7
'#149FFF', # Cyan 8
'#1490FF', # Magenta 9
'#149F00', # Green 10
'#1F9080', # Navy Blue 11
'#1A9080', # Light Coral 12
'#109080', # Purple 13
'#14968C', # Khaki 14
'#149C8F', # Dark Sea Green 15
'#149F4F', # Dark Slate Grey 16
'#649080', # Teal 17
'#5492CC', # Dark Orchid 18
'#449F50', # Coral 19
'#349700', # Gold 20
'#249B8B', # Cyan 4 21
'#149000', # Maroon 22
'#049EA0', # Cadet Blue 23
'#1190CB', # Pink 24
'#119454', # Grey 25
'#119FD4', # Aquamarine 26
'#1198E6', # Light Blue 27
'#519093', # Medium Violet Red 28
'#41953F', # Tan 3 29
'#3499E1', # Royal Blue 30
'#249090', # Slate Grey 31
'#1492B4', # Steel Blue 32
'#219FD8', # Thistle 33
'#229EB3', # Wheat 34
'#239D32', # Yellow Green 35
'#24976B', # Dark Khaki 36
'#25908B', # Magenta 4 37
'#269B2F', # Dark Olive Green 38
'#279ED1', # Dark Turquoise 39
'#289493' # Deep Pink 40
'#FFFFFF', # White 0
'#000000', # Black
'#FF0000', # Red 1
'#000080', # Navy Blue 11
'#008000', # Dark Green 3
'#FFA500', # Orange 5
'#8A2BE2', # BlueViolet 6
'#808000', # Olive 7
'#00FFFF', # Cyan 8
'#FF00FF', # Magenta 9
'#0000FF', # Blue 4
'#00FF00', # Green 10
'#FFFF00', # Yellow 2
'#000080', # Navy Blue 11
'#F08080', # Light Coral 12
'#800080', # Purple 13
'#F0E68C', # Khaki 14
'#8FBC8F', # Dark Sea Green 15
'#2F4F4F', # Dark Slate Grey 16
'#008080', # Teal 17
'#9932CC', # Dark Orchid 18
'#FF7F50', # Coral 19
'#FFD700', # Gold 20
'#008B8B', # Cyan 4 21
'#800000', # Maroon 22
'#5F9EA0', # Cadet Blue 23
'#FFC0CB', # Pink 24
'#545454', # Grey 25
'#7FFFD4', # Aquamarine 26
'#ADD8E6', # Light Blue 27
'#DB7093', # Medium Violet Red 28
'#CD853F', # Tan 3 29
'#4169E1', # Royal Blue 30
'#708090', # Slate Grey 31
'#4682B4', # Steel Blue 32
'#D8BFD8', # Thistle 33
'#F5DEB3', # Wheat 34
'#9ACD32', # Yellow Green 35
'#BDB76B', # Dark Khaki 36
'#8B008B', # Magenta 4 37
'#556B2F', # Dark Olive Green 38
'#00CED1', # Dark Turquoise 39
'#FF1493', # Deep Pink 40
'#FF0031', # Red 1
'#FFFF31', # Yellow 2
'#008031', # Dark Green 3
'#00113F', # Blue 4
'#FFA531', # Orange 5
'#8A1132', # BlueViolet 6
'#808031', # Olive 7
'#00FF31', # Cyan 8
'#FF0031', # Magenta 9
'#00FF31', # Green 10
'#000031', # Navy Blue 11
'#F08031', # Light Coral 12
'#800031', # Purple 13
'#F0E631', # Khaki 14
'#8FBC31', # Dark Sea Green 15
'#2F4F31', # Dark Slate Grey 16
'#008031', # Teal 17
'#993231', # Dark Orchid 18
'#FF7F31', # Coral 19
'#FFD731', # Gold 20
'#008B31', # Cyan 4 21
'#800031', # Maroon 22
'#5F9E31', # Cadet Blue 23
'#FFC031', # Pink 24
'#545431', # Grey 25
'#7FFF31', # Aquamarine 26
'#ADD831', # Light Blue 27
'#DB7031', # Medium Violet Red 28
'#CD8531', # Tan 3 29
'#416931', # Royal Blue 30
'#708031', # Slate Grey 31
'#468231', # Steel Blue 32
'#D8BF31', # Thistle 33
'#F5DE31', # Wheat 34
'#9ACD31', # Yellow Green 35
'#BDB731', # Dark Khaki 36
'#8B0031', # Magenta 4 37
'#556B31', # Dark Olive Green 38
'#00C131', # Dark Turquoise 39
'#111431', # Deep Pink 40
'#140000', # Red 1
'#14FF00', # Yellow 2
'#148000', # Dark Green 3
'#1400FF', # Blue 4
'#14A500', # Orange 5
'#142BE2', # BlueViolet 6
'#148000', # Olive 7
'#14FFFF', # Cyan 8
'#1400FF', # Magenta 9
'#14FF00', # Green 10
'#140080', # Navy Blue 11
'#1480A0', # Light Coral 12
'#640040', # Purple 13
'#14E68C', # Khaki 14
'#14BC8F', # Dark Sea Green 15
'#144F4F', # Dark Slate Grey 16
'#148080', # Teal 17
'#1432CC', # Dark Orchid 18
'#147F50', # Coral 19
'#14D700', # Gold 20
'#141B8B', # Cyan 4 21
'#140000', # Maroon 22
'#149EA0', # Cadet Blue 23
'#14C0CB', # Pink 24
'#145454', # Grey 25
'#14FFD4', # Aquamarine 26
'#14D8E6', # Light Blue 27
'#147093', # Medium Violet Red 28
'#14853F', # Tan 3 29
'#1419E1', # Royal Blue 30
'#148090', # Slate Grey 31
'#1482B4', # Steel Blue 32
'#14BFD8', # Thistle 33
'#14DEB3', # Wheat 34
'#14CD32', # Yellow Green 35
'#14B76B', # Dark Khaki 36
'#14008B', # Magenta 4 37
'#146B2F', # Dark Olive Green 38
'#141ED1', # Dark Turquoise 39
'#141493', # Deep Pink 40
'#F0FBA0', # Red 1
'#ABFA20', # Yellow 2
'#008F0F', # Dark Green 3
'#0A30FF', # Blue 4
'#F3A544', # Orange 5
'#AA2B22', # BlueViolet 6
'#8C8C90', # Olive 7
'#05CFCF', # Cyan 8
'#F206AF', # Magenta 9
'#12FF0B', # Green 10
'#460B80', # Navy Blue 11
'#F0DBFC', # Light Coral 12
'#80DDA4', # Purple 13
'#D5C98F', # Khaki 14
'#80CCCA', # Dark Sea Green 15
'#7ACD45', # Dark Slate Grey 16
'#443032', # Teal 17
'#34266C', # Dark Orchid 18
'#817F50', # Coral 19
'#81D700', # Gold 20
'#811B8B', # Cyan 4 21
'#810000', # Maroon 22
'#819EA0', # Cadet Blue 23
'#81C0CB', # Pink 24
'#815454', # Grey 25
'#81FFD4', # Aquamarine 26
'#81D8E6', # Light Blue 27
'#817093', # Medium Violet Red 28
'#81853F', # Tan 3 29
'#8119E1', # Royal Blue 30
'#818090', # Slate Grey 31
'#8182B4', # Steel Blue 32
'#81BFD8', # Thistle 33
'#81DEB3', # Wheat 34
'#81CD32', # Yellow Green 35
'#81B76B', # Dark Khaki 36
'#81008B', # Magenta 4 37
'#816B2F', # Dark Olive Green 38
'#811ED1', # Dark Turquoise 39
'#811493', # Deep Pink 40
'#249000', # Red 1
'#249F00', # Yellow 2
'#2490B0', # Dark Green 3
'#1490AF', # Blue 4
'#149500', # Orange 5
'#149BE2', # BlueViolet 6
'#149000', # Olive 7
'#149FFF', # Cyan 8
'#1490FF', # Magenta 9
'#149F00', # Green 10
'#1F9080', # Navy Blue 11
'#1A9080', # Light Coral 12
'#109080', # Purple 13
'#14968C', # Khaki 14
'#149C8F', # Dark Sea Green 15
'#149F4F', # Dark Slate Grey 16
'#649080', # Teal 17
'#5492CC', # Dark Orchid 18
'#449F50', # Coral 19
'#349700', # Gold 20
'#249B8B', # Cyan 4 21
'#149000', # Maroon 22
'#049EA0', # Cadet Blue 23
'#1190CB', # Pink 24
'#119454', # Grey 25
'#119FD4', # Aquamarine 26
'#1198E6', # Light Blue 27
'#519093', # Medium Violet Red 28
'#41953F', # Tan 3 29
'#3499E1', # Royal Blue 30
'#249090', # Slate Grey 31
'#1492B4', # Steel Blue 32
'#219FD8', # Thistle 33
'#229EB3', # Wheat 34
'#239D32', # Yellow Green 35
'#24976B', # Dark Khaki 36
'#25908B', # Magenta 4 37
'#269B2F', # Dark Olive Green 38
'#279ED1', # Dark Turquoise 39
'#289493' # Deep Pink 40
'#FFFFFF', # White 0
'#000000', # Black
'#FF0000', # Red 1
'#000080', # Navy Blue 11
'#008000', # Dark Green 3
'#FFA500', # Orange 5
'#8A2BE2', # BlueViolet 6
'#808000', # Olive 7
'#00FFFF', # Cyan 8
'#FF00FF', # Magenta 9
'#0000FF', # Blue 4
'#00FF00', # Green 10
'#FFFF00', # Yellow 2
'#000080', # Navy Blue 11
'#F08080', # Light Coral 12
'#800080', # Purple 13
'#F0E68C', # Khaki 14
'#8FBC8F', # Dark Sea Green 15
'#2F4F4F', # Dark Slate Grey 16
'#008080', # Teal 17
'#9932CC', # Dark Orchid 18
'#FF7F50', # Coral 19
'#FFD700', # Gold 20
'#008B8B', # Cyan 4 21
'#800000', # Maroon 22
'#5F9EA0', # Cadet Blue 23
'#FFC0CB', # Pink 24
'#545454', # Grey 25
'#7FFFD4', # Aquamarine 26
'#ADD8E6', # Light Blue 27
'#DB7093', # Medium Violet Red 28
'#CD853F', # Tan 3 29
'#4169E1', # Royal Blue 30
'#708090', # Slate Grey 31
'#4682B4', # Steel Blue 32
'#D8BFD8', # Thistle 33
'#F5DEB3', # Wheat 34
'#9ACD32', # Yellow Green 35
'#BDB76B', # Dark Khaki 36
'#8B008B', # Magenta 4 37
'#556B2F', # Dark Olive Green 38
'#00CED1', # Dark Turquoise 39
'#FF1493', # Deep Pink 40
'#FF0031', # Red 1
'#FFFF31', # Yellow 2
'#008031', # Dark Green 3
'#00113F', # Blue 4
'#FFA531', # Orange 5
'#8A1132', # BlueViolet 6
'#808031', # Olive 7
'#00FF31', # Cyan 8
'#FF0031', # Magenta 9
'#00FF31', # Green 10
'#000031', # Navy Blue 11
'#F08031', # Light Coral 12
'#800031', # Purple 13
'#F0E631', # Khaki 14
'#8FBC31', # Dark Sea Green 15
'#2F4F31', # Dark Slate Grey 16
'#008031', # Teal 17
'#993231', # Dark Orchid 18
'#FF7F31', # Coral 19
'#FFD731', # Gold 20
'#008B31', # Cyan 4 21
'#800031', # Maroon 22
'#5F9E31', # Cadet Blue 23
'#FFC031', # Pink 24
'#545431', # Grey 25
'#7FFF31', # Aquamarine 26
'#ADD831', # Light Blue 27
'#DB7031', # Medium Violet Red 28
'#CD8531', # Tan 3 29
'#416931', # Royal Blue 30
'#708031', # Slate Grey 31
'#468231', # Steel Blue 32
'#D8BF31', # Thistle 33
'#F5DE31', # Wheat 34
'#9ACD31', # Yellow Green 35
'#BDB731', # Dark Khaki 36
'#8B0031', # Magenta 4 37
'#556B31', # Dark Olive Green 38
'#00C131', # Dark Turquoise 39
'#111431', # Deep Pink 40
'#140000', # Red 1
'#14FF00', # Yellow 2
'#148000', # Dark Green 3
'#1400FF', # Blue 4
'#14A500', # Orange 5
'#142BE2', # BlueViolet 6
'#148000', # Olive 7
'#14FFFF', # Cyan 8
'#1400FF', # Magenta 9
'#14FF00', # Green 10
'#140080', # Navy Blue 11
'#1480A0', # Light Coral 12
'#640040', # Purple 13
'#14E68C', # Khaki 14
'#14BC8F', # Dark Sea Green 15
'#144F4F', # Dark Slate Grey 16
'#148080', # Teal 17
'#1432CC', # Dark Orchid 18
'#147F50', # Coral 19
'#14D700', # Gold 20
'#141B8B', # Cyan 4 21
'#140000', # Maroon 22
'#149EA0', # Cadet Blue 23
'#14C0CB', # Pink 24
'#145454', # Grey 25
'#14FFD4', # Aquamarine 26
'#14D8E6', # Light Blue 27
'#147093', # Medium Violet Red 28
'#14853F', # Tan 3 29
'#1419E1', # Royal Blue 30
'#148090', # Slate Grey 31
'#1482B4', # Steel Blue 32
'#14BFD8', # Thistle 33
'#14DEB3', # Wheat 34
'#14CD32', # Yellow Green 35
'#14B76B', # Dark Khaki 36
'#14008B', # Magenta 4 37
'#146B2F', # Dark Olive Green 38
'#141ED1', # Dark Turquoise 39
'#141493', # Deep Pink 40
'#F0FBA0', # Red 1
'#ABFA20', # Yellow 2
'#008F0F', # Dark Green 3
'#0A30FF', # Blue 4
'#F3A544', # Orange 5
'#AA2B22', # BlueViolet 6
'#8C8C90', # Olive 7
'#05CFCF', # Cyan 8
'#F206AF', # Magenta 9
'#12FF0B', # Green 10
'#460B80', # Navy Blue 11
'#F0DBFC', # Light Coral 12
'#80DDA4', # Purple 13
'#D5C98F', # Khaki 14
'#80CCCA', # Dark Sea Green 15
'#7ACD45', # Dark Slate Grey 16
'#443032', # Teal 17
'#34266C', # Dark Orchid 18
'#817F50', # Coral 19
'#81D700', # Gold 20
'#811B8B', # Cyan 4 21
'#810000', # Maroon 22
'#819EA0', # Cadet Blue 23
'#81C0CB', # Pink 24
'#815454', # Grey 25
'#81FFD4', # Aquamarine 26
'#81D8E6', # Light Blue 27
'#817093', # Medium Violet Red 28
'#81853F', # Tan 3 29
'#8119E1', # Royal Blue 30
'#818090', # Slate Grey 31
'#8182B4', # Steel Blue 32
'#81BFD8', # Thistle 33
'#81DEB3', # Wheat 34
'#81CD32', # Yellow Green 35
'#81B76B', # Dark Khaki 36
'#81008B', # Magenta 4 37
'#816B2F', # Dark Olive Green 38
'#811ED1', # Dark Turquoise 39
'#811493', # Deep Pink 40
'#249000', # Red 1
'#249F00', # Yellow 2
'#2490B0', # Dark Green 3
'#1490AF', # Blue 4
'#149500', # Orange 5
'#149BE2', # BlueViolet 6
'#149000', # Olive 7
'#149FFF', # Cyan 8
'#1490FF', # Magenta 9
'#149F00', # Green 10
'#1F9080', # Navy Blue 11
'#1A9080', # Light Coral 12
'#109080', # Purple 13
'#14968C', # Khaki 14
'#149C8F', # Dark Sea Green 15
'#149F4F', # Dark Slate Grey 16
'#649080', # Teal 17
'#5492CC', # Dark Orchid 18
'#449F50', # Coral 19
'#349700', # Gold 20
'#249B8B', # Cyan 4 21
'#149000', # Maroon 22
'#049EA0', # Cadet Blue 23
'#1190CB', # Pink 24
'#119454', # Grey 25
'#119FD4', # Aquamarine 26
'#1198E6', # Light Blue 27
'#519093', # Medium Violet Red 28
'#41953F', # Tan 3 29
'#3499E1', # Royal Blue 30
'#249090', # Slate Grey 31
'#1492B4', # Steel Blue 32
'#219FD8', # Thistle 33
'#229EB3', # Wheat 34
'#239D32', # Yellow Green 35
'#24976B', # Dark Khaki 36
'#25908B', # Magenta 4 37
'#269B2F', # Dark Olive Green 38
'#279ED1', # Dark Turquoise 39
'#289493' # Deep Pink 40
'#FFFFFF', # White 0
'#000000', # Black
'#FF0000', # Red 1
'#000080', # Navy Blue 11
'#008000', # Dark Green 3
'#FFA500', # Orange 5
'#8A2BE2', # BlueViolet 6
'#808000', # Olive 7
'#00FFFF', # Cyan 8
'#FF00FF', # Magenta 9
'#0000FF', # Blue 4
'#00FF00', # Green 10
'#FFFF00', # Yellow 2
'#000080', # Navy Blue 11
'#F08080', # Light Coral 12
'#800080', # Purple 13
'#F0E68C', # Khaki 14
'#8FBC8F', # Dark Sea Green 15
'#2F4F4F', # Dark Slate Grey 16
'#008080', # Teal 17
'#9932CC', # Dark Orchid 18
'#FF7F50', # Coral 19
'#FFD700', # Gold 20
'#008B8B', # Cyan 4 21
'#800000', # Maroon 22
'#5F9EA0', # Cadet Blue 23
'#FFC0CB', # Pink 24
'#545454', # Grey 25
'#7FFFD4', # Aquamarine 26
'#ADD8E6', # Light Blue 27
'#DB7093', # Medium Violet Red 28
'#CD853F', # Tan 3 29
'#4169E1', # Royal Blue 30
'#708090', # Slate Grey 31
'#4682B4', # Steel Blue 32
'#D8BFD8', # Thistle 33
'#F5DEB3', # Wheat 34
'#9ACD32', # Yellow Green 35
'#BDB76B', # Dark Khaki 36
'#8B008B', # Magenta 4 37
'#556B2F', # Dark Olive Green 38
'#00CED1', # Dark Turquoise 39
'#FF1493', # Deep Pink 40
'#FF0031', # Red 1
'#FFFF31', # Yellow 2
'#008031', # Dark Green 3
'#00113F', # Blue 4
'#FFA531', # Orange 5
'#8A1132', # BlueViolet 6
'#808031', # Olive 7
'#00FF31', # Cyan 8
'#FF0031', # Magenta 9
'#00FF31', # Green 10
'#000031', # Navy Blue 11
'#F08031', # Light Coral 12
'#800031', # Purple 13
'#F0E631', # Khaki 14
'#8FBC31', # Dark Sea Green 15
'#2F4F31', # Dark Slate Grey 16
'#008031', # Teal 17
'#993231', # Dark Orchid 18
'#FF7F31', # Coral 19
'#FFD731', # Gold 20
'#008B31', # Cyan 4 21
'#800031', # Maroon 22
'#5F9E31', # Cadet Blue 23
'#FFC031', # Pink 24
'#545431', # Grey 25
'#7FFF31', # Aquamarine 26
'#ADD831', # Light Blue 27
'#DB7031', # Medium Violet Red 28
'#CD8531', # Tan 3 29
'#416931', # Royal Blue 30
'#708031', # Slate Grey 31
'#468231', # Steel Blue 32
'#D8BF31', # Thistle 33
'#F5DE31', # Wheat 34
'#9ACD31', # Yellow Green 35
'#BDB731', # Dark Khaki 36
'#8B0031', # Magenta 4 37
'#556B31', # Dark Olive Green 38
'#00C131', # Dark Turquoise 39
'#111431', # Deep Pink 40
'#140000', # Red 1
'#14FF00', # Yellow 2
'#148000', # Dark Green 3
'#1400FF', # Blue 4
'#14A500', # Orange 5
'#142BE2', # BlueViolet 6
'#148000', # Olive 7
'#14FFFF', # Cyan 8
'#1400FF', # Magenta 9
'#14FF00', # Green 10
'#140080', # Navy Blue 11
'#1480A0', # Light Coral 12
'#640040', # Purple 13
'#14E68C', # Khaki 14
'#14BC8F', # Dark Sea Green 15
'#144F4F', # Dark Slate Grey 16
'#148080', # Teal 17
'#1432CC', # Dark Orchid 18
'#147F50', # Coral 19
'#14D700', # Gold 20
'#141B8B', # Cyan 4 21
'#140000', # Maroon 22
'#149EA0', # Cadet Blue 23
'#14C0CB', # Pink 24
'#145454', # Grey 25
'#14FFD4', # Aquamarine 26
'#14D8E6', # Light Blue 27
'#147093', # Medium Violet Red 28
'#14853F', # Tan 3 29
'#1419E1', # Royal Blue 30
'#148090', # Slate Grey 31
'#1482B4', # Steel Blue 32
'#14BFD8', # Thistle 33
'#14DEB3', # Wheat 34
'#14CD32', # Yellow Green 35
'#14B76B', # Dark Khaki 36
'#14008B', # Magenta 4 37
'#146B2F', # Dark Olive Green 38
'#141ED1', # Dark Turquoise 39
'#141493', # Deep Pink 40
'#F0FBA0', # Red 1
'#ABFA20', # Yellow 2
'#008F0F', # Dark Green 3
'#0A30FF', # Blue 4
'#F3A544', # Orange 5
'#AA2B22', # BlueViolet 6
'#8C8C90', # Olive 7
'#05CFCF', # Cyan 8
'#F206AF', # Magenta 9
'#12FF0B', # Green 10
'#460B80', # Navy Blue 11
'#F0DBFC', # Light Coral 12
'#80DDA4', # Purple 13
'#D5C98F', # Khaki 14
'#80CCCA', # Dark Sea Green 15
'#7ACD45', # Dark Slate Grey 16
'#443032', # Teal 17
'#34266C', # Dark Orchid 18
'#817F50', # Coral 19
'#81D700', # Gold 20
'#811B8B', # Cyan 4 21
'#810000', # Maroon 22
'#819EA0', # Cadet Blue 23
'#81C0CB', # Pink 24
'#815454', # Grey 25
'#81FFD4', # Aquamarine 26
'#81D8E6', # Light Blue 27
'#817093', # Medium Violet Red 28
'#81853F', # Tan 3 29
'#8119E1', # Royal Blue 30
'#818090', # Slate Grey 31
'#8182B4', # Steel Blue 32
'#81BFD8', # Thistle 33
'#81DEB3', # Wheat 34
'#81CD32', # Yellow Green 35
'#81B76B', # Dark Khaki 36
'#81008B', # Magenta 4 37
'#816B2F', # Dark Olive Green 38
'#811ED1', # Dark Turquoise 39
'#811493', # Deep Pink 40
'#249000', # Red 1
'#249F00', # Yellow 2
'#2490B0', # Dark Green 3
'#1490AF', # Blue 4
'#149500', # Orange 5
'#149BE2', # BlueViolet 6
'#149000', # Olive 7
'#149FFF', # Cyan 8
'#1490FF', # Magenta 9
'#149F00', # Green 10
'#1F9080', # Navy Blue 11
'#1A9080', # Light Coral 12
'#109080', # Purple 13
'#14968C', # Khaki 14
'#149C8F', # Dark Sea Green 15
'#149F4F', # Dark Slate Grey 16
'#649080', # Teal 17
'#5492CC', # Dark Orchid 18
'#449F50', # Coral 19
'#349700', # Gold 20
'#249B8B', # Cyan 4 21
'#149000', # Maroon 22
'#049EA0', # Cadet Blue 23
'#1190CB', # Pink 24
'#119454', # Grey 25
'#119FD4', # Aquamarine 26
'#1198E6', # Light Blue 27
'#519093', # Medium Violet Red 28
'#41953F', # Tan 3 29
'#3499E1', # Royal Blue 30
'#249090', # Slate Grey 31
'#1492B4', # Steel Blue 32
'#219FD8', # Thistle 33
'#229EB3', # Wheat 34
'#239D32', # Yellow Green 35
'#24976B', # Dark Khaki 36
'#25908B', # Magenta 4 37
'#269B2F', # Dark Olive Green 38
'#279ED1', # Dark Turquoise 39
'#289493' # Deep Pink 40
'#FFFFFF', # White 0
'#000000', # Black
'#FF0000', # Red 1
'#000080', # Navy Blue 11
'#008000', # Dark Green 3
'#FFA500', # Orange 5
'#8A2BE2', # BlueViolet 6
'#808000', # Olive 7
'#00FFFF', # Cyan 8
'#FF00FF', # Magenta 9
'#0000FF', # Blue 4
'#00FF00', # Green 10
'#FFFF00', # Yellow 2
'#000080', # Navy Blue 11
'#F08080', # Light Coral 12
'#800080', # Purple 13
'#F0E68C', # Khaki 14
'#8FBC8F', # Dark Sea Green 15
'#2F4F4F', # Dark Slate Grey 16
'#008080', # Teal 17
'#9932CC', # Dark Orchid 18
'#FF7F50', # Coral 19
'#FFD700', # Gold 20
'#008B8B', # Cyan 4 21
'#800000', # Maroon 22
'#5F9EA0', # Cadet Blue 23
'#FFC0CB', # Pink 24
'#545454', # Grey 25
'#7FFFD4', # Aquamarine 26
'#ADD8E6', # Light Blue 27
'#DB7093', # Medium Violet Red 28
'#CD853F', # Tan 3 29
'#4169E1', # Royal Blue 30
'#708090', # Slate Grey 31
'#4682B4', # Steel Blue 32
'#D8BFD8', # Thistle 33
'#F5DEB3', # Wheat 34
'#9ACD32', # Yellow Green 35
'#BDB76B', # Dark Khaki 36
'#8B008B', # Magenta 4 37
'#556B2F', # Dark Olive Green 38
'#00CED1', # Dark Turquoise 39
'#FF1493', # Deep Pink 40
'#FF0031', # Red 1
'#FFFF31', # Yellow 2
'#008031', # Dark Green 3
'#00113F', # Blue 4
'#FFA531', # Orange 5
'#8A1132', # BlueViolet 6
'#808031', # Olive 7
'#00FF31', # Cyan 8
'#FF0031', # Magenta 9
'#00FF31', # Green 10
'#000031', # Navy Blue 11
'#F08031', # Light Coral 12
'#800031', # Purple 13
'#F0E631', # Khaki 14
'#8FBC31', # Dark Sea Green 15
'#2F4F31', # Dark Slate Grey 16
'#008031', # Teal 17
'#993231', # Dark Orchid 18
'#FF7F31', # Coral 19
'#FFD731', # Gold 20
'#008B31', # Cyan 4 21
'#800031', # Maroon 22
'#5F9E31', # Cadet Blue 23
'#FFC031', # Pink 24
'#545431', # Grey 25
'#7FFF31', # Aquamarine 26
'#ADD831', # Light Blue 27
'#DB7031', # Medium Violet Red 28
'#CD8531', # Tan 3 29
'#416931', # Royal Blue 30
'#708031', # Slate Grey 31
'#468231', # Steel Blue 32
'#D8BF31', # Thistle 33
'#F5DE31', # Wheat 34
'#9ACD31', # Yellow Green 35
'#BDB731', # Dark Khaki 36
'#8B0031', # Magenta 4 37
'#556B31', # Dark Olive Green 38
'#00C131', # Dark Turquoise 39
'#111431', # Deep Pink 40
'#140000', # Red 1
'#14FF00', # Yellow 2
'#148000', # Dark Green 3
'#1400FF', # Blue 4
'#14A500', # Orange 5
'#142BE2', # BlueViolet 6
'#148000', # Olive 7
'#14FFFF', # Cyan 8
'#1400FF', # Magenta 9
'#14FF00', # Green 10
'#140080', # Navy Blue 11
'#1480A0', # Light Coral 12
'#640040', # Purple 13
'#14E68C', # Khaki 14
'#14BC8F', # Dark Sea Green 15
'#144F4F', # Dark Slate Grey 16
'#148080', # Teal 17
'#1432CC', # Dark Orchid 18
'#147F50', # Coral 19
'#14D700', # Gold 20
'#141B8B', # Cyan 4 21
'#140000', # Maroon 22
'#149EA0', # Cadet Blue 23
'#14C0CB', # Pink 24
'#145454', # Grey 25
'#14FFD4', # Aquamarine 26
'#14D8E6', # Light Blue 27
'#147093', # Medium Violet Red 28
'#14853F', # Tan 3 29
'#1419E1', # Royal Blue 30
'#148090', # Slate Grey 31
'#1482B4', # Steel Blue 32
'#14BFD8', # Thistle 33
'#14DEB3', # Wheat 34
'#14CD32', # Yellow Green 35
'#14B76B', # Dark Khaki 36
'#14008B', # Magenta 4 37
'#146B2F', # Dark Olive Green 38
'#141ED1', # Dark Turquoise 39
'#141493', # Deep Pink 40
'#F0FBA0', # Red 1
'#ABFA20', # Yellow 2
'#008F0F', # Dark Green 3
'#0A30FF', # Blue 4
'#F3A544', # Orange 5
'#AA2B22', # BlueViolet 6
'#8C8C90', # Olive 7
'#05CFCF', # Cyan 8
'#F206AF', # Magenta 9
'#12FF0B', # Green 10
'#460B80', # Navy Blue 11
'#F0DBFC', # Light Coral 12
'#80DDA4', # Purple 13
'#D5C98F', # Khaki 14
'#80CCCA', # Dark Sea Green 15
'#7ACD45', # Dark Slate Grey 16
'#443032', # Teal 17
'#34266C', # Dark Orchid 18
'#817F50', # Coral 19
'#81D700', # Gold 20
'#811B8B', # Cyan 4 21
'#810000', # Maroon 22
'#819EA0', # Cadet Blue 23
'#81C0CB', # Pink 24
'#815454', # Grey 25
'#81FFD4', # Aquamarine 26
'#81D8E6', # Light Blue 27
'#817093', # Medium Violet Red 28
'#81853F', # Tan 3 29
'#8119E1', # Royal Blue 30
'#818090', # Slate Grey 31
'#8182B4', # Steel Blue 32
'#81BFD8', # Thistle 33
'#81DEB3', # Wheat 34
'#81CD32', # Yellow Green 35
'#81B76B', # Dark Khaki 36
'#81008B', # Magenta 4 37
'#816B2F', # Dark Olive Green 38
'#811ED1', # Dark Turquoise 39
'#811493', # Deep Pink 40
'#249000', # Red 1
'#249F00', # Yellow 2
'#2490B0', # Dark Green 3
'#1490AF', # Blue 4
'#149500', # Orange 5
'#149BE2', # BlueViolet 6
'#149000', # Olive 7
'#149FFF', # Cyan 8
'#1490FF', # Magenta 9
'#149F00', # Green 10
'#1F9080', # Navy Blue 11
'#1A9080', # Light Coral 12
'#109080', # Purple 13
'#14968C', # Khaki 14
'#149C8F', # Dark Sea Green 15
'#149F4F', # Dark Slate Grey 16
'#649080', # Teal 17
'#5492CC', # Dark Orchid 18
'#449F50', # Coral 19
'#349700', # Gold 20
'#249B8B', # Cyan 4 21
'#149000', # Maroon 22
'#049EA0', # Cadet Blue 23
'#1190CB', # Pink 24
'#119454', # Grey 25
'#119FD4', # Aquamarine 26
'#1198E6', # Light Blue 27
'#519093', # Medium Violet Red 28
'#41953F', # Tan 3 29
'#3499E1', # Royal Blue 30
'#249090', # Slate Grey 31
'#1492B4', # Steel Blue 32
'#219FD8', # Thistle 33
'#229EB3', # Wheat 34
'#239D32', # Yellow Green 35
'#24976B', # Dark Khaki 36
'#25908B', # Magenta 4 37
'#269B2F', # Dark Olive Green 38
'#279ED1', # Dark Turquoise 39
'#289493' # Deep Pink 40
]
| 28.715407
| 37
| 0.532552
| 3,647
| 29,261
| 4.272553
| 0.074582
| 0.028879
| 0.019253
| 0.022462
| 0.99923
| 0.99923
| 0.99923
| 0.99923
| 0.99923
| 0.99923
| 0
| 0.301849
| 0.294077
| 29,261
| 1,018
| 38
| 28.743615
| 0.452508
| 0.413827
| 0
| 0.998033
| 0
| 0
| 0.440347
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3cb16290451e0fd7452feb771084663d9cd501ba
| 16,119
|
py
|
Python
|
q2_diversity/tests/test_alpha.py
|
gregcaporaso/q2-diversity
|
3b03b4c1e47b2893668f14c91612507e4864c34e
|
[
"BSD-3-Clause"
] | null | null | null |
q2_diversity/tests/test_alpha.py
|
gregcaporaso/q2-diversity
|
3b03b4c1e47b2893668f14c91612507e4864c34e
|
[
"BSD-3-Clause"
] | null | null | null |
q2_diversity/tests/test_alpha.py
|
gregcaporaso/q2-diversity
|
3b03b4c1e47b2893668f14c91612507e4864c34e
|
[
"BSD-3-Clause"
] | null | null | null |
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2017, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import os
import tempfile
import unittest
import io
import biom
import skbio
import qiime2
import numpy as np
import pandas as pd
import pandas.util.testing as pdt
from q2_diversity import (alpha, alpha_phylogenetic, alpha_correlation,
alpha_group_significance)
class AlphaTests(unittest.TestCase):
    """Unit tests for the ``alpha`` and ``alpha_phylogenetic`` actions."""

    def _table(self):
        # Shared 2-feature x 3-sample fixture: O1 absent from S1,
        # O2 present in every sample.
        return biom.Table(np.array([[0, 1, 3], [1, 1, 2]]),
                          ['O1', 'O2'],
                          ['S1', 'S2', 'S3'])

    def _tree(self):
        # Phylogeny covering both table features (O3 is an extra tip that
        # is never observed in the table).
        return skbio.TreeNode.read(io.StringIO(
            '((O1:0.25, O2:0.50):0.25, O3:0.75)root;'))

    def test_alpha(self):
        actual = alpha(table=self._table(), metric='observed_otus')
        # expected computed by hand
        expected = pd.Series({'S1': 1, 'S2': 2, 'S3': 2},
                             name='observed_otus')
        pdt.assert_series_equal(actual, expected)

    def test_alpha_phylo_metric(self):
        # Phylogenetic metrics must be rejected by the
        # non-phylogenetic action.
        with self.assertRaises(ValueError):
            alpha(table=self._table(), metric='faith_pd')

    def test_alpha_unknown_metric(self):
        with self.assertRaises(ValueError):
            alpha(table=self._table(), metric='not-a-metric')

    def test_alpha_empty_table(self):
        t = biom.Table(np.array([]), [], [])
        with self.assertRaisesRegex(ValueError, "empty"):
            alpha(table=t, metric='observed_otus')

    def test_alpha_phylogenetic(self):
        actual = alpha_phylogenetic(table=self._table(),
                                    phylogeny=self._tree(),
                                    metric='faith_pd')
        # expected computed with skbio.diversity.alpha_diversity
        expected = pd.Series({'S1': 0.75, 'S2': 1.0, 'S3': 1.0},
                             name='faith_pd')
        pdt.assert_series_equal(actual, expected)

    def test_alpha_phylogenetic_non_phylo_metric(self):
        # Non-phylogenetic metrics must be rejected by the
        # phylogenetic action.
        with self.assertRaises(ValueError):
            alpha_phylogenetic(table=self._table(), phylogeny=self._tree(),
                               metric='observed_otus')

    def test_alpha_phylogenetic_unknown_metric(self):
        with self.assertRaises(ValueError):
            alpha_phylogenetic(table=self._table(), phylogeny=self._tree(),
                               metric='not-a-metric')

    def test_alpha_phylogenetic_skbio_error_rewriting(self):
        # This tree is missing feature O2, so skbio raises
        # MissingNodeError; the action should rewrite the message.
        tree = skbio.TreeNode.read(io.StringIO(
            '((O1:0.25):0.25, O3:0.75)root;'))
        # Verify through regex that there is a ``feature_ids`` substring
        # followed by a ``phylogeny``
        with self.assertRaisesRegex(skbio.tree.MissingNodeError,
                                    'feature_ids.*phylogeny'):
            alpha_phylogenetic(table=self._table(), phylogeny=tree,
                               metric='faith_pd')

    def test_alpha_phylogenetic_empty_table(self):
        t = biom.Table(np.array([]), [], [])
        tree = skbio.TreeNode.read(io.StringIO(
            '((O1:0.25):0.25, O3:0.75)root;'))
        with self.assertRaisesRegex(ValueError, "empty"):
            alpha_phylogenetic(table=t, phylogeny=tree, metric='faith_pd')
class AlphaCorrelationTests(unittest.TestCase):
    """Unit tests for the ``alpha_correlation`` visualizer."""

    def _alpha_div(self):
        # Alpha diversity vector shared by most tests.
        return pd.Series([2.0, 4.0, 6.0], name='alpha-div',
                         index=['sample1', 'sample2', 'sample3'])

    def _metadata(self, values, index=None):
        # Single-column metadata; defaults to the three shared sample ids.
        if index is None:
            index = ['sample1', 'sample2', 'sample3']
        return qiime2.Metadata(pd.DataFrame({'value': values}, index=index))

    def _read(self, fp):
        # Read a results file once (and close it) instead of re-opening
        # an anonymous handle for every assertion.
        with open(fp) as fh:
            return fh.read()

    def test_spearman(self):
        md = self._metadata(['1.0', '2.0', '3.0'])
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_correlation(output_dir, self._alpha_div(), md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            jsonp_fp = os.path.join(output_dir, 'category-value.jsonp')
            self.assertTrue(os.path.exists(jsonp_fp))
            jsonp = self._read(jsonp_fp)
            # Spearman is the default correlation method.
            self.assertIn('Spearman', jsonp)
            self.assertIn('"sampleSize": 3', jsonp)
            self.assertIn('"data":', jsonp)
            self.assertNotIn('filtered', jsonp)

    def test_pearson(self):
        md = self._metadata(['1.0', '2.0', '3.0'])
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_correlation(output_dir, self._alpha_div(), md,
                              method='pearson')
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            jsonp_fp = os.path.join(output_dir, 'category-value.jsonp')
            self.assertTrue(os.path.exists(jsonp_fp))
            jsonp = self._read(jsonp_fp)
            self.assertIn('Pearson', jsonp)
            self.assertIn('"sampleSize": 3', jsonp)
            self.assertIn('"data":', jsonp)
            self.assertNotIn('filtered', jsonp)

    def test_bad_method(self):
        # NOTE(review): originally constructed qiime2.MetadataCategory
        # here; switched to qiime2.Metadata for consistency with every
        # other test in this class (the method check fails either way).
        md = self._metadata(['1.0', '2.0', '3.0'])
        with tempfile.TemporaryDirectory() as output_dir:
            with self.assertRaises(ValueError):
                alpha_correlation(output_dir, self._alpha_div(), md,
                                  method='bad!')

    def test_bad_metadata(self):
        # Non-numeric metadata values cannot be correlated.
        md = self._metadata(['a', 'b', 'c'])
        with tempfile.TemporaryDirectory() as output_dir:
            with self.assertRaises(ValueError):
                alpha_correlation(output_dir, self._alpha_div(), md)

    def test_nan_metadata(self):
        # A missing metadata value should filter that sample out.
        md = self._metadata(['1.0', '2.0', ''])
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_correlation(output_dir, self._alpha_div(), md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            jsonp_fp = os.path.join(output_dir, 'category-value.jsonp')
            self.assertTrue(os.path.exists(jsonp_fp))
            jsonp = self._read(jsonp_fp)
            self.assertIn('"filtered": 2', jsonp)
            self.assertIn('"initial": 3', jsonp)

    def test_extra_metadata(self):
        # Metadata covering extra samples is ignored.
        md = self._metadata(['1.0', '2.0', '3.0', '4.0'],
                            index=['sample1', 'sample2', 'sample3',
                                   'sample4'])
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_correlation(output_dir, self._alpha_div(), md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            jsonp_fp = os.path.join(output_dir, 'category-value.jsonp')
            self.assertTrue(os.path.exists(jsonp_fp))
            self.assertIn('"sampleSize": 3', self._read(jsonp_fp))

    def test_extra_alpha_div(self):
        # Alpha diversity covering extra samples is ignored.
        alpha_div = pd.Series([2.0, 4.0, 6.0, 8.0], name='alpha-div',
                              index=['sample1', 'sample2', 'sample3',
                                     'sample4'])
        md = self._metadata(['1.0', '2.0', '3.0'])
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_correlation(output_dir, alpha_div, md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            jsonp_fp = os.path.join(output_dir, 'category-value.jsonp')
            self.assertTrue(os.path.exists(jsonp_fp))
            self.assertIn('"sampleSize": 3', self._read(jsonp_fp))
class AlphaGroupSignificanceTests(unittest.TestCase):
    """Unit tests for the ``alpha_group_significance`` visualizer."""

    def _alpha_div(self):
        # Alpha diversity vector shared by most tests.
        return pd.Series([2.0, 4.0, 6.0], name='alpha-div',
                         index=['sample1', 'sample2', 'sample3'])

    def _read(self, fp):
        # Read a results file once (and close it) instead of re-opening
        # an anonymous handle for every assertion.
        with open(fp) as fh:
            return fh.read()

    def test_alpha_group_significance(self):
        md = qiime2.Metadata(
            pd.DataFrame({'a or b': ['a', 'b', 'b']},
                         index=['sample1', 'sample2', 'sample3']))
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_group_significance(output_dir, self._alpha_div(), md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            # Category name is URL-encoded in the jsonp filename.
            self.assertTrue(os.path.exists(
                os.path.join(output_dir,
                             'category-a%20or%20b.jsonp')))
            index = self._read(index_fp)
            self.assertIn('Kruskal-Wallis (all groups)', index)
            self.assertIn('Kruskal-Wallis (pairwise)', index)

    def test_alpha_group_significance_some_numeric(self):
        # Numeric-looking columns are skipped with an explanation.
        md = qiime2.Metadata(
            pd.DataFrame({'a or b': ['a', 'b', 'b'],
                          'bad': ['1.0', '2.0', '3.0']},
                         index=['sample1', 'sample2', 'sample3']))
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_group_significance(output_dir, self._alpha_div(), md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            self.assertTrue(os.path.exists(
                os.path.join(output_dir,
                             'category-a%20or%20b.jsonp')))
            self.assertFalse(os.path.exists(
                os.path.join(output_dir,
                             'bad-value.jsonp')))
            index = self._read(index_fp)
            self.assertIn('not categorical:', index)
            self.assertIn('<strong>bad', index)

    def test_alpha_group_significance_one_group_all_unique_values(self):
        # A column with one group per sample is skipped.
        md = qiime2.Metadata(
            pd.DataFrame({'a or b': ['a', 'b', 'b'],
                          'bad': ['x', 'y', 'z']},
                         index=['sample1', 'sample2', 'sample3']))
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_group_significance(output_dir, self._alpha_div(), md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            self.assertTrue(os.path.exists(
                os.path.join(output_dir,
                             'category-a%20or%20b.jsonp')))
            self.assertFalse(os.path.exists(
                os.path.join(output_dir,
                             'category-bad.jsonp')))
            index = self._read(index_fp)
            self.assertIn('number of samples', index)
            self.assertIn('<strong>bad', index)

    def test_alpha_group_significance_one_group_single_value(self):
        # A column with a single value across all samples is skipped.
        md = qiime2.Metadata(
            pd.DataFrame({'a or b': ['a', 'b', 'b'],
                          'bad': ['x', 'x', 'x']},
                         index=['sample1', 'sample2', 'sample3']))
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_group_significance(output_dir, self._alpha_div(), md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            self.assertTrue(os.path.exists(
                os.path.join(output_dir,
                             'category-a%20or%20b.jsonp')))
            self.assertFalse(os.path.exists(
                os.path.join(output_dir,
                             'category-bad.jsonp')))
            index = self._read(index_fp)
            self.assertIn('only a single', index)
            self.assertIn('<strong>bad', index)

    def test_alpha_group_significance_KW_value_error(self):
        # Groups of size one make some pairwise Kruskal-Wallis tests
        # impossible; the visualizer should report the omission.
        alpha_div = pd.Series([2.0, 2.0, 3.0, 2.0], name='alpha-div',
                              index=['sample1', 'sample2', 'sample3',
                                     'sample4'])
        md = qiime2.Metadata(
            pd.DataFrame({'x': ['a', 'b', 'b', 'c']},
                         index=['sample1', 'sample2', 'sample3', 'sample4']))
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_group_significance(output_dir, alpha_div, md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertTrue(os.path.exists(index_fp))
            self.assertTrue(os.path.exists(
                os.path.join(output_dir,
                             'category-x.jsonp')))
            index = self._read(index_fp)
            self.assertIn('pairwise group comparisons have been omitted',
                          index)
            self.assertIn('x:c (n=1) vs x:a (n=1)', index)

    def test_alpha_group_significance_numeric_only(self):
        # With no categorical columns at all, the visualizer errors out.
        md = qiime2.Metadata(
            pd.DataFrame({'value': ['1.0', '2.0', '3.0']},
                         index=['sample1', 'sample2', 'sample3']))
        with tempfile.TemporaryDirectory() as output_dir:
            with self.assertRaisesRegex(ValueError, 'Only numeric'):
                alpha_group_significance(output_dir, self._alpha_div(), md)

    def test_alpha_group_significance_single_quote(self):
        # Single quotes in group names must survive into the HTML output.
        md = qiime2.Metadata(
            pd.DataFrame({'a or b': ['a', "b'", 'b']},
                         index=['sample1', 'sample2', 'sample3']))
        with tempfile.TemporaryDirectory() as output_dir:
            alpha_group_significance(output_dir, self._alpha_div(), md)
            index_fp = os.path.join(output_dir, 'index.html')
            self.assertIn("\'", self._read(index_fp))
| 47.408824
| 79
| 0.529872
| 1,874
| 16,119
| 4.417289
| 0.094984
| 0.056535
| 0.064267
| 0.087944
| 0.849601
| 0.835468
| 0.816985
| 0.786663
| 0.771805
| 0.737859
| 0
| 0.038834
| 0.30827
| 16,119
| 339
| 80
| 47.548673
| 0.703587
| 0.031392
| 0
| 0.70922
| 0
| 0.010638
| 0.131979
| 0.00782
| 0
| 0
| 0
| 0
| 0.205674
| 1
| 0.08156
| false
| 0
| 0.039007
| 0
| 0.131206
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3cd02a7259d9a49cead36e36c9d53c8f61230878
| 8,651
|
py
|
Python
|
utils/scripts/OOOlevelGen/src/sprite_templates/CaterpillarVersion2.py
|
fullscreennl/bullettime
|
8967449cdf926aaed6bb7ec217d92e0689fb0c3c
|
[
"MIT"
] | null | null | null |
utils/scripts/OOOlevelGen/src/sprite_templates/CaterpillarVersion2.py
|
fullscreennl/bullettime
|
8967449cdf926aaed6bb7ec217d92e0689fb0c3c
|
[
"MIT"
] | null | null | null |
utils/scripts/OOOlevelGen/src/sprite_templates/CaterpillarVersion2.py
|
fullscreennl/bullettime
|
8967449cdf926aaed6bb7ec217d92e0689fb0c3c
|
[
"MIT"
] | null | null | null |
import MonsterBuilder
from sprites import *
def create(lb,xpos):
    """Add the 'AcceledCaterpillar' monster to a level.

    Builds a fixed XML ``<level>`` fragment describing the caterpillar's
    parts — rect/circ ``Enemy.EnemySprite`` segments plus the
    ``Joints.RevoluteJoint`` motors and connectors that link them — and
    hands it to ``MonsterBuilder.createFromXMLString``.

    lb:   level builder object, passed straight through to
          MonsterBuilder (opaque from here).
    xpos: horizontal placement value forwarded to MonsterBuilder —
          presumably an x offset applied to the sprite coordinates
          below; confirm in MonsterBuilder.
    """
    # NOTE(review): joint id="13" appears twice in the XML below (the
    # AcceledCaterpillar9/8 wheel joint and the AcceledCaterpillar8/10
    # connection joint) — every other joint id is unique; confirm
    # whether ids must be unique before changing.
    xml = """
<level>
<!-- BEGIN Monster construction -->
<sprite shape="rect" type="Enemy.EnemySprite" x="115" y="19" width="46" height="24" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="AcceledCaterpillar3" sheet="6" firstframe="caterpillar_body.png" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="1"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="145" y="19" width="46" height="24" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="AcceledCaterpillar5" sheet="6" firstframe="caterpillar_body.png" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<sprite shape="circ" type="Enemy.EnemySprite" x="100" y="19" width="38" height="38" angle="0" restitution="0.2" static="false" friction="0.7" density="3" sheet="6" firstframe="caterpillar_head.png" setName="AcceledCaterpillar1" classname="AcceledCaterpillarBrain" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<sprite shape="circ" type="Enemy.EnemySprite" x="130" y="19" width="38" height="38" angle="0" restitution="0.2" static="false" friction="0.7" density="5" sheet="6" firstframe="caterpillar_body.png" setName="AcceledCaterpillar2" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<sprite type="Joints.RevoluteJoint" id="6" body1="AcceledCaterpillar1" body2="AcceledCaterpillar3" motor_speed="-5.0" torque="1000.0" enable_motor="true" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="104" by="16" b2_Xoffset="-11" b2_Yoffset="-3" ax="100" ay="19" b1_Xoffset="0" b1_Yoffset="0"/>
<sprite type="Joints.RevoluteJoint" id="7" body1="AcceledCaterpillar2" body2="AcceledCaterpillar3" motor_speed="-5.0" torque="1000.0" enable_motor="true" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="110" by="16" b2_Xoffset="-5" b2_Yoffset="-3" ax="130" ay="19" b1_Xoffset="0" b1_Yoffset="0"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="175" y="19" width="46" height="24" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="AcceledCaterpillar6" sheet="6" firstframe="caterpillar_body.png" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<!-- schakel -->
<sprite shape="circ" type="Enemy.EnemySprite" x="160" y="19" width="38" height="38" angle="0" restitution="0.2" static="false" friction="0.5" density="3" sheet="6" firstframe="caterpillar_body.png" setName="AcceledCaterpillar4" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<!--connection joint -->
<sprite type="Joints.RevoluteJoint" id="8" body1="AcceledCaterpillar3" body2="AcceledCaterpillar5" motor_speed="0.0" torque="0.0" enable_motor="false" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="110" by="16" b2_Xoffset="-35" b2_Yoffset="-3" ax="130" ay="19" b1_Xoffset="15" b1_Yoffset="0"/>
<!--wheel joint -->
<sprite type="Joints.RevoluteJoint" id="9" body1="AcceledCaterpillar4" body2="AcceledCaterpillar5" motor_speed="-5.0" torque="1000.0" enable_motor="true" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="140" by="16" b2_Xoffset="-5" b2_Yoffset="-3" ax="160" ay="19" b1_Xoffset="0" b1_Yoffset="0"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="205" y="19" width="46" height="24" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="AcceledCaterpillar8" sheet="6" firstframe="caterpillar_body.png" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<!-- schakel -->
<sprite shape="circ" type="Enemy.EnemySprite" x="190" y="19" width="38" height="38" angle="0" restitution="0.2" static="false" friction="0.7" density="3" sheet="6" firstframe="caterpillar_body.png" setName="AcceledCaterpillar7" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<!--connection joint -->
<sprite type="Joints.RevoluteJoint" id="10" body1="AcceledCaterpillar5" body2="AcceledCaterpillar6" motor_speed="0.0" torque="0.0" enable_motor="false" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="140" by="16" b2_Xoffset="-35" b2_Yoffset="-3" ax="170" ay="19" b1_Xoffset="15" b1_Yoffset="0"/>
<!--wheel joint -->
<sprite type="Joints.RevoluteJoint" id="11" body1="AcceledCaterpillar7" body2="AcceledCaterpillar6" motor_speed="-5.0" torque="1000.0" enable_motor="true" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="170" by="16" b2_Xoffset="-5" b2_Yoffset="-3" ax="190" ay="19" b1_Xoffset="0" b1_Yoffset="0"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="235" y="19" width="46" height="24" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="AcceledCaterpillar10" sheet="6" firstframe="caterpillar_body.png" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<!-- schakel -->
<sprite shape="circ" type="Enemy.EnemySprite" x="220" y="19" width="38" height="38" angle="0" restitution="0.2" static="false" friction="0.7" density="3" sheet="6" firstframe="caterpillar_body.png" setName="AcceledCaterpillar9" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<!--connection joint -->
<sprite type="Joints.RevoluteJoint" id="12" body1="AcceledCaterpillar6" body2="AcceledCaterpillar8" motor_speed="0.0" torque="0.0" enable_motor="false" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="170" by="16" b2_Xoffset="-35" b2_Yoffset="-3" ax="200" ay="19" b1_Xoffset="15" b1_Yoffset="0"/>
<!--wheel joint -->
<sprite type="Joints.RevoluteJoint" id="13" body1="AcceledCaterpillar9" body2="AcceledCaterpillar8" motor_speed="-5.0" torque="1000.0" enable_motor="true" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="200" by="16" b2_Xoffset="-5" b2_Yoffset="-3" ax="220" ay="19" b1_Xoffset="0" b1_Yoffset="0"/>
<sprite shape="rect" type="Enemy.EnemySprite" x="265" y="19" width="46" height="24" angle="0" restitution="0.2" static="false" friction="0.5" density="5" setName="AcceledCaterpillar12" sheet="6" firstframe="caterpillar_body.png" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<!-- schakel -->
<sprite shape="circ" type="Enemy.EnemySprite" x="250" y="19" width="38" height="38" angle="0" restitution="0.2" static="false" friction="0.7" density="3" sheet="6" firstframe="caterpillar_body.png" setName="AcceledCaterpillar11" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<!--connection joint -->
<sprite type="Joints.RevoluteJoint" id="13" body1="AcceledCaterpillar8" body2="AcceledCaterpillar10" motor_speed="0.0" torque="0.0" enable_motor="false" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="200" by="16" b2_Xoffset="-35" b2_Yoffset="-3" ax="230" ay="19" b1_Xoffset="15" b1_Yoffset="0"/>
<!--wheel joint -->
<sprite type="Joints.RevoluteJoint" id="14" body1="AcceledCaterpillar11" body2="AcceledCaterpillar10" motor_speed="-5.0" torque="1000.0" enable_motor="true" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="230" by="16" b2_Xoffset="-5" b2_Yoffset="-3" ax="250" ay="19" b1_Xoffset="0" b1_Yoffset="0"/>
<!-- schakel -->
<sprite shape="circ" type="Enemy.EnemySprite" x="280" y="19" width="38" height="38" angle="0" restitution="0.2" static="false" friction="0.7" density="3" sheet="6" firstframe="caterpillar_body.png" setName="AcceledCaterpillar13" classname="AcceledCaterpillarLimb" spritedata="AcceledCaterpillar" groupIndex="-1"/>
<!--connection joint -->
<sprite type="Joints.RevoluteJoint" id="15" body1="AcceledCaterpillar10" body2="AcceledCaterpillar12" motor_speed="0.0" torque="0.0" enable_motor="false" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="230" by="16" b2_Xoffset="-35" b2_Yoffset="-3" ax="260" ay="19" b1_Xoffset="15" b1_Yoffset="0"/>
<!--wheel joint -->
<sprite type="Joints.RevoluteJoint" id="16" body1="AcceledCaterpillar13" body2="AcceledCaterpillar12" motor_speed="-5.0" torque="1000.0" enable_motor="true" lower_angle="12" upper_angle="45" enable_limit="false" collide_connected="false" bx="260" by="16" b2_Xoffset="-5" b2_Yoffset="-3" ax="280" ay="19" b1_Xoffset="0" b1_Yoffset="0"/>
<!-- END Monster construction -->
</level>
"""
    MonsterBuilder.createFromXMLString(lb,xpos,xml)
| 120.152778
| 335
| 0.735869
| 1,192
| 8,651
| 5.229027
| 0.107383
| 0.022942
| 0.041713
| 0.043799
| 0.824483
| 0.816461
| 0.816461
| 0.787903
| 0.771057
| 0.732713
| 0
| 0.081514
| 0.068316
| 8,651
| 71
| 336
| 121.84507
| 0.691811
| 0
| 0
| 0.3
| 0
| 0.5
| 0.983817
| 0.375332
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02
| false
| 0
| 0.04
| 0
| 0.06
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
595741fe847ce7070b534f2b84c09c345a58fa08
| 261
|
py
|
Python
|
scrapers/regex.py
|
jhabarsingh/CODECHEF-APP
|
2d907710b8acde0ed51d4b4b2f486fffa4147ab9
|
[
"MIT"
] | 2
|
2021-05-03T14:54:59.000Z
|
2022-01-07T19:03:15.000Z
|
scrapers/regex.py
|
jhabarsingh/CODECHEF-APP
|
2d907710b8acde0ed51d4b4b2f486fffa4147ab9
|
[
"MIT"
] | null | null | null |
scrapers/regex.py
|
jhabarsingh/CODECHEF-APP
|
2d907710b8acde0ed51d4b4b2f486fffa4147ab9
|
[
"MIT"
] | null | null | null |
# One CodeChef contest record captured as a raw JSON string (contest
# code, dates, rating/rank, contest name, end date, display color).
# Renamed from ``str`` — the original name shadowed the ``str`` builtin
# for the whole module.
CONTEST_JSON = '{"code":"JAN21C","getyear":"2021","getmonth":"1","getday":"11","reason":null,"penalised_in":null,"rating":"1524","rank":"12734","name":"January Challenge 2021 Division 3","end_date":"2021-01-11 15:00:04","color":"#1E7D22"}'


def get_str():
    """Return the raw contest-record JSON string (parse with json.loads)."""
    return CONTEST_JSON
| 37.285714
| 230
| 0.655172
| 39
| 261
| 4.307692
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167347
| 0.061303
| 261
| 6
| 231
| 43.5
| 0.518367
| 0
| 0
| 0
| 0
| 0.333333
| 0.853846
| 0.753846
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
59586e2cd7f4f28f5759971afc1df8a015d305a7
| 231
|
py
|
Python
|
rewardify/backends/__init__.py
|
the16thpythonist/rewardify-base
|
4740ec78b22fb6b9d1a62697c8592e5830a38ac6
|
[
"BSD-3-Clause"
] | null | null | null |
rewardify/backends/__init__.py
|
the16thpythonist/rewardify-base
|
4740ec78b22fb6b9d1a62697c8592e5830a38ac6
|
[
"BSD-3-Clause"
] | null | null | null |
rewardify/backends/__init__.py
|
the16thpythonist/rewardify-base
|
4740ec78b22fb6b9d1a62697c8592e5830a38ac6
|
[
"BSD-3-Clause"
] | null | null | null |
from rewardify.backends.base import *
from rewardify.backends.forest import *
from rewardify.backends.goals import GoalBackend
from rewardify.backends.combine import combine_backends
from rewardify.backends.mock import MockBackend
| 38.5
| 55
| 0.861472
| 29
| 231
| 6.827586
| 0.37931
| 0.328283
| 0.530303
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08658
| 231
| 5
| 56
| 46.2
| 0.938389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
5976a2fb16dcec68f7d53a548cebb15c20ed70be
| 2,575
|
py
|
Python
|
test.py
|
MaxTheMooshroom/air-bnb-optimal-pricer
|
c47b79de38b36f560fa7f15f51d30f4ea915440c
|
[
"MIT"
] | null | null | null |
test.py
|
MaxTheMooshroom/air-bnb-optimal-pricer
|
c47b79de38b36f560fa7f15f51d30f4ea915440c
|
[
"MIT"
] | null | null | null |
test.py
|
MaxTheMooshroom/air-bnb-optimal-pricer
|
c47b79de38b36f560fa7f15f51d30f4ea915440c
|
[
"MIT"
] | null | null | null |
# Sanity-check that two orderings of the airbnb feature columns contain
# the same names: collect anything present in one list but not the other
# and print the mismatches pairwise.
temp = ['neighbourhood_group_cleansed_Mitte', 'neighbourhood_group_cleansed_Friedrichshain-Kreuzberg', 'neighbourhood_group_cleansed_Pankow', 'neighbourhood_group_cleansed_Tempelhof - Schöneberg', 'neighbourhood_group_cleansed_Steglitz - Zehlendorf', 'neighbourhood_group_cleansed_Neukölln', 'neighbourhood_group_cleansed_Spandau', 'neighbourhood_group_cleansed_Charlottenburg-Wilm.', 'neighbourhood_group_cleansed_Lichtenberg', 'neighbourhood_group_cleansed_Reinickendorf', 'neighbourhood_group_cleansed_Treptow - Köpenick', 'neighbourhood_group_cleansed_Marzahn - Hellersdorf', 'room_type_Private room', 'room_type_Entire home/apt', 'room_type_Shared room', 'accommodates', 'bathrooms', 'bedrooms', 'beds', 'bed_type_Real Bed', 'bed_type_Couch', 'bed_type_Futon', 'bed_type_Pull-out Sofa', 'bed_type_Airbed', 'security_deposit', 'cleaning_fee', 'minimum_nights', 'Washer', 'Hair dryer', 'Laptop friendly workspace', 'Hangers', 'Iron', 'Shampoo', 'TV', 'Hot water', 'Family/kid friendly', 'Internet', 'Host greets you', 'Smoke detector', 'Buzzer/wireless intercom', 'Lock on bedroom door', 'Free street parking', 'Elevator', 'Bed linens', 'Smoking allowed', 'First aid kit', 'Cable TV']
temp2 = ['accommodates', 'bedrooms', 'bathrooms', 'beds', 'security_deposit', 'cleaning_fee', 'minimum_nights', 'bed_type_Real Bed', 'bed_type_Couch', 'bed_type_Futon', 'bed_type_Pull-out Sofa', 'bed_type_Airbed', 'room_type_Private room', 'room_type_Entire home/apt', 'room_type_Shared room', 'neighbourhood_group_cleansed_Mitte', 'neighbourhood_group_cleansed_Friedrichshain-Kreuzberg', 'neighbourhood_group_cleansed_Pankow', 'neighbourhood_group_cleansed_Tempelhof - Schöneberg', 'neighbourhood_group_cleansed_Steglitz - Zehlendorf', 'neighbourhood_group_cleansed_Neukölln', 'neighbourhood_group_cleansed_Spandau', 'neighbourhood_group_cleansed_Charlottenburg-Wilm.', 'neighbourhood_group_cleansed_Lichtenberg', 'neighbourhood_group_cleansed_Reinickendorf', 'neighbourhood_group_cleansed_Treptow - Köpenick', 'neighbourhood_group_cleansed_Marzahn - Hellersdorf', 'Washer', 'Hair dryer', 'Laptop friendly workspace', 'Hangers', 'Iron', 'Shampoo', 'TV', 'Hot water', 'Family/kid friendly', 'Internet', 'Host greets you', 'Smoke detector', 'Buzzer/wireless intercom', 'Lock on bedroom door', 'Free street parking', 'Elevator', 'Bed linens', 'Smoking allowed', 'First aid kit', 'Cable TV']

# Hoist set construction out of the loops: membership tests against a
# set are O(1) vs the original O(n) scan of the list per element.
_temp_set = set(temp)
_temp2_set = set(temp2)

# Names in temp missing from temp2 (temp's order preserved), and vice versa.
res = [var for var in temp if var not in _temp2_set]
res2 = [var for var in temp2 if var not in _temp_set]

# Print mismatches side by side; both lists are empty when the two
# feature sets agree.
for tup in zip(res, res2):
    print(tup)
| 160.9375
| 1,188
| 0.784078
| 308
| 2,575
| 6.198052
| 0.314935
| 0.226296
| 0.326873
| 0.032478
| 0.902043
| 0.902043
| 0.861184
| 0.861184
| 0.861184
| 0.861184
| 0
| 0.002554
| 0.087767
| 2,575
| 16
| 1,189
| 160.9375
| 0.810132
| 0
| 0
| 0
| 0
| 0
| 0.770186
| 0.367236
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
598f38ae8146b2b09c163b9f56fdfb014a3d3f3a
| 212
|
py
|
Python
|
rasa_contrib/nlu/extractors/__init__.py
|
howl-anderson/rasa_contrib
|
fea3b818a343f1458d7cf15a4d9063464a304b19
|
[
"Apache-2.0"
] | 17
|
2019-07-02T05:27:33.000Z
|
2021-11-21T08:03:51.000Z
|
rasa_contrib/nlu/extractors/__init__.py
|
howl-anderson/rasa_nlu_addons
|
fea3b818a343f1458d7cf15a4d9063464a304b19
|
[
"Apache-2.0"
] | 13
|
2019-12-23T18:15:45.000Z
|
2022-03-11T23:50:37.000Z
|
rasa_contrib/nlu/extractors/__init__.py
|
howl-anderson/rasa_nlu_addons
|
fea3b818a343f1458d7cf15a4d9063464a304b19
|
[
"Apache-2.0"
] | 3
|
2019-09-10T08:42:33.000Z
|
2020-10-19T15:48:52.000Z
|
from rasa_contrib.nlu.extractors.bilstm_crf_tf_entity_extractor import BilstmCrfTensorFlowEntityExtractor
from rasa_contrib.nlu.extractors.bilstm_crf_paddle_entity_extractor import BilstmCrfPaddleEntityExtractor
| 70.666667
| 105
| 0.933962
| 24
| 212
| 7.833333
| 0.583333
| 0.085106
| 0.159574
| 0.191489
| 0.393617
| 0.393617
| 0.393617
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 212
| 2
| 106
| 106
| 0.921569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
59aa0b8c7974f7b645e4356bb694e59eb83923ff
| 66,058
|
py
|
Python
|
src/genie/libs/parser/junos/tests/ShowInterfaces/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/junos/tests/ShowInterfaces/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/junos/tests/ShowInterfaces/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output={
"interface-information": {
"physical-interface": [
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:56:b6",
"description": "TEST-DESC:1|TEST#1234 DEV",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:56:b6",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:19 UTC (29w6d 18:56 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "148",
"logical-interface": [
{
"address-family": [],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "333",
"name": "ge-0/0/0.0",
"snmp-index": "606",
"traffic-statistics": {
"input-packets": "133657033",
"output-packets": "129243982"
}
}
],
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/0",
"oper-status": "Up",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "526",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "2952",
"input-pps": "5",
"output-bps": "3080",
"output-pps": "3"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"local-index": "145",
"logical-interface": [
{
"address-family": [],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "329",
"logical-interface-bandwidth": "0",
"name": "lc-0/0/0.32769",
"snmp-index": "520",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
],
"name": "lc-0/0/0",
"oper-status": "Up",
"snmp-index": "519",
"speed": "800mbps",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"local-index": "147",
"logical-interface": [
{
"address-family": [],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "330",
"logical-interface-bandwidth": "0",
"name": "pfe-0/0/0.16383",
"snmp-index": "523",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
],
"name": "pfe-0/0/0",
"oper-status": "Up",
"snmp-index": "522",
"speed": "800mbps",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"local-index": "146",
"logical-interface": [
{
"address-family": [],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "331",
"logical-interface-bandwidth": "0",
"name": "pfh-0/0/0.16383",
"snmp-index": "524",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"address-family": [],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "332",
"logical-interface-bandwidth": "0",
"name": "pfh-0/0/0.16384",
"snmp-index": "525",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
],
"name": "pfh-0/0/0",
"oper-status": "Up",
"snmp-index": "521",
"speed": "800mbps",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:37:f9",
"description": "YW7079/9.6G/BB/sjkGDS221-EC11_xe-0/1/5[SJC]_Area8_Cost100",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:37:f9",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:19 UTC (29w6d 18:56 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "149",
"logical-interface": [
{
"address-family": [],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "334",
"name": "ge-0/0/1.0",
"snmp-index": "605",
"traffic-statistics": {
"input-packets": "376821627",
"output-packets": "370477594"
}
}
],
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/1",
"oper-status": "Up",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "527",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "3696",
"input-pps": "6",
"output-bps": "7736",
"output-pps": "9"
}
},
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:1e:ba",
"description": "ve-hkgasr01_Gi2[DefaultCost1000]",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:1e:ba",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2020-03-05 16:04:34 UTC (2w6d 12:00 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "150",
"logical-interface": [
{
"address-family": [],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "336",
"name": "ge-0/0/2.0",
"snmp-index": "536",
"traffic-statistics": {
"input-packets": "210359939",
"output-packets": "222589463"
}
}
],
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/2",
"oper-status": "Up",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "528",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "928",
"input-pps": "1",
"output-bps": "800",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"alarm-not-present": True
}
},
"active-defects": {
"interface-alarms": {
"alarm-not-present": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:93:cb",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:93:cb",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-10-25 08:50:18 UTC (21w5d 19:15 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "151",
"logical-interface": [
{
"address-family": [],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4004000"
},
"local-index": "335",
"name": "ge-0/0/3.0",
"snmp-index": "537",
"traffic-statistics": {
"input-packets": "14609",
"output-packets": "17416"
}
}
],
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/3",
"oper-status": "Up",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "529",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "00:50:56:ff:3e:28",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "00:50:56:ff:3e:28",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "152",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/4",
"oper-status": "Down",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "530",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:1d",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:1d",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "153",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/5",
"oper-status": "Down",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "531",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:1e",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:1e",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "154",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/6",
"oper-status": "Down",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "532",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:1f",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:1f",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "155",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/7",
"oper-status": "Down",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "533",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:20",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:20",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "156",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/8",
"oper-status": "Down",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "534",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"active-alarms": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"active-defects": {
"interface-alarms": {
"ethernet-alarm-link-down": True
}
},
"admin-status": {
"@junos:format": "Enabled"
},
"bpdu-error": "None",
"current-physical-address": "2c:6b:f5:ff:01:21",
"eth-switch-error": "None",
"ethernet-fec-statistics": {
"fec_ccw_count": "0",
"fec_ccw_error_rate": "0",
"fec_nccw_count": "0",
"fec_nccw_error_rate": "0"
},
"ethernet-pcs-statistics": {
"bit-error-seconds": "0",
"errored-blocks-seconds": "0"
},
"hardware-physical-address": "2c:6b:f5:ff:01:21",
"if-auto-negotiation": "Enabled",
"if-config-flags": {
"iff-hardware-down": True,
"iff-snmp-traps": True,
"internal-flags": "0x4000"
},
"if-device-flags": {
"ifdf-down": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-flow-control": "Enabled",
"if-media-flags": {
"ifmf-none": True
},
"if-remote-fault": "Online",
"interface-flapped": {
"#text": "2019-08-29 09:09:20 UTC (29w6d 18:55 ago)"
},
"interface-transmit-statistics": "Disabled",
"ld-pdu-error": "None",
"link-level-type": "Ethernet",
"local-index": "157",
"loopback": "Disabled",
"mru": "1522",
"mtu": "1514",
"name": "ge-0/0/9",
"oper-status": "Down",
"pad-to-minimum-frame-size": "Disabled",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8"
},
"snmp-index": "535",
"sonet-mode": "LAN-PHY",
"source-filtering": "Disabled",
"speed": "1000mbps",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:01:29",
"hardware-physical-address": "2c:6b:f5:ff:01:29",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Ethernet",
"link-type": "Full-Duplex",
"local-index": "129",
"mtu": "9192",
"name": "cbp0",
"oper-status": "Up",
"snmp-index": "501",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-type": "Full-Duplex",
"local-index": "128",
"name": "demux0",
"oper-status": "Up",
"snmp-index": "502",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"local-index": "5",
"name": "dsc",
"oper-status": "Up",
"snmp-index": "5",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "00:50:56:ff:e2:c1",
"hardware-physical-address": "00:50:56:ff:e2:c1",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"interface-flapped": {
"#text": "2019-08-29 09:03:11 UTC (29w6d 19:02 ago)"
},
"link-level-type": "Ethernet",
"local-index": "65",
"logical-interface": [
{
"address-family": [],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4000000"
},
"local-index": "3",
"name": "em1.0",
"snmp-index": "24",
"traffic-statistics": {
"input-packets": "724625563",
"output-packets": "793953088"
}
}
],
"mtu": "1514",
"name": "em1",
"oper-status": "Up",
"snmp-index": "23",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "VxLAN-Tunnel-Endpoint",
"link-type": "Full-Duplex",
"local-index": "134",
"mtu": "Unlimited",
"name": "esi",
"oper-status": "Up",
"snmp-index": "503",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"local-index": "136",
"mtu": "Unlimited",
"name": "fti0",
"oper-status": "Up",
"snmp-index": "504",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"local-index": "137",
"mtu": "Unlimited",
"name": "fti1",
"oper-status": "Up",
"snmp-index": "505",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"local-index": "138",
"mtu": "Unlimited",
"name": "fti2",
"oper-status": "Up",
"snmp-index": "506",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"local-index": "139",
"mtu": "Unlimited",
"name": "fti3",
"oper-status": "Up",
"snmp-index": "507",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"local-index": "140",
"mtu": "Unlimited",
"name": "fti4",
"oper-status": "Up",
"snmp-index": "508",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"local-index": "141",
"mtu": "Unlimited",
"name": "fti5",
"oper-status": "Up",
"snmp-index": "509",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"local-index": "142",
"mtu": "Unlimited",
"name": "fti6",
"oper-status": "Up",
"snmp-index": "510",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Flexible-tunnel-Interface",
"link-type": "Full-Duplex",
"local-index": "143",
"mtu": "Unlimited",
"name": "fti7",
"oper-status": "Up",
"snmp-index": "511",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "00:50:56:ff:0a:95",
"hardware-physical-address": "00:50:56:ff:0a:95",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"interface-flapped": {
"#text": "2019-08-29 09:03:11 UTC (29w6d 19:02 ago)"
},
"link-level-type": "Ethernet",
"local-index": "64",
"logical-interface": [
{
"address-family": [],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x4000000"
},
"local-index": "4",
"name": "fxp0.0",
"snmp-index": "13",
"traffic-statistics": {
"input-packets": "563129",
"output-packets": "805208"
}
}
],
"mtu": "1514",
"name": "fxp0",
"oper-status": "Up",
"snmp-index": "1",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "GRE",
"local-index": "10",
"mtu": "Unlimited",
"name": "gre",
"oper-status": "Up",
"snmp-index": "8",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "IP-over-IP",
"local-index": "11",
"mtu": "Unlimited",
"name": "ipip",
"oper-status": "Up",
"snmp-index": "9",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:08:09",
"hardware-physical-address": "2c:6b:f5:ff:08:09",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Ethernet",
"link-type": "Full-Duplex",
"local-index": "132",
"mtu": "1514",
"name": "irb",
"oper-status": "Up",
"snmp-index": "512",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:08:d8",
"hardware-physical-address": "2c:6b:f5:ff:08:d8",
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Ethernet",
"link-type": "Full-Duplex",
"local-index": "144",
"logical-interface": [
{
"address-family": [],
"encapsulation": "unknown",
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True,
"internal-flags": "0x24004000"
},
"local-index": "325",
"logical-interface-bandwidth": "1Gbps",
"name": "jsrv.1",
"snmp-index": "514",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
],
"mtu": "1514",
"name": "jsrv",
"oper-status": "Up",
"snmp-index": "513",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-loopback": True,
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"local-index": "6",
"logical-interface": [
{
"address-family": [],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "320",
"name": "lo0.0",
"snmp-index": "16",
"traffic-statistics": {
"input-packets": "83",
"output-packets": "83"
}
},
{
"address-family": [],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "322",
"name": "lo0.16384",
"snmp-index": "21",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"address-family": [],
"if-config-flags": {
"iff-snmp-traps": True,
"iff-up": True
},
"local-index": "321",
"name": "lo0.16385",
"snmp-index": "22",
"traffic-statistics": {
"input-packets": "33920495",
"output-packets": "33920495"
}
}
],
"name": "lo0",
"oper-status": "Up",
"snmp-index": "6",
"traffic-statistics": {
"input-packets": "33920578",
"output-packets": "33920578"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "LSI",
"local-index": "4",
"mtu": "Unlimited",
"name": "lsi",
"oper-status": "Up",
"snmp-index": "4",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "GRE",
"local-index": "66",
"mtu": "Unlimited",
"name": "mtun",
"oper-status": "Up",
"snmp-index": "12",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "PIM-Decapsulator",
"local-index": "26",
"mtu": "Unlimited",
"name": "pimd",
"oper-status": "Up",
"snmp-index": "11",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"link-level-type": "PIM-Encapsulator",
"local-index": "25",
"mtu": "Unlimited",
"name": "pime",
"oper-status": "Up",
"snmp-index": "10",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"current-physical-address": "2c:6b:f5:ff:08:c8",
"hardware-physical-address": "2c:6b:f5:ff:08:c8",
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Ethernet",
"link-type": "Full-Duplex",
"local-index": "130",
"mtu": "9192",
"name": "pip0",
"oper-status": "Up",
"snmp-index": "515",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"link-level-type": "PPPoE",
"link-type": "Full-Duplex",
"local-index": "131",
"mtu": "1532",
"name": "pp0",
"oper-status": "Up",
"snmp-index": "516"
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Remote-BEB",
"link-type": "Full-Duplex",
"local-index": "135",
"mtu": "Unlimited",
"name": "rbeb",
"oper-status": "Up",
"snmp-index": "517",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-config-flags": {
"iff-snmp-traps": True
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "Interface-Specific",
"local-index": "12",
"mtu": "Unlimited",
"name": "tap",
"oper-status": "Up",
"snmp-index": "7",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
},
{
"admin-status": {
"@junos:format": "Enabled"
},
"if-device-flags": {
"ifdf-present": True,
"ifdf-running": True
},
"if-media-flags": {
"ifmf-none": True
},
"interface-flapped": {
"#text": "Never"
},
"link-level-type": "VxLAN-Tunnel-Endpoint",
"link-type": "Full-Duplex",
"local-index": "133",
"mtu": "Unlimited",
"name": "vtep",
"oper-status": "Up",
"snmp-index": "518",
"speed": "Unlimited",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0"
}
}
]
}
}
| 37.048794
| 91
| 0.323579
| 4,484
| 66,058
| 4.743533
| 0.078947
| 0.026328
| 0.055853
| 0.059991
| 0.907005
| 0.873202
| 0.866714
| 0.865538
| 0.847109
| 0.836953
| 0
| 0.049751
| 0.532623
| 66,058
| 1,783
| 92
| 37.048794
| 0.639179
| 0
| 0
| 0.621985
| 0
| 0.000561
| 0.34336
| 0.058584
| 0
| 0
| 0.001877
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
59d8dcdb92e1124657bf34ee82c57a70b3f81c45
| 275
|
py
|
Python
|
src/local/lexers.py
|
RLogik/phpytex
|
4e422a07ec23b4ade5263db499318b3e2c75f1f9
|
[
"MIT"
] | null | null | null |
src/local/lexers.py
|
RLogik/phpytex
|
4e422a07ec23b4ade5263db499318b3e2c75f1f9
|
[
"MIT"
] | 8
|
2021-08-24T12:27:02.000Z
|
2021-10-14T07:50:12.000Z
|
src/local/lexers.py
|
RLogik/phpytex
|
4e422a07ec23b4ade5263db499318b3e2c75f1f9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# EXPORTS
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from lark.indenter import Indenter;
from lark import Lark;
from lark import Tree;
| 25
| 66
| 0.36
| 21
| 275
| 4.714286
| 0.619048
| 0.242424
| 0.282828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 0.098182
| 275
| 10
| 67
| 27.5
| 0.391129
| 0.658182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ab6edc5526517a3553683812fcc251abcdefc132
| 195
|
py
|
Python
|
samcli/lib/utils/profile.py
|
michaelbrewer/aws-sam-cli
|
4248d649aac021150f6141d5a30a0581605d35e4
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 2,959
|
2018-05-08T21:48:56.000Z
|
2020-08-24T14:35:39.000Z
|
samcli/lib/utils/profile.py
|
michaelbrewer/aws-sam-cli
|
4248d649aac021150f6141d5a30a0581605d35e4
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1,469
|
2018-05-08T22:44:28.000Z
|
2020-08-24T20:19:24.000Z
|
samcli/lib/utils/profile.py
|
michaelbrewer/aws-sam-cli
|
4248d649aac021150f6141d5a30a0581605d35e4
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 642
|
2018-05-08T22:09:19.000Z
|
2020-08-17T09:04:37.000Z
|
"""
Module for aws profile related helpers
"""
from typing import List
from botocore.session import Session
def list_available_profiles() -> List[str]:
return Session().available_profiles
| 17.727273
| 43
| 0.764103
| 25
| 195
| 5.84
| 0.68
| 0.232877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148718
| 195
| 10
| 44
| 19.5
| 0.879518
| 0.194872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
abe8b599d5402c367bb7c84b7e370964d8273518
| 593
|
py
|
Python
|
model/loss.py
|
yzxing87/Invertible-ISP
|
344dd333dd2a075f6a9e4ffc445dc387ca3014c4
|
[
"MIT"
] | 246
|
2021-03-20T08:12:53.000Z
|
2022-03-30T02:30:19.000Z
|
model/loss.py
|
FengJunxi/Invertible-ISP
|
6f93fdd5e66397577aa9a6182e1b752af7a2a65d
|
[
"MIT"
] | 14
|
2021-04-09T03:24:39.000Z
|
2022-03-22T06:31:38.000Z
|
model/loss.py
|
FengJunxi/Invertible-ISP
|
6f93fdd5e66397577aa9a6182e1b752af7a2a65d
|
[
"MIT"
] | 25
|
2021-04-06T01:10:54.000Z
|
2022-02-13T13:47:42.000Z
|
import torch.nn.functional as F
import torch
def l1_loss(output, target_rgb, target_raw, weight=1.):
raw_loss = F.l1_loss(output['reconstruct_raw'], target_raw)
rgb_loss = F.l1_loss(output['reconstruct_rgb'], target_rgb)
total_loss = raw_loss + weight * rgb_loss
return total_loss, raw_loss, rgb_loss
def l2_loss(output, target_rgb, target_raw, weight=1.):
raw_loss = F.mse_loss(output['reconstruct_raw'], target_raw)
rgb_loss = F.mse_loss(output['reconstruct_rgb'], target_rgb)
total_loss = raw_loss + weight * rgb_loss
return total_loss, raw_loss, rgb_loss
| 39.533333
| 64
| 0.743676
| 96
| 593
| 4.239583
| 0.208333
| 0.14742
| 0.206388
| 0.157248
| 0.884521
| 0.884521
| 0.859951
| 0.859951
| 0.859951
| 0.658477
| 0
| 0.011881
| 0.148398
| 593
| 15
| 65
| 39.533333
| 0.794059
| 0
| 0
| 0.333333
| 0
| 0
| 0.10101
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9f0b377fe9d75b7a4700cc2d1f1fccfcf81d56d
| 4,694
|
py
|
Python
|
tests/functional/test_lessons.py
|
karlneco/kanji-test-maker
|
de0ae52d8de28fe81be2ec49018acf1ad0f2308e
|
[
"MIT"
] | 2
|
2019-12-11T07:09:57.000Z
|
2020-02-02T23:34:51.000Z
|
tests/functional/test_lessons.py
|
karlneco/kanji-test-maker
|
de0ae52d8de28fe81be2ec49018acf1ad0f2308e
|
[
"MIT"
] | 1
|
2021-10-04T05:59:17.000Z
|
2021-10-04T05:59:17.000Z
|
tests/functional/test_lessons.py
|
karlneco/kanji-test-maker
|
de0ae52d8de28fe81be2ec49018acf1ad0f2308e
|
[
"MIT"
] | null | null | null |
from flask import url_for
import requests
from hktm import db
from hktm.models import User
def test_new_good_lesson(client,auth_user,init_database,authenticated_request):
"""
GIVEN a Flask application
WHEN the user creates a valid new lesson
THEN inform them and go to the edit screen
"""
response = client.post(url_for('root.index'),data=dict(email='testuser@gmail.com',password='password'))
# try to get home
response = client.post(url_for('lessons.add'),data=dict(name='test lesson 1',grade='1'),follow_redirects=True)
assert response.status_code == 200
assert '新しい単元が追加されました。'.encode('utf-8') in response.data #user informed
assert '編集したいプリントを選べます'.encode('utf-8') in response.data #at the edit screen
def test_new_lesson_no_name(client,auth_user,init_database,authenticated_request):
"""
GIVEN a Flask application
WHEN the user tries to add a lesson with no 'name'
THEN display an error message
"""
response = client.post(url_for('root.index'),data=dict(email='testuser@gmail.com',password='password'))
# try to get home
response = client.post(url_for('lessons.add'),data=dict(name='',grade='1'),follow_redirects=True)
assert response.status_code == 200
assert 'この項目は必須です'.encode('utf-8') in response.data #part of error message
def test_lesson_list_empty(client, auth_user, init_database, add_data):
"""
GIVEN a Flask application
WHEN the asks for a list of lessons but the user doesnt have any
THEN display an empty list
"""
response = client.post(url_for('root.index'),data=dict(email='userempty@gmail.com',password='password'))
# try to get home
response = client.get(url_for('lessons.list'))
assert response.status_code == 200
assert b'list-group-item-action disabled' in response.data #part of the table with lessons
def test_lesson_list_1grade(client, auth_user, init_database, add_data):
"""
GIVEN a Flask application
WHEN the asks for a list of lessons the user has 2 grades
THEN display an empty list
"""
response = client.post(url_for('root.index'),data=dict(email='user1@gmail.com',password='password'))
# try to get home
response = client.get(url_for('lessons.list'))
assert response.status_code == 200
#assert 0
assert b'Grade 1 ' in response.data #part of the table with lessons for grade 1
assert b'Grade 2 ' not in response.data #part of the table with lessons for grade 6
assert b'Grade 6 ' not in response.data #part of the table with lessons for grade 6
def test_lesson_list_2grades(client, auth_user, init_database, add_data):
"""
GIVEN a Flask application
WHEN the asks for a list of lessons the user has 2 grades
THEN display an empty list
"""
response = client.post(url_for('root.index'),data=dict(email='user26@gmail.com',password='password'))
# try to get home
response = client.get(url_for('lessons.list'))
assert response.status_code == 200
#assert 0
assert b'Grade 2 ' in response.data #part of the table with lessons for grade 2
assert b'Grade 6 ' in response.data #part of the table with lessons for grade 6
def test_lesson_list_admin(client, auth_user, init_database, add_data):
"""
GIVEN a Flask application
WHEN the asks for a list of lessons the user has 2 grades
THEN display an empty list
"""
response = client.post(url_for('root.index'),data=dict(email='admin@hoshuko.com',password='password'))
# try to get home
response = client.get(url_for('lessons.list'))
assert response.status_code == 200
#assert 0
assert b'Grade 1 ' in response.data #part of the table with lessons for grade 2
assert b'Grade 2 ' in response.data #part of the table with lessons for grade 2
assert b'Grade 6 ' in response.data #part of the table with lessons for grade 6
#TODO: as this is testing input form a drop down its lower priority for now
#def test_new_bad_lesson(client,auth_user,init_database,authenticated_request):
# """
# GIVEN a Flask application
# WHEN the '/users/login or / (index) page is posted to (POST) with valid creds
# THEN login the user
# """
# response = client.post(url_for('root.index'),data=dict(username='testuser@gmail.com',password='password'))
#
# # add a lesson thats not in the users class - this would have to be a hack as its a drop down
# response = client.post(url_for('lessons.add'),data=dict(name='test lesson 1',grade='99'),follow_redirects=True)
# assert response.status_code == 200
# assert b'New Lesson Created' not in response.data
# assert b'You cannot add lessons to that grade' in response.data
| 46.019608
| 117
| 0.713677
| 739
| 4,694
| 4.441137
| 0.185386
| 0.027422
| 0.05972
| 0.063985
| 0.766606
| 0.755027
| 0.73309
| 0.73248
| 0.73248
| 0.692261
| 0
| 0.014906
| 0.185343
| 4,694
| 101
| 118
| 46.475248
| 0.843358
| 0.415637
| 0
| 0.45
| 0
| 0
| 0.172441
| 0.008564
| 0
| 0
| 0
| 0.009901
| 0.45
| 1
| 0.15
| false
| 0.15
| 0.1
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
e61ad3b47ce1946aec92434f328f762716234798
| 5,025
|
py
|
Python
|
esl/school_lunch/migrations/0006_auto_20160222_0438.py
|
thisjustin/eat_school_lunch
|
b01fb02f093373dbbc4e676ab3a6b3e496165cbf
|
[
"MIT"
] | null | null | null |
esl/school_lunch/migrations/0006_auto_20160222_0438.py
|
thisjustin/eat_school_lunch
|
b01fb02f093373dbbc4e676ab3a6b3e496165cbf
|
[
"MIT"
] | null | null | null |
esl/school_lunch/migrations/0006_auto_20160222_0438.py
|
thisjustin/eat_school_lunch
|
b01fb02f093373dbbc4e676ab3a6b3e496165cbf
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-22 04:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 1.9.2, 2016-02-22 04:38).

    Adds Meta options for ``child`` and ``coreapplication`` and relaxes a
    number of ``adult``/``child``/``coreapplication`` fields to
    ``blank=True, null=True``; all ``*_frequency`` fields share the same
    pay-period choice list.
    """
    dependencies = [
        ('school_lunch', '0005_child_is_head_start'),
    ]
    operations = [
        # Meta-only changes: admin display name and default ordering.
        migrations.AlterModelOptions(
            name='child',
            options={'verbose_name_plural': 'children'},
        ),
        migrations.AlterModelOptions(
            name='coreapplication',
            options={'ordering': ('created_date',), 'verbose_name': 'Application'},
        ),
        migrations.AlterField(
            model_name='adult',
            name='first_name',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='adult',
            name='last_name',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='adult',
            name='other_income',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='adult',
            name='other_income_frequency',
            field=models.CharField(blank=True, choices=[('weekly', 'weekly'), ('biweekly', 'biweekly'), ('twice-monthly', 'twice-monthly'), ('monthly', 'monthly')], max_length=15, null=True),
        ),
        migrations.AlterField(
            model_name='adult',
            name='public_income',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='adult',
            name='public_income_frequency',
            field=models.CharField(blank=True, choices=[('weekly', 'weekly'), ('biweekly', 'biweekly'), ('twice-monthly', 'twice-monthly'), ('monthly', 'monthly')], max_length=15, null=True),
        ),
        migrations.AlterField(
            model_name='adult',
            name='work_income',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='adult',
            name='work_income_frequency',
            field=models.CharField(blank=True, choices=[('weekly', 'weekly'), ('biweekly', 'biweekly'), ('twice-monthly', 'twice-monthly'), ('monthly', 'monthly')], max_length=15, null=True),
        ),
        # Child flags become explicit booleans defaulting to False.
        migrations.AlterField(
            model_name='child',
            name='is_foster',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='child',
            name='is_head_start',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='child',
            name='is_homeless_migrant_runaway',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='child',
            name='is_student',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='child',
            name='middle_initial',
            field=models.CharField(blank=True, max_length=1, null=True),
        ),
        migrations.AlterField(
            model_name='coreapplication',
            name='child_income',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='coreapplication',
            name='child_income_frequency',
            field=models.CharField(blank=True, choices=[('weekly', 'weekly'), ('biweekly', 'biweekly'), ('twice-monthly', 'twice-monthly'), ('monthly', 'monthly')], max_length=15, null=True),
        ),
        migrations.AlterField(
            model_name='coreapplication',
            name='city',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='coreapplication',
            name='email',
            field=models.EmailField(blank=True, max_length=254, null=True),
        ),
        migrations.AlterField(
            model_name='coreapplication',
            name='is_signed',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='coreapplication',
            name='phone_number',
            field=models.CharField(blank=True, max_length=12, null=True),
        ),
        migrations.AlterField(
            model_name='coreapplication',
            name='state',
            field=models.CharField(blank=True, max_length=2, null=True),
        ),
        migrations.AlterField(
            model_name='coreapplication',
            name='street_address',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='coreapplication',
            name='zip_code',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
    ]
| 37.5
| 191
| 0.571542
| 466
| 5,025
| 6.002146
| 0.201717
| 0.157311
| 0.196639
| 0.228102
| 0.809796
| 0.809796
| 0.789417
| 0.731141
| 0.664283
| 0.641402
| 0
| 0.013787
| 0.292736
| 5,025
| 133
| 192
| 37.781955
| 0.773213
| 0.013333
| 0
| 0.690476
| 1
| 0
| 0.178204
| 0.028052
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015873
| 0
| 0.039683
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e962c1a3fd86b2bb7ac056659a0e2c1c4c817d93
| 173
|
py
|
Python
|
checkout_sdk/vaults/responses/token.py
|
checkout/checkout-sdk-python
|
908d25c2904508fb0130e186d7d5de2ad116f0c3
|
[
"MIT"
] | 13
|
2018-08-29T09:09:11.000Z
|
2021-11-26T08:30:58.000Z
|
checkout_sdk/vaults/responses/token.py
|
checkout/checkout-sdk-python
|
908d25c2904508fb0130e186d7d5de2ad116f0c3
|
[
"MIT"
] | 17
|
2018-08-30T07:39:15.000Z
|
2022-03-31T16:09:38.000Z
|
checkout_sdk/vaults/responses/token.py
|
checkout/checkout-sdk-python
|
908d25c2904508fb0130e186d7d5de2ad116f0c3
|
[
"MIT"
] | 13
|
2018-09-11T13:00:55.000Z
|
2021-05-19T15:19:30.000Z
|
# from checkout_sdk import constants
# from checkout_sdk.common import ResponseDTO, HTTPResponse
from checkout_sdk.common import Resource
class Token(Resource):
    """Response model for a vault tokenization call.

    Declares no members of its own; it subclasses ``Resource`` so callers
    get a token-specific type (response fields presumably come from the
    parent's handling -- confirm against ``Resource``).
    """
    pass
| 19.222222
| 59
| 0.809249
| 22
| 173
| 6.227273
| 0.545455
| 0.262774
| 0.328467
| 0.306569
| 0.394161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144509
| 173
| 8
| 60
| 21.625
| 0.925676
| 0.531792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 8
|
75dd5f0570fb2db2e49c6321ce5cdbdfbf284190
| 5,425
|
py
|
Python
|
server_loteria.py
|
albertopeces2000/Loteria
|
86041e9d5bffa2b7318f97fd686ce9e575e648dd
|
[
"Apache-2.0"
] | null | null | null |
server_loteria.py
|
albertopeces2000/Loteria
|
86041e9d5bffa2b7318f97fd686ce9e575e648dd
|
[
"Apache-2.0"
] | null | null | null |
server_loteria.py
|
albertopeces2000/Loteria
|
86041e9d5bffa2b7318f97fd686ce9e575e648dd
|
[
"Apache-2.0"
] | null | null | null |
import socket
import sys
import random
def IP_PORT():
    """Prompt the operator for an IP and a port; return them as (str, int)."""
    IP = input('Introduce una IP: ')
    PORT = int(input('Introduce un PORT: '))
    return IP, PORT

def iniciar_servidor():
    """Ask for an address, open a listening socket and accept one client.

    Returns (servidor, conexion, address), where address is the client's
    (ip, port) tuple.  Replaces the three copy-pasted setup blocks of the
    original script.
    """
    IP, PORT = IP_PORT()
    servidor = socket.socket()
    servidor.bind((IP, PORT))
    servidor.listen(5)  # arbitrary backlog
    conexion, address = servidor.accept()  # (client socket, (ip, port))
    print('Nueva conexión establecida. El servidor está analizando datos...')
    print('Esta es la IP del concursante:', address)
    return servidor, conexion, address

def resto_de_ip(address):
    """Sum the four octets of the client's IP and return that sum modulo 10."""
    # address[0] is already a plain string; the original's .replace("'","")
    # was a no-op and has been dropped.
    return sum(int(octeto) for octeto in address[0].split('.')) % 10

servidor_loteria, conexion, address = iniciar_servidor()
resto = resto_de_ip(address)
print(resto)
random_number = random.randrange(10)  # 0-9
print(random_number)

repetir = True
while repetir:
    try:
        # The contestant wins when the IP-octet checksum matches the draw.
        if resto == random_number:
            mensaje = 'ENHORABUENA, TE HA TOCADO LA LOTERIA!!!. Su numero era: '
        else:
            mensaje = 'Lo sentimos, no ha ganado la loteria. Su numero aleatorio era: '
        conexion.send(str.encode(mensaje))
        conexion.send(str.encode(str(random_number)))  # int -> str -> bytes
        # Re-arm the server for the next contestant and draw a new number.
        servidor_loteria, conexion, address = iniciar_servidor()
        resto = resto_de_ip(address)
        print(resto)
        random_number = random.randrange(10)
        print(random_number)
    except ConnectionAbortedError:
        servidor_loteria.close()
        print('El servidor se está cerrando...')
        repetir = False
    except KeyboardInterrupt:
        conexion.close()  # close the client connection first
        servidor_loteria.close()  # then the listening socket
        print('El servidor se está cerrando...')
        repetir = False
| 43.75
| 127
| 0.557419
| 612
| 5,425
| 4.813725
| 0.215686
| 0.071283
| 0.028513
| 0.04277
| 0.815003
| 0.815003
| 0.815003
| 0.815003
| 0.787169
| 0.75594
| 0
| 0.015366
| 0.24424
| 5,425
| 123
| 128
| 44.105691
| 0.703171
| 0.212903
| 0
| 0.818182
| 0
| 0
| 0.146958
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011364
| false
| 0
| 0.034091
| 0
| 0.056818
| 0.159091
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f960bbc339c14f5eafbb74349f37d938144d9071
| 106,369
|
py
|
Python
|
guimotomandualcomplete.py
|
Jilermo/Teach-pedant-simulation-Motoman-dual
|
e36f1c0726dfcd15271462ce696e8302bd8e9870
|
[
"MIT"
] | null | null | null |
guimotomandualcomplete.py
|
Jilermo/Teach-pedant-simulation-Motoman-dual
|
e36f1c0726dfcd15271462ce696e8302bd8e9870
|
[
"MIT"
] | null | null | null |
guimotomandualcomplete.py
|
Jilermo/Teach-pedant-simulation-Motoman-dual
|
e36f1c0726dfcd15271462ce696e8302bd8e9870
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
import numpy as np
import sympy as sp
from matplotlib.widgets import Slider,CheckButtons,Button,TextBox,RadioButtons
from robolink import *
import serial
import threading
import time
import sys
fig, ax = plt.subplots()  # base figure; ax is immediately rebound below
plt.subplots_adjust(left=0.37,bottom=0.28,right=1,top=1)  # leave room for the widget panel
ax = plt.axes(projection = "3d")  # global 3-D axes all drawing helpers write to
def mmatrix(*matrices):
    """Return the matrix product of all arguments, multiplied left to right.

    Fixes the original, which raised UnboundLocalError when called with
    fewer than two matrices: a single matrix is now returned unchanged and
    an empty call raises a clear ValueError.
    """
    if not matrices:
        raise ValueError('mmatrix() requires at least one matrix')
    result = matrices[0]
    for m in matrices[1:]:
        result = np.dot(result, m)
    return result
def dibujar():
    """Redraw the figure and give the GUI event loop a brief time slice."""
    plt.draw()
    plt.pause(0.001)
def sind(t):
    """Sine of angle *t* given in degrees."""
    rad = t * np.pi / 180
    return np.sin(rad)
def cosd(t):
    """Cosine of angle *t* given in degrees."""
    rad = t * np.pi / 180
    return np.cos(rad)
def setaxis(lim=2):
    """Clip the 3-D view to a cube of half-width *lim* and fix the camera."""
    ax.set_xlim3d(-lim, lim)
    ax.set_ylim3d(-lim, lim)
    ax.set_zlim3d(-lim, lim)
    ax.view_init(elev=30, azim=40)
    ax.grid(True)
def sistemafijo(rango=1):
    """Draw the fixed world frame: X red, Y green, Z blue, length *rango*."""
    eje = [0, rango]
    origen = [0, 0]
    ax.plot3D(eje, origen, origen, color='red')
    ax.plot3D(origen, eje, origen, color='green')
    ax.plot3D(origen, origen, eje, color='blue')
def rotax(t):
    """Homogeneous 4x4 rotation of *t* degrees about the X axis."""
    c, s = cosd(t), sind(t)
    return np.array([[1, 0, 0, 0],
                     [0, c, -s, 0],
                     [0, s, c, 0],
                     [0, 0, 0, 1]])
def rotay(t):
    """Homogeneous 4x4 rotation of *t* degrees about the Y axis."""
    c, s = cosd(t), sind(t)
    return np.array([[c, 0, s, 0],
                     [0, 1, 0, 0],
                     [-s, 0, c, 0],
                     [0, 0, 0, 1]])
def rotaz(t):
    """Homogeneous 4x4 rotation of *t* degrees about the Z axis."""
    c, s = cosd(t), sind(t)
    return np.array([[c, -s, 0, 0],
                     [s, c, 0, 0],
                     [0, 0, 1, 0],
                     [0, 0, 0, 1]])
def rotaxf(t, r):
    """Rotate frame *r* by *t* degrees about the FIXED X axis, keeping r's origin."""
    res = np.dot(rotax(t), r)
    res[0, 3] = r[0, 3]
    res[1, 3] = r[1, 3]
    res[2, 3] = r[2, 3]
    return res
def rotayf(t, r):
    """Rotate frame *r* by *t* degrees about the FIXED Y axis, keeping r's origin."""
    res = np.dot(rotay(t), r)
    res[0, 3] = r[0, 3]
    res[1, 3] = r[1, 3]
    res[2, 3] = r[2, 3]
    return res
def rotazf(t, r):
    """Rotate frame *r* by *t* degrees about the FIXED Z axis, keeping r's origin."""
    res = np.dot(rotaz(t), r)
    res[0, 3] = r[0, 3]
    res[1, 3] = r[1, 3]
    res[2, 3] = r[2, 3]
    return res
def trasx(Dx):
    """Homogeneous 4x4 translation of *Dx* along the X axis."""
    m = np.array([[1, 0, 0, Dx],
                  [0, 1, 0, 0],
                  [0, 0, 1, 0],
                  [0, 0, 0, 1]])
    return m
def trasy(Dy):
    """Homogeneous 4x4 translation of *Dy* along the Y axis."""
    m = np.array([[1, 0, 0, 0],
                  [0, 1, 0, Dy],
                  [0, 0, 1, 0],
                  [0, 0, 0, 1]])
    return m
def trasz(Dz):
    """Homogeneous 4x4 translation of *Dz* along the Z axis."""
    m = np.array([[1, 0, 0, 0],
                  [0, 1, 0, 0],
                  [0, 0, 1, Dz],
                  [0, 0, 0, 1]])
    return m
def ur5movej(p1,p2):
    """Joint-space move: linearly interpolate the six joint angles from p1
    to p2 in 30 equal steps, redrawing the simulated UR5 at each step and
    mirroring the step to the RoboDK robot via the global ``robot``.

    p1, p2 -- sequences of six joint angles in degrees.
    """
    n=1
    tetar=[p1[0],p1[1],p1[2],p1[3],p1[4],p1[5]]
    paso=[0,0,0,0,0,0]
    # Per-joint increment for each of the 30 interpolation steps.
    paso[0]=(p2[0]-p1[0])/30
    paso[1]=(p2[1]-p1[1])/30
    paso[2]=(p2[2]-p1[2])/30
    paso[3]=(p2[3]-p1[3])/30
    paso[4]=(p2[4]-p1[4])/30
    paso[5]=(p2[5]-p1[5])/30
    while n<31:
        tetar[0]=tetar[0]+paso[0]
        tetar[1]=tetar[1]+paso[1]
        tetar[2]=tetar[2]+paso[2]
        tetar[3]=tetar[3]+paso[3]
        tetar[4]=tetar[4]+paso[4]
        tetar[5]=tetar[5]+paso[5]
        n=n+1
        ax.cla()
        setaxis(1000)
        print(tetar)
        ur5(tetar[0],tetar[1],tetar[2],tetar[3],tetar[4],tetar[5])
        dibujar()
        # Offsets map the simulation zero pose to the real controller's
        # convention -- TODO confirm against the RoboDK station.
        valores1=tetar.copy()
        valores1[1]=valores1[1]-90
        valores1[2]=valores1[2]-90
        valores1[4]=valores1[4]+90
        valores1[4]=valores1[4]%360
        robot.MoveJ(valores1)
def ur5movel(p1,p2,sem):
    """Cartesian straight-line move from pose p1 to pose p2 in 30 steps.

    p1, p2 -- 4x4 homogeneous target frames.
    sem    -- seed joint angles for the numeric IK (``ur5newton``); the
              previous step's solution seeds the next one.

    Position is interpolated linearly; orientation is interpolated through
    the Euler angles returned by ``obtangrot`` (defined elsewhere in the
    file -- not visible here).
    """
    con=0
    n=0
    pn=p1
    p1x=p1[0,3]
    p1y=p1[1,3]
    p1z=p1[2,3]
    p2x=p2[0,3]
    p2y=p2[1,3]
    p2z=p2[2,3]
    dx=p2x-p1x
    dy=p2y-p1y
    dz=p2z-p1z
    p1ea=mrot2eangle(p1)
    angrot=obtangrot(p2,p1ea)
    # Per-step orientation increment (30 steps).
    angrot[0]=angrot[0]/30
    angrot[1]=angrot[1]/30
    angrot[2]=angrot[2]/30
    angroti=[0,0,0]
    while n<1:
        # Accumulated rotation applied to the start frame.
        pn=p1@rotaz(angroti[2])@rotay(angroti[1])@rotax(angroti[0])
        angroti[0]=angroti[0]+angrot[0]
        angroti[1]=angroti[1]+angrot[1]
        angroti[2]=angroti[2]+angrot[2]
        x=p1x+dx*n
        y=p1y+dy*n
        z=p1z+dz*n
        n+=1/30
        con+=1
        pn[0,3]=x
        pn[1,3]=y
        pn[2,3]=z
        print(pn)
        print(x)
        print(y)
        print(z)
        tetas=ur5newton(pn,sem)
        sem=tetas.copy()
        ax.cla()
        setaxis(1000)
        ur5(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        # Same sim-to-controller joint offsets as ur5movej.
        valores1=tetas.copy()
        valores1[1]=valores1[1]-90
        valores1[2]=valores1[2]-90
        valores1[4]=valores1[4]+90
        valores1[4]=valores1[4]%360
        robot.MoveJ(valores1)
        dibujar()
def ur5movec(p1,p2,sem):
    """Circular move from pose p1 to pose p2 in two 50-step arcs.

    Builds (with sympy) the circle that lies on the sphere centred at the
    p1-p2 midpoint and on the plane through p1 normal to [dy,-dx,0], then
    walks both solution branches of that circle, solving IK numerically at
    each sample (``ur5newton``, seeded by ``sem``) and mirroring each step
    to the RoboDK robot.  Orientation is interpolated via ``obtangrot``.
    """
    print(p1)
    print(p2)
    x,y,z=sp.symbols('x,y,z')
    con=0
    n=0
    pn=p1
    x1=p1[0,3]
    y1=p1[1,3]
    z1=p1[2,3]
    x2=p2[0,3]
    y2=p2[1,3]
    z2=p2[2,3]
    p1x=p1[0,3]
    p1y=p1[1,3]
    p1z=p1[2,3]
    p2x=p2[0,3]
    p2y=p2[1,3]
    p2z=p2[2,3]
    dx=x2-x1
    dy=y2-y1
    dz=z2-z1
    # Midpoint of p1-p2 is the circle centre; r is the radius.
    xm=x1+dx*0.5
    ym=y1+dy*0.5
    zm=z1+dz*0.5
    r=np.sqrt((x1-xm)**2+(y1-ym)**2+(z1-zm)**2)
    esfera=(x-xm)**2+(y-ym)**2+(z-zm)**2-r**2
    nd=[dy,-dx,0]
    plano=nd[0]*(x-x1)+nd[1]*(y-y1)+nd[2]*(z-z1)
    # Sphere-plane intersection: two symbolic solution branches.
    cir=sp.nonlinsolve([esfera,plano],[x,y,z])
    var=cir.free_symbols
    cirnp1=sp.lambdify(var, cir.args[0], "numpy")
    cirnp2=sp.lambdify(var, cir.args[1], "numpy")
    p1ea=mrot2eangle(p1)
    angrot=obtangrot(p2,p1ea)
    # Per-sample orientation increment (50 samples per arc).
    angrot[0]=angrot[0]/50
    angrot[1]=angrot[1]/50
    angrot[2]=angrot[2]/50
    angroti=[0,0,0]
    # Pick which lambdified branch passes through p1, then traverse the
    # two half-circles; both branches run the same per-sample pipeline.
    if np.around(cirnp1(z1)[0],4)== x1:
        rango1=np.arange(start=np.around(zm+r,4), stop=z2, step=-(zm+r-z2)/50)
        puntos1=cirnp2(rango1)
        rango2=np.arange(start=z1, stop=zm+r, step=(zm+r-z1)/50)
        puntos2=cirnp1(rango2)
        print(puntos1)
        print(puntos2)
        while n<puntos1[0].size:
            pn=p1@rotaz(angroti[2])@rotay(angroti[1])@rotax(angroti[0])
            angroti[0]=angroti[0]+angrot[0]
            angroti[1]=angroti[1]+angrot[1]
            angroti[2]=angroti[2]+angrot[2]
            px=puntos2[0][n]
            py=puntos2[1][n]
            pz=puntos2[2][n]
            n=n+1
            pn[0,3]=px
            pn[1,3]=py
            pn[2,3]=pz
            tetas=ur5newton(pn,sem)
            sem=tetas.copy()
            ax.cla()
            setaxis(1000)
            ur5(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
            # Sim-to-controller joint offsets, as in ur5movej.
            valores1=tetas.copy()
            valores1[1]=valores1[1]-90
            valores1[2]=valores1[2]-90
            valores1[4]=valores1[4]+90
            valores1[4]=valores1[4]%360
            robot.MoveJ(valores1)
            dibujar()
        n=0
        while n<puntos2[0].size:
            pn=p1@rotaz(angroti[2])@rotay(angroti[1])@rotax(angroti[0])
            angroti[0]=angroti[0]+angrot[0]
            angroti[1]=angroti[1]+angrot[1]
            angroti[2]=angroti[2]+angrot[2]
            px=puntos1[0][n]
            py=puntos1[1][n]
            pz=puntos1[2][n]
            n=n+1
            pn[0,3]=px
            pn[1,3]=py
            pn[2,3]=pz
            tetas=ur5newton(pn,sem)
            sem=tetas.copy()
            ax.cla()
            setaxis(1000)
            ur5(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
            valores1=tetas.copy()
            valores1[1]=valores1[1]-90
            valores1[2]=valores1[2]-90
            valores1[4]=valores1[4]+90
            valores1[4]=valores1[4]%360
            robot.MoveJ(valores1)
            dibujar()
    else:
        rango1=np.arange(start=zm+r, stop=z2, step=-(zm+r-z2)/50)
        puntos1=cirnp1(rango1)
        rango2=np.arange(start=z1, stop=zm+r, step=(zm+r-z1)/50)
        puntos2=cirnp2(rango2)
        while n<puntos1[0].size:
            pn=p1@rotaz(angroti[2])@rotay(angroti[1])@rotax(angroti[0])
            angroti[0]=angroti[0]+angrot[0]
            angroti[1]=angroti[1]+angrot[1]
            angroti[2]=angroti[2]+angrot[2]
            px=puntos2[0][n]
            py=puntos2[1][n]
            pz=puntos2[2][n]
            n=n+1
            pn[0,3]=px
            pn[1,3]=py
            pn[2,3]=pz
            tetas=ur5newton(pn,sem)
            sem=tetas.copy()
            ax.cla()
            setaxis(1000)
            ur5(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
            valores1=tetas.copy()
            valores1[1]=valores1[1]-90
            valores1[2]=valores1[2]-90
            valores1[4]=valores1[4]+90
            valores1[4]=valores1[4]%360
            robot.MoveJ(valores1)
            dibujar()
        n=0
        while n<puntos2[0].size:
            pn=p1@rotaz(angroti[2])@rotay(angroti[1])@rotax(angroti[0])
            angroti[0]=angroti[0]+angrot[0]
            angroti[1]=angroti[1]+angrot[1]
            angroti[2]=angroti[2]+angrot[2]
            px=puntos1[0][n]
            py=puntos1[1][n]
            pz=puntos1[2][n]
            n=n+1
            pn[0,3]=px
            pn[1,3]=py
            pn[2,3]=pz
            tetas=ur5newton(pn,sem)
            sem=tetas.copy()
            ax.cla()
            setaxis(1000)
            ur5(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
            valores1=tetas.copy()
            valores1[1]=valores1[1]-90
            valores1[2]=valores1[2]-90
            valores1[4]=valores1[4]+90
            valores1[4]=valores1[4]%360
            robot.MoveJ(valores1)
            dibujar()
def mrot2eangle(r):
    """Extract [rx, ry, rz] Euler angles in degrees from the rotation part
    of homogeneous matrix *r* (XYZ convention)."""
    if abs(r[0, 2]) == 1:  # gimbal lock: pitch is exactly +/-90 degrees
        rx = 0
        ry = r[0, 2] * 90
        rz = np.degrees(np.arctan2(r[1, 0], r[1, 1]))
    else:
        rx = np.degrees(np.arctan2(-r[1, 2], r[2, 2]))
        ry = np.degrees(np.arcsin(r[0, 2]))
        rz = np.degrees(np.arctan2(-r[0, 1], r[0, 0]))
    return [rx, ry, rz]
def minv(R):
    """Invert the homogeneous 4x4 transform *R* in closed form.

    For R = [[A, p], [0, 1]] the inverse is [[A.T, -A.T @ p], [0, 1]],
    since A is a rotation (A^-1 == A.T).  Replaces the original's 30-odd
    element-by-element copies with array slicing; values are identical.
    """
    r = np.zeros((4, 4))
    rot_t = R[:3, :3].T            # rotation part: transpose == inverse
    r[:3, :3] = rot_t
    r[:3, 3] = -rot_t @ R[:3, 3]   # inverted translation
    r[3, 3] = 1                    # bottom row stays [0, 0, 0, 1]
    return r
def sistemamovil(r, rango=1):
    """Draw the moving frame stored in homogeneous matrix *r*:
    u axis red, v axis green, w axis blue, each of length *rango*."""
    px, py, pz = r[0, 3], r[1, 3], r[2, 3]
    for col, color in enumerate(('red', 'green', 'blue')):
        ax.plot3D([px, px + r[0, col] * rango],
                  [py, py + r[1, col] * rango],
                  [pz, pz + r[2, col] * rango], color=color)
def ppp(d1, d2, d3):
    """Draw a PPP (three prismatic joints) chain with extensions d1, d2, d3."""
    frames = [np.eye(4)]
    pasos = (trasz(d1) @ rotax(-90),
             trasz(d2) @ rotax(-90) @ rotay(90),
             trasz(d3) @ rotaz(180))
    for paso in pasos:
        frames.append(frames[-1] @ paso)
    sistemafijo()
    for f in frames[1:]:
        sistemamovil(f)
    # Red links joining consecutive frame origins.
    for a, b in zip(frames, frames[1:]):
        ax.plot3D([a[0, 3], b[0, 3]], [a[1, 3], b[1, 3]], [a[2, 3], b[2, 3]], color='red')
def rpp(t1, d2, d3):
    """Draw an RPP chain: revolute base angle t1, prismatic extensions d2, d3."""
    frames = [np.eye(4)]
    for paso in (rotaz(t1), trasz(d2), rotay(90) @ trasz(d3)):
        frames.append(frames[-1] @ paso)
    sistemafijo()
    for f in frames[1:]:
        sistemamovil(f)
    # Red links joining consecutive frame origins.
    for a, b in zip(frames, frames[1:]):
        ax.plot3D([a[0, 3], b[0, 3]], [a[1, 3], b[1, 3]], [a[2, 3], b[2, 3]], color='red')
def rrp(t1, t2, d3):
    """Draw an RRP chain: joint angles t1, t2 and prismatic extension d3."""
    frames = [np.eye(4)]
    pasos = (rotaz(t1),
             trasz(5) @ rotay(90) @ rotaz(90) @ rotaz(t2),
             rotay(90) @ rotaz(-90) @ trasz(d3))
    for paso in pasos:
        frames.append(frames[-1] @ paso)
    sistemafijo()
    for f in frames[1:]:
        sistemamovil(f)
    # Red links joining consecutive frame origins.
    for a, b in zip(frames, frames[1:]):
        ax.plot3D([a[0, 3], b[0, 3]], [a[1, 3], b[1, 3]], [a[2, 3], b[2, 3]], color='red')
def rrr(t1, t2, t3):
    """Draw an RRR (three revolute joints) chain with angles t1, t2, t3."""
    frames = [np.eye(4)]
    pasos = (rotaz(t1),
             trasz(4) @ rotax(90) @ rotaz(t2),
             trasx(4) @ rotaz(t3),
             trasx(4) @ rotay(90) @ rotaz(-90))
    for paso in pasos:
        frames.append(frames[-1] @ paso)
    sistemafijo()
    for f in frames[1:]:
        sistemamovil(f)
    # Red links joining consecutive frame origins.
    for a, b in zip(frames, frames[1:]):
        ax.plot3D([a[0, 3], b[0, 3]], [a[1, 3], b[1, 3]], [a[2, 3], b[2, 3]], color='red')
def scara(t1, t2, d3, t4):
    """Draw a SCARA arm: shoulder t1, elbow t2, prismatic d3, wrist t4."""
    frames = [np.eye(4)]
    pasos = (rotaz(t1) @ trasz(4),
             trasx(4),
             rotaz(t2) @ trasz(-1),
             trasx(4) @ rotax(180) @ rotaz(-90),
             trasz(d3),
             rotaz(t4) @ trasz(1))
    for paso in pasos:
        frames.append(frames[-1] @ paso)
    sistemafijo()
    for f in frames[1:]:
        sistemamovil(f)
    # Red links joining consecutive frame origins.
    for a, b in zip(frames, frames[1:]):
        ax.plot3D([a[0, 3], b[0, 3]], [a[1, 3], b[1, 3]], [a[2, 3], b[2, 3]], color='red')
def cobras800(t1, t2, d3, t4):
    """Draw an Adept Cobra s800 SCARA (dimensions in mm): shoulder t1,
    elbow t2, prismatic d3, wrist t4."""
    frames = [np.eye(4)]
    pasos = (rotaz(t1) @ trasz(342),
             trasx(425),
             rotaz(t2) @ trasz(56),
             trasx(375),
             trasz(-210) @ trasz(d3),
             rotax(180) @ rotaz(-180) @ rotaz(t4))
    for paso in pasos:
        frames.append(frames[-1] @ paso)
    sistemafijo(100)
    # Only the elbow and tool frames are drawn (the rest were commented out
    # in the original).
    sistemamovil(frames[3], 100)
    sistemamovil(frames[6], 100)
    for a, b in zip(frames, frames[1:]):
        ax.plot3D([a[0, 3], b[0, 3]], [a[1, 3], b[1, 3]], [a[2, 3], b[2, 3]], color='red')
def ur5(t1,t2,t3,t4,t5,t6,mb=np.eye(4)):
    """Draw a UR5 arm (dimensions in mm) at joint angles t1..t6 (degrees).

    mb is an optional base transform; the tool frame is printed before
    drawing.
    """
    t0 = np.eye(4) @ mb
    pasos = (rotaz(t1) @ trasz(89.2),
             trasy(-134.2) @ rotax(90) @ rotaz(t2),
             trasy(425),
             trasz(-118.45) @ rotaz(t3),
             trasx(392.25) @ rotaz(t4),
             trasz(94.75) @ rotax(-90) @ rotaz(t5),
             trasz(94.75),
             trasx(82.5) @ rotay(90) @ rotaz(-90) @ rotaz(t6))
    frames = [mb @ pasos[0]]  # frames[i] is the cumulative transform t0(i+1)
    for paso in pasos[1:]:
        frames.append(frames[-1] @ paso)
    print(frames[-1])  # tool pose
    sistemamovil(t0, 100)
    # t01 and t04 are intentionally not drawn (commented out in original).
    for idx in (1, 2, 4, 5, 6, 7):
        sistemamovil(frames[idx], 100)
    cadena = [t0] + frames
    for a, b in zip(cadena, cadena[1:]):
        ax.plot3D([a[0, 3], b[0, 3]], [a[1, 3], b[1, 3]], [a[2, 3], b[2, 3]], color='red')
def motoman(tb,t1a,t2a,t3a,t4a,t5a,t6a,t7a,t1b,t2b,t3b,t4b,t5b,t6b,t7b):
    """Draw a dual-arm Motoman: torso angle tb, seven joint angles per arm
    (t1a..t7a right, t1b..t7b left, all in degrees).

    Each arm is a chain of 24 incremental transforms; only every fourth
    cumulative frame (the joint frames) is drawn as a triad, and red
    segments join all consecutive origins.  NOTE(review): the names T12
    and T23 are deliberately rebound below (first a step transform, later
    a cumulative frame) -- preserve the statement order when editing.
    """
    T0=np.eye(4)
    # Torso / shoulder mounting chain; Ti3 and Tib3 are the right/left
    # shoulder base frames.
    Ti=rotaz(tb)
    Ti1=Ti@trasz(8)
    Ti2=Ti1@trasx(1.57)
    Ti3=Ti2@trasy(2.5)@rotax(-90)
    Tib3=Ti2@trasy(-2.5)@rotax(90)
    # Right arm: step transforms T12..T2324.
    T01=Ti3@rotaz(270)@rotaz(t1a)@trasz(.3);
    T12=trasx(-1.09)
    T23=trasz(2.5)
    T34=trasx(1.09)@rotay(90)@rotaz(t2a)
    T45=rotay(-90)@trasx(1.2)
    T56=trasz(1)
    T67=trasx(-1.2)
    T78=trasz(0.45)@rotaz(t3a)
    T89=trasz(2.225)
    T910=trasx(1.04)
    T1011=trasz(1.225)
    T1112=trasx(-1.04)@rotay(90)@rotaz(t4a)
    T1213=rotay(-90)@trasx(-0.98)
    T1314=trasz(1.4)
    T1415=trasx(0.98)
    T1516=trasz(0.7)@rotaz(t5a)
    T1617=trasz(0.7)
    T1718=trasx(-0.86)
    T1819=trasz(1.4)
    T1920=trasx(0.86)@rotay(90)@rotaz(t6a)
    T2021=rotay(-90)@trasx(0.8)
    T2122=trasz(0.9)
    T2223=trasx(-0.8)
    T2324=trasz(0.9)@rotaz(90)@rotaz(t7a)
    # Right arm: cumulative frames (T12/T23 are rebound here).
    T02=T01@T12
    T03=T02@T23
    T04=T03@T34
    T05=T04@T45
    T06=T05@T56
    T07=T06@T67
    T08=T07@T78
    T09=T08@T89
    T10=T09@T910
    T11=T10@T1011
    T12=T11@T1112
    T13=T12@T1213
    T14=T13@T1314
    T15=T14@T1415
    T16=T15@T1516
    T17=T16@T1617
    T18=T17@T1718
    T19=T18@T1819
    T20=T19@T1920
    T21=T20@T2021
    T22=T21@T2122
    T23=T22@T2223
    T24=T23@T2324
    print(T24)
    # Left arm: step transforms (note the TB34/TB910 capitalisation
    # inconsistency is original and used consistently below).
    Tb01=Tib3@rotaz(270)@rotaz(t1b)@trasz(0.3)
    Tb12=trasx(-1.09)
    Tb23=trasz(2.5)
    TB34=trasx(1.09)@rotay(90)@rotaz(t2b)
    Tb45=rotay(-90)@trasx(1.2)
    Tb56=trasz(1)
    Tb67=trasx(-1.2)
    Tb78=trasz(0.45)@rotaz(t3b)
    Tb89=trasz(2.225)
    TB910=trasx(1.04)
    Tb1011=trasz(1.225)
    Tb1112=trasx(-1.04)@rotay(90)@rotaz(t4b)
    Tb1213=rotay(-90)@trasx(-0.98)
    Tb1314=trasz(1.4)
    Tb1415=trasx(0.98)
    Tb1516=trasz(0.7)@rotaz(t5b)
    Tb1617=trasz(0.7)
    Tb1718=trasx(-0.86)
    Tb1819=trasz(1.4)
    Tb1920=trasx(0.86)@rotay(90)@rotaz(t6b)
    Tb2021=rotay(-90)@trasx(0.8)
    Tb2122=trasz(0.9)
    Tb2223=trasx(-0.8)
    Tb2324=trasz(0.9)@rotaz(90)@rotaz(t7b)
    # Left arm: cumulative frames (Tb12/Tb23 rebound as above).
    Tb02=Tb01@Tb12
    Tb03=Tb02@Tb23
    Tb04=Tb03@TB34
    Tb05=Tb04@Tb45
    Tb06=Tb05@Tb56
    Tb07=Tb06@Tb67
    Tb08=Tb07@Tb78
    Tb09=Tb08@Tb89
    Tb10=Tb09@TB910
    Tb11=Tb10@Tb1011
    Tb12=Tb11@Tb1112
    Tb13=Tb12@Tb1213
    Tb14=Tb13@Tb1314
    Tb15=Tb14@Tb1415
    Tb16=Tb15@Tb1516
    Tb17=Tb16@Tb1617
    Tb18=Tb17@Tb1718
    Tb19=Tb18@Tb1819
    Tb20=Tb19@Tb1920
    Tb21=Tb20@Tb2021
    Tb22=Tb21@Tb2122
    Tb23=Tb22@Tb2223
    Tb24=Tb23@Tb2324
    # Draw joint triads (every fourth cumulative frame) for both arms.
    sistemafijo()
    sistemamovil(T0)
    sistemamovil(T01)
    sistemamovil(T04)
    sistemamovil(T08)
    sistemamovil(T12)
    sistemamovil(T16)
    sistemamovil(T20)
    sistemamovil(T24)
    sistemamovil(T0)
    sistemamovil(Tb01)
    sistemamovil(Tb04)
    sistemamovil(Tb08)
    sistemamovil(Tb12)
    sistemamovil(Tb16)
    sistemamovil(Tb20)
    sistemamovil(Tb24)
    # Red links: torso, then the right arm, then torso again and left arm.
    ax.plot3D([Ti[0,3], Ti1[0,3]], [Ti[1,3], Ti1[1,3]], [Ti[2,3], Ti1[2,3]],color='red')
    ax.plot3D([Ti1[0,3], Ti2[0,3]], [Ti1[1,3], Ti2[1,3]], [Ti1[2,3], Ti2[2,3]],color='red')
    ax.plot3D([Ti2[0,3], Ti3[0,3]], [Ti2[1,3], Ti3[1,3]], [Ti2[2,3], Ti3[2,3]],color='red')
    ax.plot3D([Ti3[0,3], T01[0,3]], [Ti3[1,3], T01[1,3]], [Ti3[2,3], T01[2,3]],color='red')
    ax.plot3D([T01[0,3], T02[0,3]], [T01[1,3], T02[1,3]], [T01[2,3], T02[2,3]],color='red')
    ax.plot3D([T02[0,3], T03[0,3]], [T02[1,3], T03[1,3]], [T02[2,3], T03[2,3]],color='red')
    ax.plot3D([T03[0,3], T04[0,3]], [T03[1,3], T04[1,3]], [T03[2,3], T04[2,3]],color='red')
    ax.plot3D([T04[0,3], T05[0,3]], [T04[1,3], T05[1,3]], [T04[2,3], T05[2,3]],color='red')
    ax.plot3D([T05[0,3], T06[0,3]], [T05[1,3], T06[1,3]], [T05[2,3], T06[2,3]],color='red')
    ax.plot3D([T06[0,3], T07[0,3]], [T06[1,3], T07[1,3]], [T06[2,3], T07[2,3]],color='red')
    ax.plot3D([T07[0,3], T08[0,3]], [T07[1,3], T08[1,3]], [T07[2,3], T08[2,3]],color='red')
    ax.plot3D([T08[0,3], T09[0,3]], [T08[1,3], T09[1,3]], [T08[2,3], T09[2,3]],color='red')
    ax.plot3D([T09[0,3], T10[0,3]], [T09[1,3], T10[1,3]], [T09[2,3], T10[2,3]],color='red')
    ax.plot3D([T10[0,3], T11[0,3]], [T10[1,3], T11[1,3]], [T10[2,3], T11[2,3]],color='red')
    ax.plot3D([T11[0,3], T12[0,3]], [T11[1,3], T12[1,3]], [T11[2,3], T12[2,3]],color='red')
    ax.plot3D([T12[0,3], T13[0,3]], [T12[1,3], T13[1,3]], [T12[2,3], T13[2,3]],color='red')
    ax.plot3D([T13[0,3], T14[0,3]], [T13[1,3], T14[1,3]], [T13[2,3], T14[2,3]],color='red')
    ax.plot3D([T14[0,3], T15[0,3]], [T14[1,3], T15[1,3]], [T14[2,3], T15[2,3]],color='red')
    ax.plot3D([T15[0,3], T16[0,3]], [T15[1,3], T16[1,3]], [T15[2,3], T16[2,3]],color='red')
    ax.plot3D([T16[0,3], T17[0,3]], [T16[1,3], T17[1,3]], [T16[2,3], T17[2,3]],color='red')
    ax.plot3D([T17[0,3], T18[0,3]], [T17[1,3], T18[1,3]], [T17[2,3], T18[2,3]],color='red')
    ax.plot3D([T18[0,3], T19[0,3]], [T18[1,3], T19[1,3]], [T18[2,3], T19[2,3]],color='red')
    ax.plot3D([T19[0,3], T20[0,3]], [T19[1,3], T20[1,3]], [T19[2,3], T20[2,3]],color='red')
    ax.plot3D([T20[0,3], T21[0,3]], [T20[1,3], T21[1,3]], [T20[2,3], T21[2,3]],color='red')
    ax.plot3D([T21[0,3], T22[0,3]], [T21[1,3], T22[1,3]], [T21[2,3], T22[2,3]],color='red')
    ax.plot3D([T22[0,3], T23[0,3]], [T22[1,3], T23[1,3]], [T22[2,3], T23[2,3]],color='red')
    ax.plot3D([T23[0,3], T24[0,3]], [T23[1,3], T24[1,3]], [T23[2,3], T24[2,3]],color='red')
    ax.plot3D([Ti[0,3], Ti1[0,3]], [Ti[1,3], Ti1[1,3]], [Ti[2,3], Ti1[2,3]],color='red')
    ax.plot3D([Ti1[0,3], Ti2[0,3]], [Ti1[1,3], Ti2[1,3]], [Ti1[2,3], Ti2[2,3]],color='red')
    ax.plot3D([Ti2[0,3], Tib3[0,3]], [Ti2[1,3], Tib3[1,3]], [Ti2[2,3], Tib3[2,3]],color='red')
    ax.plot3D([Tib3[0,3], Tb01[0,3]], [Tib3[1,3], Tb01[1,3]], [Tib3[2,3], Tb01[2,3]],color='red')
    ax.plot3D([Tb01[0,3], Tb02[0,3]], [Tb01[1,3], Tb02[1,3]], [Tb01[2,3], Tb02[2,3]],color='red')
    ax.plot3D([Tb02[0,3], Tb03[0,3]], [Tb02[1,3], Tb03[1,3]], [Tb02[2,3], Tb03[2,3]],color='red')
    ax.plot3D([Tb03[0,3], Tb04[0,3]], [Tb03[1,3], Tb04[1,3]], [Tb03[2,3], Tb04[2,3]],color='red')
    ax.plot3D([Tb04[0,3], Tb05[0,3]], [Tb04[1,3], Tb05[1,3]], [Tb04[2,3], Tb05[2,3]],color='red')
    ax.plot3D([Tb05[0,3], Tb06[0,3]], [Tb05[1,3], Tb06[1,3]], [Tb05[2,3], Tb06[2,3]],color='red')
    ax.plot3D([Tb06[0,3], Tb07[0,3]], [Tb06[1,3], Tb07[1,3]], [Tb06[2,3], Tb07[2,3]],color='red')
    ax.plot3D([Tb07[0,3], Tb08[0,3]], [Tb07[1,3], Tb08[1,3]], [Tb07[2,3], Tb08[2,3]],color='red')
    ax.plot3D([Tb08[0,3], Tb09[0,3]], [Tb08[1,3], Tb09[1,3]], [Tb08[2,3], Tb09[2,3]],color='red')
    ax.plot3D([Tb09[0,3], Tb10[0,3]], [Tb09[1,3], Tb10[1,3]], [Tb09[2,3], Tb10[2,3]],color='red')
    ax.plot3D([Tb10[0,3], Tb11[0,3]], [Tb10[1,3], Tb11[1,3]], [Tb10[2,3], Tb11[2,3]],color='red')
    ax.plot3D([Tb11[0,3], Tb12[0,3]], [Tb11[1,3], Tb12[1,3]], [Tb11[2,3], Tb12[2,3]],color='red')
    ax.plot3D([Tb12[0,3], Tb13[0,3]], [Tb12[1,3], Tb13[1,3]], [Tb12[2,3], Tb13[2,3]],color='red')
    ax.plot3D([Tb13[0,3], Tb14[0,3]], [Tb13[1,3], Tb14[1,3]], [Tb13[2,3], Tb14[2,3]],color='red')
    ax.plot3D([Tb14[0,3], Tb15[0,3]], [Tb14[1,3], Tb15[1,3]], [Tb14[2,3], Tb15[2,3]],color='red')
    ax.plot3D([Tb15[0,3], Tb16[0,3]], [Tb15[1,3], Tb16[1,3]], [Tb15[2,3], Tb16[2,3]],color='red')
    ax.plot3D([Tb16[0,3], Tb17[0,3]], [Tb16[1,3], Tb17[1,3]], [Tb16[2,3], Tb17[2,3]],color='red')
    ax.plot3D([Tb17[0,3], Tb18[0,3]], [Tb17[1,3], Tb18[1,3]], [Tb17[2,3], Tb18[2,3]],color='red')
    ax.plot3D([Tb18[0,3], Tb19[0,3]], [Tb18[1,3], Tb19[1,3]], [Tb18[2,3], Tb19[2,3]],color='red')
    ax.plot3D([Tb19[0,3], Tb20[0,3]], [Tb19[1,3], Tb20[1,3]], [Tb19[2,3], Tb20[2,3]],color='red')
    ax.plot3D([Tb20[0,3], Tb21[0,3]], [Tb20[1,3], Tb21[1,3]], [Tb20[2,3], Tb21[2,3]],color='red')
    ax.plot3D([Tb21[0,3], Tb22[0,3]], [Tb21[1,3], Tb22[1,3]], [Tb21[2,3], Tb22[2,3]],color='red')
    ax.plot3D([Tb22[0,3], Tb23[0,3]], [Tb22[1,3], Tb23[1,3]], [Tb22[2,3], Tb23[2,3]],color='red')
    ax.plot3D([Tb23[0,3], Tb24[0,3]], [Tb23[1,3], Tb24[1,3]], [Tb23[2,3], Tb24[2,3]],color='red')
def accmotoman(tb,t1a,t2a,t3a,t4a,t5a,t6a,t7a,t1b,t2b,t3b,t4b,t5b,t6b,t7b):
    """Draw the dual-arm Motoman on the global `ax`.

    tb is the torso joint; t1a..t7a drive the right ("derecho") arm and
    t1b..t7b the left ("izquierdo") arm, all angles in degrees.  Prints both
    end-effector poses and plots joint frames plus red link segments.
    """
    def _eslabones(t3,t4,t5,t6,t7):
        # The 20 link transforms of one arm, taken after the shoulder frame
        # (which already carries the t2 joint).  Both arms share this chain.
        return [trasz(-80), trasy(-90), trasz(80),
                trasy(-90)@rotax(90)@rotaz(t3),
                trasz(90), trasy(-80), trasz(90),
                trasy(80)@rotax(-90)@rotaz(t4),
                trasz(80), trasy(-90), trasz(-80),
                trasy(-90)@rotax(90)@rotaz(t5),
                trasz(90), trasy(80), trasz(90),
                trasy(-80)@rotax(-90)@rotaz(t6),
                trasz(-80), trasy(-87.5), trasz(80),
                trasy(-87.5)@rotax(90)@rotaz(t7)]
    def _cadena(hombro,t2,t3,t4,t5,t6,t7):
        # Accumulate the cumulative frames of one arm from its shoulder frame.
        frames=[hombro@rotax(-90)@rotaz(t2)]
        for rel in _eslabones(t3,t4,t5,t6,t7):
            frames.append(frames[-1]@rel)
        return frames
    def _linea(a,b):
        ax.plot3D([a[0,3],b[0,3]],[a[1,3],b[1,3]],[a[2,3],b[2,3]],color='red')
    T0=np.eye(4)
    Ti=trasz(893.5)
    Ti1=Ti@trasx(92.5)@rotaz(tb)                             # torso rotation
    Ti2=Ti1@trasx(100)@trasz(306.5)                          # shoulder mount
    Ti3=Ti2@rotax(-90)@rotaz(-180)@rotaz(t1a)@trasz(265)     # right shoulder
    Tib3=Ti2@rotax(90)@rotaz(-180)@rotaz(t1b)@trasz(265)     # left shoulder
    der=_cadena(Ti3,t2a,t3a,t4a,t5a,t6a,t7a)
    print("derecho ++++++")
    print(der[-1])
    izq=_cadena(Tib3,t2b,t3b,t4b,t5b,t6b,t7b)
    print("Izquierdo ++++++")
    print(izq[-1])
    sistemafijo(100)
    sistemamovil(T0,100)
    for k in (0,4,8,12,16,20):   # every fourth frame carries a joint axis
        sistemamovil(der[k],100)
    for k in (0,4,8,12,16,20):
        sistemamovil(izq[k],100)
    cadena_der=[T0,Ti,Ti1,Ti2,Ti3]+der
    for a,b in zip(cadena_der,cadena_der[1:]):
        _linea(a,b)
    # Left chain re-draws the Ti->Ti1->Ti2 torso segments (as before).
    cadena_izq=[Ti,Ti1,Ti2,Tib3]+izq
    for a,b in zip(cadena_izq,cadena_izq[1:]):
        _linea(a,b)
def accmotomanv(tb,t1a,t2a,t3a,t4a,t5a,t6a,t7a,t1b,t2b,t3b,t4b,t5b,t6b,t7b,mh=np.eye(4)):
    """Forward kinematics of the dual-arm Motoman (no drawing).

    Returns a pair of 4x4 poses (right arm, left arm).  Angles in degrees.
    NOTE: `mh` is accepted for call compatibility with the other *v
    functions but is NOT applied to either pose (same as before).
    """
    def _brazo(hombro,t2,t3,t4,t5,t6,t7):
        # Fold one arm's transform chain starting at its shoulder frame.
        pose=hombro@rotax(-90)@rotaz(t2)
        for rel in (trasz(-80), trasy(-90), trasz(80),
                    trasy(-90)@rotax(90)@rotaz(t3),
                    trasz(90), trasy(-80), trasz(90),
                    trasy(80)@rotax(-90)@rotaz(t4),
                    trasz(80), trasy(-90), trasz(-80),
                    trasy(-90)@rotax(90)@rotaz(t5),
                    trasz(90), trasy(80), trasz(90),
                    trasy(-80)@rotax(-90)@rotaz(t6),
                    trasz(-80), trasy(-87.5), trasz(80),
                    trasy(-87.5)@rotax(90)@rotaz(t7)):
            pose=pose@rel
        return pose
    # Torso / shoulder-mount frame shared by both arms.
    Ti2=trasz(893.5)@trasx(92.5)@rotaz(tb)@trasx(100)@trasz(306.5)
    Ti3=Ti2@rotax(-90)@rotaz(-180)@rotaz(t1a)@trasz(265)     # right shoulder
    Tib3=Ti2@rotax(90)@rotaz(-180)@rotaz(t1b)@trasz(265)     # left shoulder
    return _brazo(Ti3,t2a,t3a,t4a,t5a,t6a,t7a), _brazo(Tib3,t2b,t3b,t4b,t5b,t6b,t7b)
def motomannewton(vd,vd2,sem,mh=np.eye(4)):
    """Dual-arm IK by Newton iteration on a finite-difference Jacobian.

    vd / vd2 : 4x4 target poses for the right ("derecho") and left
               ("izquierdo") end effectors.
    sem      : 15 seed angles [tb, t1..t7, tb1..tb7] in degrees.
    mh       : optional tool transform forwarded to accmotomanv.
    Returns the 15 solved angles (same ordering as sem); if no convergence
    after 100 iterations the seed is returned unchanged.

    Fixes relative to the previous revision:
    - joint t7 was updated from t6 (`t7=t6+R[6,0]`), corrupting the solution;
    - the give-up branch restored the seed with every index shifted by one;
    - mh is now passed to the nominal FK call as well as the perturbed ones.
    """
    d=0.01
    tol=0.0001
    q=list(sem)                      # [tb, t1..t7, tb1..tb7]
    j=np.zeros([24,15])
    b=np.ones([24,1])
    # Residual rows cover the top 3x4 sub-block of each pose (12 per arm).
    cells=[(m,c) for m in range(3) for c in range(4)]
    con=0
    calc=True
    while calc:
        con=con+1
        t,t2al=accmotomanv(q[0],q[1],q[2],q[3],q[4],q[5],q[6],q[7],
                           q[8],q[9],q[10],q[11],q[12],q[13],q[14],mh)
        tn=t-vd
        tn2=t2al-vd2
        for i,(m,c) in enumerate(cells):
            b[i,0]=tn[m,c]
            b[i+12,0]=tn2[m,c]
        # Jacobian columns follow the historical order t1..t7, tb1..tb7, tb,
        # i.e. column n perturbs q[(n+1)%15].
        for n in range(15):
            qp=list(q)
            qp[(n+1)%15]+=d
            td,td2=accmotomanv(qp[0],qp[1],qp[2],qp[3],qp[4],qp[5],qp[6],qp[7],
                               qp[8],qp[9],qp[10],qp[11],qp[12],qp[13],qp[14],mh)
            tj=(td-t)/d
            tj2=(td2-t2al)/d
            for i,(m,c) in enumerate(cells):
                j[i,n]=tj[m,c]
                j[i+12,n]=tj2[m,c]
        R=np.linalg.pinv(j)@(-b)
        for n in range(15):
            k=(n+1)%15
            q[k]=(q[k]+R[n,0])%360   # keep every angle in [0, 360)
        if np.all(np.abs(b)<tol):
            calc=False
        if con>100:                  # give up: fall back to the seed
            calc=False
            q=list(sem)
    tetas=[q[0],q[1],q[2],q[3],q[4],q[5],q[6],q[7],
           q[8],q[9],q[10],q[11],q[12],q[13],q[14]]
    print(con)
    print(tetas)
    return tetas
def accmotomanv2(tb,t1a,t2a,t3a,t4a,t5a,t6a,t7a,t1b,t2b,t3b,t4b,t5b,t6b,t7b,mh=np.eye(4)):
    """Dual-arm forward kinematics, returning (right_pose, left_pose).

    This is byte-for-byte the same computation as accmotomanv; the name is
    kept for existing callers, so simply delegate to it.
    """
    return accmotomanv(tb,t1a,t2a,t3a,t4a,t5a,t6a,t7a,
                       t1b,t2b,t3b,t4b,t5b,t6b,t7b,mh)
def motomannewton2(vd,vd2,sem,mh=np.eye(4)):
    """Dual-arm IK, coarse-step variant of motomannewton.

    Uses a large finite-difference step (d=10 degrees) and wraps every angle
    into [-180, 180] after each update.  vd/vd2 are the 4x4 right/left target
    poses, sem the 15 seed angles [tb, t1..t7, tb1..tb7] in degrees.
    Returns the solved angles, or the seed unchanged after 100 iterations.

    Fix: mh is now also forwarded to the nominal FK evaluation; previously it
    was only passed to the perturbed calls, making the Jacobian inconsistent
    with the residual whenever a tool transform was supplied.
    """
    d=10
    tol=0.0001
    q=list(sem)                      # [tb, t1..t7, tb1..tb7]
    j=np.zeros([24,15])
    b=np.ones([24,1])
    cells=[(m,c) for m in range(3) for c in range(4)]
    def _wrap(a):
        # Map any angle into [-180, 180].
        a=a%360
        if a>180:
            a=a-360
        return a
    con=0
    calc=True
    while calc:
        con=con+1
        t,t2al=accmotomanv(q[0],q[1],q[2],q[3],q[4],q[5],q[6],q[7],
                           q[8],q[9],q[10],q[11],q[12],q[13],q[14],mh)
        tn=t-vd
        tn2=t2al-vd2
        for i,(m,c) in enumerate(cells):
            b[i,0]=tn[m,c]
            b[i+12,0]=tn2[m,c]
        # Columns follow the historical order t1..t7, tb1..tb7, tb:
        # column n perturbs q[(n+1)%15].
        for n in range(15):
            qp=list(q)
            qp[(n+1)%15]+=d
            td,td2=accmotomanv(qp[0],qp[1],qp[2],qp[3],qp[4],qp[5],qp[6],qp[7],
                               qp[8],qp[9],qp[10],qp[11],qp[12],qp[13],qp[14],mh)
            tj=(td-t)/d
            tj2=(td2-t2al)/d
            for i,(m,c) in enumerate(cells):
                j[i,n]=tj[m,c]
                j[i+12,n]=tj2[m,c]
        R=np.linalg.pinv(j)@(-b)
        for n in range(15):
            k=(n+1)%15
            q[k]=_wrap(q[k]+R[n,0])
        if np.all(np.abs(b)<tol):
            calc=False
        if con>100:                  # give up: fall back to the seed
            calc=False
            q=list(sem)
    tetas=[q[0],q[1],q[2],q[3],q[4],q[5],q[6],q[7],
           q[8],q[9],q[10],q[11],q[12],q[13],q[14]]
    print(con)
    print(tetas)
    return tetas
def motomanmovej(p1,p2):
    """Joint-interpolated move between two 15-angle configurations.

    Wraps both configurations into [-180, 180] in place, then animates 30
    evenly spaced intermediate configurations with accmotoman, redrawing each
    frame.  Returns the final configuration (equal to the wrapped p2 up to
    accumulated floating-point error).
    """
    # Normalize both endpoints into [-180, 180] (mutates the caller's lists,
    # as before).
    for p in (p1,p2):
        for y in range(15):
            if p[y]>180:
                p[y]=-(360-p[y])
            if p[y]<-180:
                p[y]=360+p[y]
    tetas=list(p1)
    paso=[(fin-ini)/30 for ini,fin in zip(p1,p2)]
    for _ in range(30):
        tetas=[ang+inc for ang,inc in zip(tetas,paso)]
        ax.cla()
        setaxis(1000)
        accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],
                   tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],
                   tetas[12],tetas[13],tetas[14])
        dibujar()
    return tetas
def motomanmovel(tetas,tetas2):
    """Cartesian (straight-line) move of both Motoman arms.

    tetas / tetas2 : start and goal configurations (15 angles each).
    Interpolates position linearly and orientation via per-step Euler
    increments (from obtangrot) over 30 steps, solving IK with
    motomannewton2 and redrawing at each step.  Returns the final solved
    configuration.

    Fix: the left-arm intermediate pose was built from the right-arm start
    pose (mtr1); it now uses the left-arm start pose (mtr2), matching the
    left-arm position terms.
    """
    sem=tetas.copy()
    mtr1,mtr2=accmotomanv(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],
                          tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],
                          tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
    mtr12,mtr22=accmotomanv(tetas2[0],tetas2[1],tetas2[2],tetas2[3],tetas2[4],
                            tetas2[5],tetas2[6],tetas2[7],tetas2[8],tetas2[9],
                            tetas2[10],tetas2[11],tetas2[12],tetas2[13],tetas2[14])
    # Position deltas per arm and per-step Euler rotation increments.
    d_der=[mtr12[i,3]-mtr1[i,3] for i in range(3)]
    d_izq=[mtr22[i,3]-mtr2[i,3] for i in range(3)]
    ang1=[a/30 for a in obtangrot(mtr12,mtr1)]
    ang2=[a/30 for a in obtangrot(mtr22,mtr2)]
    acc1=[0,0,0]
    acc2=[0,0,0]
    n=0
    while n<1:
        # Intermediate orientations, rotated about the start poses.
        pn=mtr1@rotaz(acc1[2])@rotay(acc1[1])@rotax(acc1[0])
        pn2=mtr2@rotaz(acc2[2])@rotay(acc2[1])@rotax(acc2[0])  # fix: was mtr1
        for i in range(3):
            acc1[i]+=ang1[i]
            acc2[i]+=ang2[i]
        # Intermediate positions, linear in n (n runs 0 -> 29/30).
        for i in range(3):
            pn[i,3]=mtr1[i,3]+d_der[i]*n
            pn2[i,3]=mtr2[i,3]+d_izq[i]*n
        n+=1/30
        tetas=motomannewton2(pn,pn2,sem)
        sem=list(tetas)
        ax.cla()
        setaxis(1000)
        accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],
                   tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],
                   tetas[12],tetas[13],tetas[14])
        dibujar()
    return tetas
def IRB1600(t1,t2,t3,t4,t5,t6):
    """Draw the ABB IRB 1600 for the six joint angles (degrees).

    Plots the fixed frame, selected joint frames, and red link segments on
    the global `ax`; returns the 4x4 flange pose (t08).
    """
    relativos=[rotaz(t1),
               trasz(486.5)@rotax(-90),
               trasx(150)@rotaz(t2),
               trasy(-700)@rotaz(90)@rotaz(t3),
               trasx(-110)@rotax(90)@rotaz(t4),
               trasz(640)@rotax(-90)@rotaz(t5),
               trasy(-150)@rotax(90)@rotaz(t6),
               trasz(50)]
    frames=[np.eye(4)]               # frames[k] is t0k (frames[0] = base)
    for rel in relativos:
        frames.append(frames[-1]@rel)
    sistemafijo(100)
    # Frames t01 and t03..t07 are drawn; t02 and t08 are intentionally not
    # (they were commented out in the original).
    for k in (1,3,4,5,6,7):
        sistemamovil(frames[k],100)
    for a,b in zip(frames,frames[1:]):
        ax.plot3D([a[0,3],b[0,3]],[a[1,3],b[1,3]],[a[2,3],b[2,3]],color='red')
    return frames[-1]
def IRB1600v(t1,t2,t3,t4,t5,t6,mh=np.eye(4)):
    """Forward kinematics of the ABB IRB 1600 (no drawing).

    Returns the 4x4 flange pose with the optional tool transform mh applied.
    Angles are in degrees.
    """
    pose=np.eye(4)
    for rel in (rotaz(t1),
                trasz(486.5)@rotax(-90),
                trasx(150)@rotaz(t2),
                trasy(-700)@rotaz(90)@rotaz(t3),
                trasx(-110)@rotax(90)@rotaz(t4),
                trasz(640)@rotax(-90)@rotaz(t5),
                trasy(-150)@rotax(90)@rotaz(t6),
                trasz(50),
                mh):
        pose=pose@rel
    return pose
def IRB1600newton(vd,sem,mh=np.eye(4)):
    """IK for the IRB 1600 by Newton iteration on a finite-difference Jacobian.

    vd  : 4x4 target pose.
    sem : six seed angles [t1..t6] in degrees.
    mh  : optional tool transform forwarded to IRB1600v.
    Each joint is wrapped into its own window after every update; joints
    t2..t5 get a +100 degree "kick" when the solver converges onto a value
    outside their physical limit, to escape that local solution.
    Returns the solved angles, or the seed unchanged after 100 iterations.

    Fixes: removed a stray debug `print()` in the t4 limit branch and
    deduplicated the hand-expanded 12-term convergence tests.
    """
    d=0.01
    tol=0.0001
    q=list(sem)
    j=np.zeros([12,6])
    b=np.ones([12,1])
    cells=[(m,c) for m in range(3) for c in range(4)]
    # Per-joint wrap window (hi, lo); values follow the IRB 1600 joint limits.
    ventanas=[(180,-180),(150,-90),(79,-238),(155,-155),(135,-90),(200,-200)]
    # Joints whose limit violation triggers the +100 degree kick.
    patadas=[(1,"T2"),(2,"T3"),(3,"T4"),(4,"T5")]
    con=0
    calc=True
    while calc:
        con=con+1
        t=IRB1600v(q[0],q[1],q[2],q[3],q[4],q[5],mh)
        tn=t-vd
        for i,(m,c) in enumerate(cells):
            b[i,0]=tn[m,c]
        for n in range(6):
            qp=list(q)
            qp[n]+=d
            td=IRB1600v(qp[0],qp[1],qp[2],qp[3],qp[4],qp[5],mh)
            tj=(td-t)/d
            for i,(m,c) in enumerate(cells):
                j[i,n]=tj[m,c]
        R=np.linalg.pinv(j)@(-b)
        for n in range(6):
            hi,lo=ventanas[n]
            a=(q[n]+R[n,0])%360
            if a>hi:
                a=-(360-a)
            if a<lo:
                a=360+a
            q[n]=a
        # Kick converged-but-out-of-limit joints; setting b[0,0]=1 also forces
        # at least one more iteration.
        for n,nombre in patadas:
            hi,lo=ventanas[n]
            if (q[n]>hi or q[n]<lo) and np.all(np.abs(b)<0.1):
                print("**********")
                print("**********")
                print(nombre+" fuera de limite")
                print("**********")
                print("**********")
                q[n]=q[n]+100
                b[0,0]=1
        if np.all(np.abs(b)<tol):
            calc=False
        if con>100:                  # give up: fall back to the seed
            calc=False
            q=list(sem)
    tetas=[q[0],q[1],q[2],q[3],q[4],q[5]]
    print(con)
    print(tetas)
    return tetas
def ur5v(t1,t2,t3,t4,t5,t6,mh=np.eye(4),mb=np.eye(4)):
    """Forward kinematics of the UR5 (no drawing).

    Returns the 4x4 end-effector pose; mb is an optional base transform and
    mh an optional tool transform.  Angles are in degrees.
    """
    pose=mb@rotaz(t1)@trasz(89.2)
    for rel in (trasy(-134.2)@rotax(90)@rotaz(t2),
                trasy(425),
                trasz(-118.45)@rotaz(t3),
                trasx(392.25)@rotaz(t4),
                trasz(94.75)@rotax(-90)@rotaz(t5),
                trasz(94.75),
                trasx(82.5)@rotay(90)@rotaz(-90)@rotaz(t6),
                mh):
        pose=pose@rel
    return pose
def ur5newton(vd,sem,mh=np.eye(4),mb=np.eye(4)):
    """IK for the UR5 by Newton iteration on a finite-difference Jacobian.

    vd  : 4x4 target pose.
    sem : six seed angles [t1..t6] in degrees.
    mh/mb : optional tool and base transforms forwarded to ur5v.
    Angles are wrapped into [-180, 180] after each update.  Returns the
    solved angles, or the seed unchanged after 100 iterations.
    """
    d=0.01
    tol=0.0001
    q=list(sem)
    j=np.zeros([12,6])
    b=np.ones([12,1])
    # Residual/Jacobian rows cover the top 3x4 sub-block of the pose.
    cells=[(m,c) for m in range(3) for c in range(4)]
    con=0
    calc=True
    while calc:
        con=con+1
        t=ur5v(q[0],q[1],q[2],q[3],q[4],q[5],mh,mb)
        tn=t-vd
        for i,(m,c) in enumerate(cells):
            b[i,0]=tn[m,c]
        for n in range(6):
            qp=list(q)
            qp[n]+=d
            td=ur5v(qp[0],qp[1],qp[2],qp[3],qp[4],qp[5],mh,mb)
            tj=(td-t)/d
            for i,(m,c) in enumerate(cells):
                j[i,n]=tj[m,c]
        R=np.linalg.pinv(j)@(-b)
        for n in range(6):
            a=(q[n]+R[n,0])%360
            if a>180:
                a=-(360-a)
            if a<-180:
                a=360+a
            q[n]=a
        if np.all(np.abs(b)<tol):
            calc=False
        if con>100:                  # give up: fall back to the seed
            calc=False
            q=list(sem)
    tetas=[q[0],q[1],q[2],q[3],q[4],q[5]]
    print(con)
    print(tetas)
    return tetas
def obtangrotv(t1,t2,t3,p1ea):
    """Apply Z, then Y, then X rotations (degrees) to the pose p1ea."""
    return p1ea@rotaz(t3)@rotay(t2)@rotax(t1)
def obtangrot(p2,p1ea):
    """Find Euler angles [x, y, z] (degrees) taking pose p1ea onto p2.

    Newton iteration on a finite-difference Jacobian over the 3x3 rotation
    residual.  Returns the three angles wrapped into [-180, 180].

    Fix: the give-up branch referenced an undefined name `sem` (NameError at
    runtime); it now falls back to the initial guess [0, 0, 0].
    """
    d=0.01
    tol=0.0001
    q=[0,0,0]
    j=np.zeros([9,3])
    b=np.ones([9,1])
    # Only the 3x3 rotation sub-block participates in the residual.
    cells=[(m,c) for m in range(3) for c in range(3)]
    con=0
    calc=True
    while calc:
        con=con+1
        t=obtangrotv(q[0],q[1],q[2],p1ea)
        tn=t-p2
        for i,(m,c) in enumerate(cells):
            b[i,0]=tn[m,c]
        for n in range(3):
            qp=list(q)
            qp[n]+=d
            td=obtangrotv(qp[0],qp[1],qp[2],p1ea)
            tj=(td-t)/d
            for i,(m,c) in enumerate(cells):
                j[i,n]=tj[m,c]
        R=np.linalg.pinv(j)@(-b)
        for n in range(3):
            a=(q[n]+R[n,0])%360
            if a>180:
                a=-(360-a)
            if a<-180:
                a=360+a
            q[n]=a
        if np.all(np.abs(b)<tol):
            calc=False
        if con>100:                  # give up: fall back to the initial guess
            calc=False
            q=[0,0,0]
    tetas=[q[0],q[1],q[2]]
    print(con)
    print(tetas)
    return tetas
def herramienta1v(z=0,so=6.4):
    """Transform of tool 1 relative to its mounting flange (no drawing).

    z  : rotation of the tool about the flange Z axis (degrees).
    so : stick-out of the tool tip along its final Z axis.
    Returns the 4x4 tool-tip pose.
    """
    pose=rotaz(z)@trasz(100)
    for rel in (trasy(-39),
                trasz(30),
                rotax(-6.21)@trasy(-79),
                trasz(225),
                rotax(-45)@trasz(111),
                trasz(so)):
        pose=pose@rel
    return pose
def herramienta1(mr,z=0,so=6.4):
    """Draw tool 1 mounted on frame mr.

    mr : 4x4 mounting (flange) pose.
    z  : rotation of the tool about the flange Z axis (degrees).
    so : stick-out of the tool tip along its final Z axis.
    Plots the tool's intermediate frames and red link segments on `ax`.
    """
    frames=[mr@rotaz(z)@trasz(100)]
    for rel in (trasy(-39),
                trasz(30),
                rotax(-6.21)@trasy(-79),
                trasz(225),
                rotax(-45)@trasz(111),
                trasz(so)):
        frames.append(frames[-1]@rel)
    for f in frames:
        sistemamovil(f,100)
    cadena=[mr]+frames
    for a,b in zip(cadena,cadena[1:]):
        ax.plot3D([a[0,3],b[0,3]],[a[1,3],b[1,3]],[a[2,3],b[2,3]],color='red')
def animsistemamovilx(t):
    """Animate a moving frame rotating about X, one degree per step for t steps."""
    for paso in range(t):
        ax.cla()
        setaxis(-1,1,-1,1,-1,1)
        r=rotax(paso)
        sistemafijo()
        sistemamovil(r)
        dibujar()
def animsistemamovily(t):
    """Animate a moving frame rotating about Y, one degree per step for t steps."""
    for paso in range(t):
        ax.cla()
        setaxis(-1,1,-1,1,-1,1)
        r=rotay(paso)
        sistemafijo()
        sistemamovil(r)
        dibujar()
def animsistemamovilz(t):
    """Animate the moving frame rotating about the fixed z axis, one degree per frame up to t."""
    ang = 0
    while ang < t:
        ax.cla()
        setaxis()
        frame = rotaz(ang)
        sistemafijo()
        sistemamovil(frame)
        ang += 1
        dibujar()
def muevemoscax(t):
    """Animate a point ('fly') at (0, 0.4, 0.6) being rotated about the x axis, one degree per frame up to t."""
    ang = 0
    while ang < t:
        ax.cla()
        setaxis()
        rot = rotax(ang)
        ax.scatter(0, 0.4, 0.6, 'o')          # original (unrotated) point
        p_uvw = np.array([[0], [0.4], [0.6]])
        p_xyz = np.dot(rot, p_uvw)            # point expressed in the fixed frame
        sistemafijo()
        sistemamovil(rot)
        ax.scatter(p_xyz[0, 0], p_xyz[1, 0], p_xyz[2, 0], 'o')
        ang += 1
        dibujar()
def muevemoscay(t):
    """Animate a point ('fly') at (0, 0.4, 0.6) being rotated about the y axis, one degree per frame up to t."""
    ang = 0
    while ang < t:
        ax.cla()
        setaxis()
        rot = rotay(ang)
        ax.scatter(0, 0.4, 0.6, 'o')          # original (unrotated) point
        p_uvw = np.array([[0], [0.4], [0.6]])
        p_xyz = np.dot(rot, p_uvw)            # point expressed in the fixed frame
        sistemafijo()
        sistemamovil(rot)
        ax.scatter(p_xyz[0, 0], p_xyz[1, 0], p_xyz[2, 0], 'o')
        ang += 1
        dibujar()
def muevemoscaz(t):
    """Animate a point ('fly') at (0, 0.4, 0.6) being rotated about the z axis, one degree per frame up to t."""
    ang = 0
    while ang < t:
        ax.cla()
        setaxis()
        rot = rotaz(ang)
        ax.scatter(0, 0.4, 0.6, 'o')          # original (unrotated) point
        p_uvw = np.array([[0], [0.4], [0.6]])
        p_xyz = np.dot(rot, p_uvw)            # point expressed in the fixed frame
        sistemafijo()
        sistemamovil(rot)
        ax.scatter(p_xyz[0, 0], p_xyz[1, 0], p_xyz[2, 0], 'o')
        ang += 1
        dibujar()
def dibujarcaja(d=1, w=1, l=1, r=0):
    """Draw a wireframe box of depth d (x), width w (y) and length l (z), transformed by r.

    r is expected to be a 4x4 homogeneous transform.  The original default r=0
    multiplied every corner by the scalar 0, degenerating the whole box to the
    origin; a scalar r is now promoted to the identity transform so the default
    call draws the untransformed box.  Also avoids the original's shadowing of
    the parameter `d` by a corner point.
    """
    if np.isscalar(r):
        r = np.eye(4)
    # Homogeneous coordinates of the 8 corners (dtype=object kept as in the original).
    esquinas = [
        (0, 0, 0), (0, 0, l), (0, w, l), (0, w, 0),
        (d, 0, 0), (d, 0, l), (d, w, l), (d, w, 0),
    ]
    pts = [np.dot(r, np.array([[x], [y], [z], [1]], dtype=object))
           for x, y, z in esquinas]
    a, b, c, d2, e, f, g, h = pts
    # The 12 edges of the box.
    aristas = [(a, b), (a, d2), (a, e), (b, c), (b, f), (c, d2),
               (c, g), (d2, h), (e, h), (e, f), (g, f), (g, h)]
    for p, q in aristas:
        ax.plot3D([p[0, 0], q[0, 0]], [p[1, 0], q[1, 0]], [p[2, 0], q[2, 0]], color='red')
def animcajax(t):
    """Animate the unit box rotating about the x axis, one degree per frame up to t."""
    ang = 0
    while ang < t:
        ax.cla()
        setaxis()
        dibujarcaja(r=rotax(ang))
        ang += 1
        sistemafijo()
        dibujar()
def animcajay(t):
    """Animate the unit box rotating about the y axis, one degree per frame up to t."""
    ang = 0
    while ang < t:
        ax.cla()
        setaxis()
        dibujarcaja(r=rotay(ang))
        ang += 1
        sistemafijo()
        dibujar()
def animcajaz(t):
    """Animate the unit box rotating about the z axis, one degree per frame up to t."""
    ang = 0
    while ang < t:
        ax.cla()
        setaxis()
        dibujarcaja(r=rotaz(ang))
        ang += 1
        sistemafijo()
        dibujar()
def animcajaxyz(t1, t2, t3, t4):
    """Animate the box through four successive rotations.

    Phase 1: rotate about z up to t1 degrees.
    Phase 2: rotate about x in the FIXED frame (premultiply) up to t2.
    Phase 3: rotate about y in the MOVING frame (postmultiply) up to t3.
    Phase 4: rotate about x in the fixed frame again up to t4.

    Fix: the original left `r` undefined (UnboundLocalError at `Rc=r`) when
    t1 <= 0; it is now initialized to the identity rotation.
    """
    r = rotaz(0)  # identity; guards against t1 <= 0 leaving r undefined
    n = 0
    while n < t1:
        ax.cla()
        setaxis()
        r = rotaz(n)
        dibujarcaja(r=r)
        n += 1
        sistemafijo()
        dibujar()
    Rc = r
    n = 0
    while n < t2:
        ax.cla()
        setaxis()
        r = np.dot(rotax(n), Rc)   # fixed-frame rotation: premultiply
        dibujarcaja(r=r)
        n += 1
        sistemafijo()
        dibujar()
    Rc = r
    n = 0
    while n < t3:
        ax.cla()
        setaxis()
        r = np.dot(Rc, rotay(n))   # moving-frame rotation: postmultiply
        dibujarcaja(r=r)
        n += 1
        sistemafijo()
        dibujar()
    Rc = r
    n = 0
    while n < t4:
        ax.cla()
        setaxis()
        r = np.dot(rotax(n), Rc)
        dibujarcaja(r=r)
        n += 1
        sistemafijo()
        dibujar()
# Ryft4 Rzft2 Rxft1 I Rxmt3 Rzmt5
def animcajaxyz2(t1, t2, t3, t4, t5):
    """Animate the composite rotation Ry(n4) Rz(n2) Rx(n1) Rx(n3) Rz(n5), sweeping one
    angle at a time (n1 first, then n2, ..., n5) with 1-degree steps.

    Fix/cleanup: the original's dead `Rc=r` / `n=0` statements between phases were
    removed (they also raised NameError when t1 == 0 left r undefined).
    """
    angs = [0, 0, 0, 0, 0]          # current n1..n5
    limites = (t1, t2, t3, t4, t5)
    for j in range(5):
        while angs[j] < limites[j]:
            ax.cla()
            setaxis()
            n1, n2, n3, n4, n5 = angs
            # Fixed rotations (y, z, x) premultiply; moving rotations (x, z) postmultiply.
            r = mmatrix(rotay(n4), rotaz(n2), rotax(n1), rotax(n3), rotaz(n5))
            dibujarcaja(r=r)
            angs[j] += 1
            sistemafijo()
            sistemamovil(r)
            dibujar()
def animcajaxyzt(Dx, t1, t2):
    """Animate the box: translate Dx along x, rotate t1 about the moving z axis, then t2 about the fixed x axis."""
    paso = 0
    while paso < Dx + 0.01:
        ax.cla()
        setaxis(4)
        m = trasx(paso)
        print(m)
        dibujarcaja(r=m)
        paso += 0.2
        sistemafijo()
        sistemamovil(m)
        dibujar()
    acumulada = m
    ang = 0
    while ang < t1 + 0.01:
        ax.cla()
        setaxis(4)
        m = np.dot(acumulada, rotaz(ang))   # moving-frame rotation: postmultiply
        dibujarcaja(r=m)
        ang += 5
        sistemafijo()
        sistemamovil(m)
        dibujar()
    acumulada = m
    ang = 0
    while ang < t2 + 0.01:
        ax.cla()
        setaxis(4)
        m = rotaxf(ang, acumulada)          # fixed-frame rotation helper
        dibujarcaja(r=m)
        ang += 5
        sistemafijo()
        sistemamovil(m)
        dibujar()
def _print_inversas(r):
    """Debug helper: print the transform r, its analytic inverse minv(r), and
    numpy's numeric inverse side by side for comparison."""
    a = minv(r)
    a1 = np.linalg.inv(r)
    print('incio')
    print('r')
    print(np.round(r, 3))
    print('a')
    print(np.round(a, 3))
    print('a1')
    print(np.round(a1, 3))
    print('fin')

def animcajaxyzt2(Dx, Dy, t1, t2):
    """Animate the box: translate Dx on x, then Dy on y (moving frame), rotate t1
    about the moving z axis, then t2 about the fixed x axis.  Each frame also
    dumps the pose and both inverses (analytic vs numpy) for verification.

    Cleanup: the four identical debug-print sections were extracted into
    _print_inversas().
    """
    n = 0
    while n < Dx + 0.01:
        ax.cla()
        setaxis(4)
        r = trasx(n)
        _print_inversas(r)
        dibujarcaja(r=r)
        n += 0.2
        sistemafijo()
        sistemamovil(r)
        dibujar()
    Rc = r
    n = 0
    while n < Dy + 0.01:
        ax.cla()
        setaxis(4)
        r = np.dot(Rc, trasy(n))     # moving-frame translation
        _print_inversas(r)
        dibujarcaja(r=r)
        n += 0.2
        sistemafijo()
        sistemamovil(r)
        dibujar()
    Rc = r
    n = 0
    while n < t1 + 0.01:
        ax.cla()
        setaxis(4)
        r = np.dot(Rc, rotaz(n))     # moving-frame rotation
        _print_inversas(r)
        dibujarcaja(r=r)
        n += 5
        sistemafijo()
        sistemamovil(r)
        dibujar()
    Rc = r
    n = 0
    while n < t2 + 0.01:
        ax.cla()
        setaxis(4)
        r = rotaxf(n, Rc)            # fixed-frame rotation helper
        _print_inversas(r)
        dibujarcaja(r=r)
        n += 5
        sistemafijo()
        sistemamovil(r)
        dibujar()
def animejeresaotro():
    """Demonstrate expressing one frame relative to another.

    Builds frame B (tab) by animated translations/rotation from the origin,
    then frame C (tac) likewise, then computes C relative to B
    (tbc = inv(tab) @ tac) and animates C by modifying that RELATIVE
    transform while B stays fixed.
    NOTE(review): indentation reconstructed from a whitespace-mangled source;
    per-frame dibujar() and end-of-phase accumulator updates assumed.
    """
    # Phase 1: frame B translates +3 on x.
    n=0
    while n<3+0.01:
        ax.cla()
        setaxis(10)
        tab=trasx(n)
        n=n+0.2
        sistemafijo()
        sistemamovil(tab)
        dibujar()
    Rtab=tab
    # Phase 2: then +5 on its (moving) y axis.
    n=0
    while n<5+0.01:
        ax.cla()
        setaxis(10)
        tab=trasy(n)
        tab=np.dot(Rtab,tab)
        n=n+0.2
        sistemafijo()
        sistemamovil(tab)
        dibujar()
    Rtab=tab
    # Phase 3: then rotate 45 deg about its x axis.
    n=0
    while n<45+0.01:
        ax.cla()
        setaxis(10)
        tab=rotax(n)
        tab=np.dot(Rtab,tab)
        n=n+5
        sistemafijo()
        sistemamovil(tab)
        dibujar()
    # Phase 4: frame C translates -5 on x (B stays drawn).
    n=0
    while n>-5-0.01:
        ax.cla()
        setaxis(10)
        tac=trasx(n)
        n=n-0.2
        sistemafijo()
        sistemamovil(tac)
        sistemamovil(tab)
        dibujar()
    Rtac=tac
    # Phase 5: then -4 on its y axis.
    n=0
    while n>-4-0.01:
        ax.cla()
        setaxis(10)
        tac=trasy(n)
        tac=np.dot(Rtac,tac)
        n=n-0.2
        sistemafijo()
        sistemamovil(tac)
        sistemamovil(tab)
        dibujar()
    # C expressed relative to B: tbc = inv(tab) @ tac.
    tba=minv(tab)
    tbc=np.dot(tba,tac)
    # Phase 6: move C by editing the relative transform, then map back to world.
    n=0
    while n>-6-0.01:
        ax.cla()
        setaxis(10)
        #ntbc=rotazf(n,tbc)
        ntbc=np.dot(trasy(n),tbc)
        tac=np.dot(tab,ntbc)
        n=n-0.2
        sistemafijo()
        sistemamovil(tac)
        sistemamovil(tab)
        dibujar()
def animppp(d1, d2, d3):
    """Animate the PPP (three prismatic joints) robot, extending each joint in turn in 0.2 steps up to d1, d2, d3."""
    vals = [0, 0, 0]
    limites = (d1, d2, d3)
    for j in range(3):
        while vals[j] < limites[j] + 0.01:
            ax.cla()
            setaxis(10)
            ppp(vals[0], vals[1], vals[2])
            vals[j] += 0.2
            dibujar()
def animrpp(t1, d2, d3):
    """Animate the RPP robot: sweep the revolute joint (5-deg steps) then the two prismatic joints (0.2 steps); prismatic joints start extended at 2 and 1."""
    vals = [0, 2, 1]
    limites = (t1, d2, d3)
    pasos = (5, 0.2, 0.2)
    for j in range(3):
        while vals[j] < limites[j] + 0.01:
            ax.cla()
            setaxis(5)
            rpp(vals[0], vals[1], vals[2])
            vals[j] += pasos[j]
            dibujar()
def animrrp(t1, t2, d3):
    """Animate the RRP robot: sweep both revolute joints (5-deg steps) then the prismatic joint (0.2 steps, starting at 1)."""
    vals = [0, 0, 1]
    limites = (t1, t2, d3)
    pasos = (5, 5, 0.2)
    for j in range(3):
        while vals[j] < limites[j] + 0.01:
            ax.cla()
            setaxis(5)
            rrp(vals[0], vals[1], vals[2])
            vals[j] += pasos[j]
            dibujar()
def animrrr(t1, t2, t3):
    """Animate the RRR robot, sweeping each revolute joint in turn in 5-degree steps up to t1, t2, t3."""
    vals = [0, 0, 0]
    limites = (t1, t2, t3)
    for j in range(3):
        while vals[j] < limites[j] + 0.01:
            ax.cla()
            setaxis(5)
            rrr(vals[0], vals[1], vals[2])
            vals[j] += 5
            dibujar()
def animscara(t1, t2, d3, t4):
    """Animate the SCARA robot: two revolute joints (5-deg steps), the prismatic joint (0.2 steps, starting at 1), then the wrist joint (5-deg steps)."""
    vals = [0, 0, 1, 0]
    limites = (t1, t2, d3, t4)
    pasos = (5, 5, 0.2, 5)
    for j in range(4):
        while vals[j] < limites[j] + 0.01:
            ax.cla()
            setaxis(5)
            scara(vals[0], vals[1], vals[2], vals[3])
            vals[j] += pasos[j]
            dibujar()
def animcobras800(t1, t2, d3, t4):
    """Animate the Cobra s800 SCARA, sweeping each of its four joints in turn in steps of 5 (the third, prismatic joint starts at 1)."""
    vals = [0, 0, 1, 0]
    limites = (t1, t2, d3, t4)
    for j in range(4):
        while vals[j] < limites[j] + 0.01:
            ax.cla()
            setaxis(1000)
            cobras800(vals[0], vals[1], vals[2], vals[3])
            vals[j] += 5
            dibujar()
def animur5(t1, t2, t3, t4, t5, t6):
    """Animate the UR5, sweeping each of its six joints in turn in 5-degree steps.

    NOTE: a later `def animur5` in this file (different signature, drives the
    motoman model) shadows this definition at import time.
    """
    vals = [0, 0, 0, 0, 0, 0]
    limites = (t1, t2, t3, t4, t5, t6)
    for j in range(6):
        while vals[j] < limites[j] + 0.01:
            ax.cla()
            setaxis(1000)
            ur5(vals[0], vals[1], vals[2], vals[3], vals[4], vals[5])
            vals[j] += 5
            dibujar()
def animur5(tb, t1a, t2a, t3a, t4a, t5a, t6a, t7a, t1b, t2b, t3b, t4b, t5b, t6b, t7b):
    """Animate the dual-arm Motoman: sweep the seven joints of arm A, then the
    seven of arm B, then the torso (tb) last, all in 5-degree steps.

    NOTE: despite its name this drives motoman(), and it redefines (shadows)
    the earlier single-arm animur5 with a different signature — only this
    version is callable at runtime.
    """
    # vals order matches motoman(): [nb, n1a..n7a, n1b..n7b].
    vals = [0] * 15
    limites = (tb, t1a, t2a, t3a, t4a, t5a, t6a, t7a,
               t1b, t2b, t3b, t4b, t5b, t6b, t7b)
    # Sweep order: arm-A joints (1-7), arm-B joints (8-14), torso (0) last.
    for j in list(range(1, 15)) + [0]:
        while vals[j] < limites[j] + 0.01:
            ax.cla()
            setaxis(15)
            motoman(*vals)
            vals[j] += 5
            dibujar()
def animIRB1600(t1, t2, t3, t4, t5, t6):
    """Animate the ABB IRB1600, sweeping each of its six joints in turn in 5-degree steps."""
    vals = [0, 0, 0, 0, 0, 0]
    limites = (t1, t2, t3, t4, t5, t6)
    for j in range(6):
        while vals[j] < limites[j] + 0.01:
            ax.cla()
            setaxis(1500)
            IRB1600(vals[0], vals[1], vals[2], vals[3], vals[4], vals[5])
            vals[j] += 5
            dibujar()
def animIRB1600newton():
    """Animate the IRB1600 following a tool-frame path solved by Newton IK.

    Four phases relative to the reached pose: +200 on x, -45 deg about x,
    +300 on y, +200 on z.  `sem` seeds the Newton solver with the previous
    solution (warm start).
    NOTE(review): indentation reconstructed from a whitespace-mangled source —
    the per-frame seed update and the end-of-phase `vd=vdn` chaining should be
    confirmed against the original.
    """
    n1=0
    n2=0
    n3=0
    n4=0
    vd=IRB1600v(0,0,0,0,0,0)   # current desired tool pose (home configuration)
    sem=[0,0,0,0,0,0]          # IK seed
    while n1<200+0.01:
        vdn=vd@trasx(n1)       # this frame's target pose
        ax.cla()
        setaxis(1500)
        tetas=IRB1600newton(vdn,sem)   # inverse kinematics
        IRB1600(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        n1=n1+20
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn                     # chain the next phase off the reached pose
    while n2>-45-0.01:
        vdn=vd@rotax(n2)
        ax.cla()
        setaxis(1500)
        tetas=IRB1600newton(vdn,sem)
        IRB1600(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        n2=n2-5
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn
    while n3<300+0.01:
        vdn=vd@trasy(n3)
        ax.cla()
        setaxis(1500)
        tetas=IRB1600newton(vdn,sem)
        IRB1600(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        n3=n3+20
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn
    while n4<200+0.01:
        vdn=vd@trasz(n4)
        ax.cla()
        setaxis(1500)
        tetas=IRB1600newton(vdn,sem)
        IRB1600(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        n4=n4+20
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn
def animIRB1600newtoncirculo(r):
    """Trace a circle of radius r with the IRB1600 tool using Newton IK, streaming
    each solution to the external RoboDK-style `robot` with MoveJ.
    NOTE(review): indentation reconstructed; seed update assumed per frame.
    """
    n1=0
    vd=IRB1600v(0,0,0,0,0,0)   # start pose (home configuration)
    sem=[0,0,0,0,0,0]          # IK seed (warm start)
    while n1<2*np.pi+0.01:
        # Circle point; x is shifted by -1 so the path starts at the current pose.
        x=np.cos(n1)
        y=np.sin(n1)
        x=x-1
        vdn=vd@trasx(-x*r)
        vdn=vdn@trasy(-y*r)
        ax.cla()
        setaxis(1500)
        tetas=IRB1600newton(vdn,sem)
        IRB1600(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        robot.MoveJ(tetas)     # mirror the motion on the external robot
        n1=n1+(np.pi/30)
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn
def animIRB1600newtoncirculoh1(r):
    """Trace a circle of radius r with tool 1 (herramienta1) mounted on the IRB1600,
    via Newton IK; streams each solution to the external `robot`.
    NOTE(review): indentation reconstructed; seed update assumed per frame.
    """
    n1=0
    mh=herramienta1v(-90)          # tool transform appended to the kinematic chain
    vd=IRB1600v(0,0,0,0,0,0,mh)
    sem=[0,0,0,0,0,0]
    while n1<2*np.pi+0.01:
        x=np.cos(n1)
        y=np.sin(n1)
        x=x-1                       # start the circle at the current pose
        vdn=vd@rotax(45)            # presumably compensates the tool's 45-deg bend — verify
        vdn=vdn@trasx(-x*r)
        vdn=vdn@trasy(-y*r)
        ax.cla()
        setaxis(1500)
        tetas=IRB1600newton(vdn,sem,mh)
        IRB1600(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        mr=IRB1600v(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        herramienta1(mr,-90)        # draw the tool at the solved flange pose
        robot.MoveJ(tetas)
        n1=n1+(np.pi/30)
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn
def animur5newtoncirculo(r):
    """Trace a circle of radius r with the UR5 via Newton IK and stream the joints
    to the external `robot`.
    NOTE(review): indentation reconstructed; seed update assumed per frame.
    """
    n1=0
    vd=ur5v(0,0,0,0,0,0)
    sem=[0,0,0,0,0,0]
    while n1<2*np.pi+0.01:
        x=np.cos(n1)
        y=np.sin(n1)
        x=x-1                   # start the circle at the current pose
        #vdn=vd@rotax(-90)
        vdn=vd@trasy(-x*r)
        vdn=vdn@trasx(y*r)
        ax.cla()
        setaxis(1500)
        tetas=ur5newton(vdn,sem)
        ur5(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        # Convert to the real controller's joint-zero convention
        # (offsets on joints 2, 3 and 5) — presumably; verify against the robot.
        valores1=tetas.copy()
        valores1[1]=valores1[1]-90
        valores1[2]=valores1[2]-90
        valores1[4]=valores1[4]+90
        valores1[4]=valores1[4]%360
        robot.MoveJ(valores1)
        n1=n1+(np.pi/30)
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn
def animur5newtoncirculoh1(r):
    """Trace a circle of radius r with tool 1 mounted on the UR5, via Newton IK;
    streams the converted joints to the external `robot`.
    NOTE(review): indentation reconstructed; seed update assumed per frame.
    """
    n1=0
    mh=herramienta1v()             # tool transform appended to the chain
    vd=ur5v(0,0,0,0,0,0,mh)
    sem=[0,0,0,0,0,0]
    while n1<2*np.pi+0.01:
        x=np.cos(n1)
        y=np.sin(n1)
        x=x-1                      # start the circle at the current pose
        vdn=vd@rotax(51.21)        # presumably compensates the tool's bend (45 + 6.21 deg) — verify
        vdn=vdn@trasy(-x*r)
        vdn=vdn@trasx(y*r)
        ax.cla()
        setaxis(1500)
        tetas=ur5newton(vdn,sem,mh)
        ur5(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        mr=ur5v(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        herramienta1(mr)           # draw the tool at the solved flange pose
        # Convert to the real controller's joint-zero convention — verify.
        valores1=tetas.copy()
        valores1[1]=valores1[1]-90
        valores1[2]=valores1[2]-90
        valores1[4]=valores1[4]+90
        valores1[4]=valores1[4]%360
        robot.MoveJ(valores1)
        n1=n1+(np.pi/30)
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn
def animur5newtoncirculome(r):
    """Trace a circle with one UR5 while a second UR5 (based at mb, rotated 180 deg)
    follows a mirrored target 200 above and flipped; both solved with Newton IK.
    The MoveJ streaming lines are left commented out.
    NOTE(review): indentation reconstructed; seed update assumed per frame.
    """
    n1=0
    mb=trasx(1200)@rotaz(180)@trasy(220)   # base transform of the second robot
    vd=ur5v(0,0,0,0,0,0)
    sem=[0,0,0,0,0,0]
    while n1<2*np.pi+0.01:
        x=np.cos(n1)
        y=np.sin(n1)
        x=x-1                               # start the circle at the current pose
        #vdn=vd@rotax(-90)
        vdn=vd@trasy(-x*r)
        vdn=vdn@trasx(y*r)
        vdn2=vdn@trasz(200)@rotax(180)@rotaz(180)   # mirrored target for robot 2
        ax.cla()
        setaxis(1500)
        tetas=ur5newton(vdn,sem)
        tetas2=ur5newton(vdn2,sem,mb=mb)
        ur5(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        ur5(tetas2[0],tetas2[1],tetas2[2],tetas2[3],tetas2[4],tetas2[5],mb)
        # Convert both to the real controllers' joint-zero convention — verify.
        valores1=tetas.copy()
        valores1e=tetas2.copy()
        valores1[1]=valores1[1]-90
        valores1[2]=valores1[2]-90
        valores1[4]=valores1[4]+90
        valores1[4]=valores1[4]%360
        valores1e[1]=valores1e[1]-90
        valores1e[2]=valores1e[2]-90
        valores1e[4]=valores1e[4]+90
        valores1e[4]=valores1e[4]%360
        #robot.MoveJ(valores1)
        #robote.MoveJ(valores1e)
        n1=n1+(np.pi/30)
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn
def animur5newtoncirculoh1me(r):
    """Trace a circle with a UR5 carrying tool 1 while a second UR5 (based at mb)
    follows a mirrored target; both solved with Newton IK.  MoveJ streaming is
    commented out.
    NOTE(review): indentation reconstructed; seed update assumed per frame.
    """
    n1=0
    mb=trasx(1250)@rotaz(180)@trasy(220)   # base transform of the second robot
    mh=herramienta1v()                     # tool transform for robot 1
    vd=ur5v(0,0,0,0,0,0,mh)
    sem=[0,0,0,0,0,0]
    while n1<2*np.pi+0.01:
        x=np.cos(n1)
        y=np.sin(n1)
        x=x-1                               # start the circle at the current pose
        vdn=vd@rotax(0)                     # placeholder orientation tweak (identity)
        vdn=vdn@trasy(-x*r)
        vdn=vdn@trasx(y*r)
        vdn2=vdn@trasz(200)@rotax(180)@rotaz(180)   # mirrored target for robot 2
        ax.cla()
        setaxis(1500)
        tetas=ur5newton(vdn,sem,mh)
        tetas2=ur5newton(vdn2,sem,mb=mb)
        ur5(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        ur5(tetas2[0],tetas2[1],tetas2[2],tetas2[3],tetas2[4],tetas2[5],mb)
        mr=ur5v(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5])
        herramienta1(mr)                    # draw the tool on robot 1
        # Convert to the real controller's joint-zero convention — verify.
        valores1=tetas.copy()
        valores1[1]=valores1[1]-90
        valores1[2]=valores1[2]-90
        valores1[4]=valores1[4]+90
        valores1[4]=valores1[4]%360
        #robot.MoveJ(valores1)
        n1=n1+(np.pi/30)
        dibujar()
        sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5]]
    vd=vdn
# --- Serial link to the Arduino joystick console and global jog state ---
arduino=serial.Serial("COM5",9600,timeout=1)  # NOTE(review): hard-coded Windows COM port
time.sleep(1)  # give the board time to reset after the port opens
leyendo=True   # run flag for the reader thread (cleared by cerrar())
# Home pose of the dual-arm Motoman and the resulting tool-frame transforms.
mtr1,mtr2=accmotomanv(0,90,-90,-90,-130,0,-60,90,90,90,-90,-130,0,-60,90)
tetas=[0,90,-90,-90,-130,0,-60,90,90,90,-90,-130,0,-60,90]  # current 15 joint values
movesel=1                        # move type recorded between waypoints: 1=MoveJ, 2=MoveL, else MoveC
puntosmove=[0,0,0,0,0,0,0,0,0]   # move type chosen for each recorded segment
selbrazo=1;                      # 1=arm 1, 2=arm 2, 3=both arms coupled
selbrazopuntos=[0,0,0,0,0,0,0,0,0]
mtr12=mtr1.copy()                # relative transform arm1 -> arm2 used in coupled mode
modo=1                           # jog mode: 1=Joint, 2=Tool, 3=World
puntos=np.zeros((10,4,4))        # cartesian waypoints (NOTE(review): never written by grabar())
tetaspuntos=np.zeros((10,15))    # joint-space waypoints
puntonum=-1                      # index of the last recorded waypoint (-1 = none)
# Joystick calibration: map each raw ADC axis [0,1023] onto the jog range [-10,10].
x1omin=0
x1omax=1023
x1nmin=-10
x1nmax=10
x1or=(x1omax-x1omin)
x1nr=(x1nmax-x1nmin)
x2omin=0
x2omax=1023
x2nmin=-10
x2nmax=10
x2or=(x2omax-x2omin)
x2nr=(x2nmax-x2nmin)
x3omin=0
x3omax=1023
x3nmin=-10
x3nmax=10
x3or=(x3omax-x3omin)
x3nr=(x3nmax-x3nmin)
y1omin=0
y1omax=1023
y1nmin=-10
y1nmax=10
y1or=(y1omax-y1omin)
y1nr=(y1nmax-y1nmin)
y2omin=0
y2omax=1023
y2nmin=-10
y2nmax=10
y2or=(y2omax-y2omin)
y2nr=(y2nmax-y2nmin)
y3omin=0
y3omax=1023
y3nmin=-10
y3nmax=10
y3or=(y3omax-y3omin)
y3nr=(y3nmax-y3nmin)
def leerarduino():
    """Background-thread body: poll the Arduino joystick console and jog the robot.

    Each cycle writes "0" to request a sample, reads six comma-separated ADC
    values ("x1,y1,x2,y2,x3,y3"), rescales them to [-10,10], applies a dead
    zone, and moves the selected arm according to the global jog mode:
      modo 1 -- Joint: axes add directly onto the joint angles
      modo 2 -- Tool:  small translations/rotations composed in the tool frame
      modo 3 -- World: composed in the fixed (world) frame
    selbrazo picks arm 1, arm 2, or 3 = both arms coupled through mtr12.
    Runs until the global `leyendo` flag is cleared.
    NOTE(review): indentation reconstructed from a whitespace-mangled source.
    """
    global selbrazo
    global leyendo
    global modo
    global mtr1
    global mtr2
    global mtr12
    # IK seed: last known configuration (warm start for motomannewton2).
    sem=[0,90,-90,-90,-130,0,-60,90,90,90,-90,-130,0,-60,90]
    global tetas
    time.sleep(1)
    while leyendo:
        # Request one sample and parse "x1,y1,x2,y2,x3,y3".
        mensaje="0"
        arduino.write(mensaje.encode('ascii'))
        vals=arduino.readline().decode('ascii')
        print(vals)
        pos1=vals.index(",")
        x1=vals[0:pos1]
        pos2=vals.index(",",(pos1+1))
        y1=vals[pos1+1:pos2]
        pos3=vals.index(",",(pos2+1))
        x2=vals[pos2+1:pos3]
        pos4=vals.index(",",(pos3+1))
        y2=vals[pos3+1:pos4]
        pos5=vals.index(",",(pos4+1))
        x3=vals[pos4+1:pos5]
        y3=vals[pos5+1:]
        x1=int(x1)
        x2=int(x2)
        x3=int(x3)
        y1=int(y1)
        y2=int(y2)
        y3=int(y3)
        # Rescale raw ADC counts to the configured jog range.
        x1n=(((x1-x1omin)*x1nr)/x1or)+x1nmin
        x2n=(((x2-x2omin)*x2nr)/x2or)+x2nmin
        x3n=(((x3-x3omin)*x3nr)/x3or)+x3nmin
        y1n=(((y1-y1omin)*y1nr)/y1or)+y1nmin
        y2n=(((y2-y2omin)*y2nr)/y2or)+y2nmin
        y3n=(((y3-y3omin)*y3nr)/y3or)+y3nmin
        # Dead zone: ignore joystick drift below 1.8 units.
        if (abs(x1n)<1.8):
            x1n=0
        if (abs(x2n)<1.8):
            x2n=0
        if (abs(x3n)<1.8):
            x3n=0
        if (abs(y1n)<1.8):
            y1n=0
        if (abs(y2n)<1.8):
            y2n=0
        if (abs(y3n)<1.8):
            y3n=0
        print(x1n)
        print(x2n)
        print(x3n)
        print(y1n)
        print(y2n)
        print(y3n)
        if modo==1:
            # Joint jog: the six axes map directly onto joints of the chosen arm.
            if selbrazo==1:
                tetas[1]=tetas[1]+x1n
                tetas[2]=tetas[2]+y1n
                tetas[3]=tetas[3]+x2n
                tetas[4]=tetas[4]+y2n
                tetas[5]=tetas[5]+x3n
                tetas[6]=tetas[6]+y3n
                tetas[7]=tetas[7]
                ax.cla()
                setaxis(1000)
                accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                mtr1,mtr2=accmotomanv(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                dibujar()
            elif selbrazo==2:
                tetas[8]=tetas[8]+x1n
                tetas[9]=tetas[9]+y1n
                tetas[10]=tetas[10]+x2n
                tetas[11]=tetas[11]+y2n
                tetas[12]=tetas[12]+x3n
                tetas[13]=tetas[13]+y3n
                tetas[14]=tetas[14]
                ax.cla()
                setaxis(1000)
                accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                mtr1,mtr2=accmotomanv(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                dibujar()
            elif selbrazo==3:
                # Coupled mode: jog arm 1, force arm 2 to keep the frozen
                # relative pose mtr12, then solve both arms with Newton IK.
                tetas[1]=tetas[1]+x1n
                tetas[2]=tetas[2]+y1n
                tetas[3]=tetas[3]+x2n
                tetas[4]=tetas[4]+y2n
                tetas[5]=tetas[5]+x3n
                tetas[6]=tetas[6]+y3n
                mtr1,mtr2=accmotomanv(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                mtr2=mtr1@mtr12
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                tetas=motomannewton2(mtr1,mtr2,sem)
                ax.cla()
                setaxis(1000)
                accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                mtr1,mtr2=accmotomanv(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                dibujar()
                pass
        elif modo==2:
            # Tool jog: compose the increments in the selected arm's tool frame
            # (postmultiply), then solve both arms with Newton IK.
            if selbrazo==1:
                ax.cla()
                setaxis(1000)
                mtr1=mtr1@trasx(x1n*5)
                mtr1=mtr1@rotax(y1n)
                mtr1=mtr1@trasy(x2n*5)
                mtr1=mtr1@rotay(y2n)
                mtr1=mtr1@trasz(x3n*5)
                mtr1=mtr1@rotaz(y3n)
                tetas=motomannewton2(mtr1,mtr2,sem)
                accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                dibujar()
            elif selbrazo==2:
                ax.cla()
                setaxis(1000)
                mtr2=mtr2@trasx(x1n*5)
                mtr2=mtr2@rotax(y1n)
                mtr2=mtr2@trasy(x2n*5)
                mtr2=mtr2@rotay(y2n)
                mtr2=mtr2@trasz(x3n*5)
                mtr2=mtr2@rotaz(y3n)
                tetas=motomannewton2(mtr1,mtr2,sem)
                accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                dibujar()
            elif selbrazo==3:
                # Coupled: jog arm 1's tool frame, slave arm 2 through mtr12.
                ax.cla()
                setaxis(1000)
                mtr1=mtr1@trasx(x1n*5)
                mtr1=mtr1@rotax(y1n)
                mtr1=mtr1@trasy(x2n*5)
                mtr1=mtr1@rotay(y2n)
                mtr1=mtr1@trasz(x3n*5)
                mtr1=mtr1@rotaz(y3n)
                mtr2=mtr1@mtr12
                tetas=motomannewton2(mtr1,mtr2,sem)
                accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                dibujar()
        elif modo==3:
            # World jog: compose the increments in the fixed frame
            # (premultiply / *f helpers), then solve with Newton IK.
            if selbrazo==1:
                ax.cla()
                setaxis(1000)
                mtr1=trasx(x1n*5)@mtr1
                mtr1=rotaxf(y1n,mtr1)
                mtr1=trasy(x2n*5)@mtr1
                mtr1=rotayf(y2n,mtr1)
                mtr1=trasz(x3n*5)@mtr1
                mtr1=rotazf(y3n,mtr1)
                tetas=motomannewton2(mtr1,mtr2,sem)
                accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                dibujar()
            elif selbrazo==2:
                ax.cla()
                setaxis(1000)
                mtr2=trasx(x1n*5)@mtr2
                mtr2=rotaxf(y1n,mtr2)
                mtr2=trasy(x2n*5)@mtr2
                mtr2=rotayf(y2n,mtr2)
                mtr2=trasz(x3n*5)@mtr2
                mtr2=rotazf(y3n,mtr2)
                tetas=motomannewton2(mtr1,mtr2,sem)
                accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                dibujar()
            elif selbrazo==3:
                # Coupled: world-jog arm 1, slave arm 2 through mtr12.
                ax.cla()
                setaxis(1000)
                mtr1=trasx(x1n*5)@mtr1
                mtr1=rotaxf(y1n,mtr1)
                mtr1=trasy(x2n*5)@mtr1
                mtr1=rotayf(y2n,mtr1)
                mtr1=trasz(x3n*5)@mtr1
                mtr1=rotazf(y3n,mtr1)
                mtr2=mtr1@mtr12
                tetas=motomannewton2(mtr1,mtr2,sem)
                accmotoman(tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14])
                sem=[tetas[0],tetas[1],tetas[2],tetas[3],tetas[4],tetas[5],tetas[6],tetas[7],tetas[8],tetas[9],tetas[10],tetas[11],tetas[12],tetas[13],tetas[14]]
                dibujar()
    print("finalizando hilo")
# --- Matplotlib control-panel widgets (axes rectangles are [left,bottom,width,height]) ---
axbtnvalm=plt.axes([0.42,0.06,0.15,0.06])
axbtnmove=plt.axes([0.6,0.06,0.15,0.06])
axbtnmode=plt.axes([0.42,0.13,0.15,0.06])
axbtn1=plt.axes([0.85,0.01,0.12,0.08])
axtxtpuntos = plt.axes([0.02, 0.2, 0.34, 0.75])
axbtnsecuencia=plt.axes([0.04,0.13,0.22,0.06])
axbtnreset=plt.axes([0.04,0.06,0.11,0.06])
axrbtnselbrazo=plt.axes([0.27,0.02,0.14,0.175])
btnmodo=Button(axbtnmode,'modo: Joint')        # cycles jog mode (Joint/Tool/World)
btnvalm=Button(axbtnvalm,'Grabar')             # records the current pose as a waypoint
btnmove=Button(axbtnmove,'Move J')             # cycles move type (J/L/C) for recording
button1=Button(axbtn1,'cerrar')                # shuts everything down
btnsecuencia=Button(axbtnsecuencia,'Realizar secuencia')  # replays recorded waypoints
btnreset=Button(axbtnreset,'Reset')            # clears recorded waypoints
txtpuntos = TextBox(axtxtpuntos, '',initial='Empieza a grabar puntos')  # program listing
radioselbrazo = RadioButtons(axrbtnselbrazo, ('Brazo 1', 'Brazo 2', '2 Brazos'))
# Draw the robot in its home pose.
ax.cla()
setaxis(1000)
accmotoman(0,90,-90,-90,-130,0,-60,90,90,90,-90,-130,0,-60,90)
dibujar()
def grabar(event):
    """Button callback: record the current joint set (tetas) as the next waypoint
    and refresh the program listing in the text box."""
    global puntos
    global puntonum
    global tetas
    global tetaspuntos
    global movesel
    global puntosmove
    if puntonum >= 9:
        print("No hay mas espacios para grabar")
        return
    puntonum += 1
    tetaspuntos[puntonum, :] = tetas
    print(puntos)
    print(tetaspuntos)
    if puntonum > 0:
        # Remember which move type links the previous waypoint to this one.
        puntosmove[puntonum - 1] = movesel
    nombres = {1: "MoveJ", 2: "MoveL"}
    renglones = []
    for k in range(puntonum):
        mv = nombres.get(puntosmove[k], "MoveC")
        renglones.append("p" + str(k) + " p" + str(k + 1) + " " + mv + "\n")
    txtpuntos.set_val("".join(renglones))
def cerrar(event):
    """Button callback: stop the reader thread, close the UI and the serial port, and exit."""
    global leyendo
    leyendo=False        # signals the leerarduino() loop to finish
    plt.close()
    hilo1.join()         # wait for the reader thread to end
    arduino.close()
    sys.exit()
def mode(event):
    """Button callback: cycle the jog mode 1->2->3->1 (Joint -> Tool -> World)
    and update the button label accordingly."""
    global modo
    ciclo = {1: (2, "modo: Tool"), 2: (3, "modo: World"), 3: (1, "modo: Joint")}
    if modo in ciclo:
        modo, etiqueta = ciclo[modo]
        btnmodo.label.set_text(etiqueta)
def move(event):
    """Button callback: cycle the recorded move type 1->2->3->1
    (MoveJ -> MoveL -> MoveC) and update the button label accordingly."""
    global movesel
    ciclo = {1: (2, "Move L"), 2: (3, "Move C"), 3: (1, "Move J")}
    if movesel in ciclo:
        movesel, etiqueta = ciclo[movesel]
        btnmove.label.set_text(etiqueta)
def realizarsecuencia(event):
    """Button callback: replay the recorded waypoint sequence.

    Between consecutive waypoints it uses the move type stored in puntosmove:
    1 = motomanmovej, 2 = motomanmovel, otherwise ur5movec with cartesian points.
    The configuration reached after each move seeds the next one.
    """
    global puntos
    global puntonum
    global tetas
    global tetaspuntos
    global puntosmove
    p1=np.zeros((4,4))
    p2=np.zeros((4,4))
    tetas1=[0,0,0,0,0,0]
    tetas2=[0,0,0,0,0,0]
    n=0
    if puntonum>0:
        # Start from the first recorded waypoint.
        tetaanterior=tetas1=tetaspuntos[n,:]
        while n<puntonum:
            if puntosmove[n]==1:
                tetas1=tetaspuntos[n,:]
                tetas2=tetaspuntos[n+1,:]
                tetaanterior=motomanmovej(tetaanterior,tetas2)
            elif puntosmove[n]==2:
                tetas1=tetaspuntos[n,:]
                tetas2=tetaspuntos[n+1,:]
                tetaanterior=motomanmovel(tetaanterior,tetas2)
            else:
                # MoveC uses cartesian waypoints.  NOTE(review): `puntos` is never
                # filled by grabar(), so p1/p2 are zero matrices here — verify.
                tetas1=tetaspuntos[n,:]
                p1=puntos[n,:,:]
                p2=puntos[n+1,:,:]
                ur5movec(p1,p2,tetas1)
            n=n+1
        tetas=tetaanterior
    else:
        print("No hay suficientes puntos grabados")
def resetboton(event):
    """Button callback: forget all recorded waypoints and clear the program text box."""
    global puntonum
    puntonum=-1      # -1 means "no waypoints recorded yet"
    txtpuntos.set_val("")
def brazoseleccionado(label):
    """Radio-button callback: select arm 1, arm 2, or coordinated dual-arm mode.

    Entering '2 Brazos' freezes the current relative pose between the two
    tool frames (mtr12) so arm 2 follows arm 1 from then on.
    """
    global selbrazo
    global mtr1
    global mtr2
    global mtr12
    seleccion = {'Brazo 1': 1, 'Brazo 2': 2, '2 Brazos': 3}
    if label in seleccion:
        selbrazo = seleccion[label]
    if label == '2 Brazos':
        mtr12 = minv(mtr1) @ mtr2
# Wire the widget callbacks and launch the serial-reader thread.
btnmodo.on_clicked(mode)
button1.on_clicked(cerrar)
btnvalm.on_clicked(grabar)
btnmove.on_clicked(move)
btnreset.on_clicked(resetboton)
btnsecuencia.on_clicked(realizarsecuencia)
radioselbrazo.on_clicked(brazoseleccionado)
# BUG FIX: the original passed target=leerarduino() — that CALLED leerarduino()
# in the main thread (blocking there forever) and handed its return value (None)
# to Thread.  Pass the function object itself so it runs on the background thread.
hilo1=threading.Thread(target=leerarduino,daemon=True)
hilo1.start()
#Motoman CSDA10F
| 28.795073
| 576
| 0.485818
| 18,654
| 106,369
| 2.769058
| 0.040635
| 0.015681
| 0.029427
| 0.047083
| 0.797073
| 0.76964
| 0.756689
| 0.742653
| 0.732567
| 0.715317
| 0
| 0.217844
| 0.298978
| 106,369
| 3,693
| 577
| 28.80287
| 0.474875
| 0.005566
| 0
| 0.730963
| 0
| 0
| 0.011264
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024889
| false
| 0.000296
| 0.002963
| 0
| 0.035852
| 0.026963
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f961ff0e25c580ccc483ad9b0bd213f02cd67b0d
| 115
|
py
|
Python
|
modules/__init__.py
|
isacolak/HeppaBOT
|
c8c7b638f6f530d714219a319875bb43f113315d
|
[
"MIT"
] | null | null | null |
modules/__init__.py
|
isacolak/HeppaBOT
|
c8c7b638f6f530d714219a319875bb43f113315d
|
[
"MIT"
] | null | null | null |
modules/__init__.py
|
isacolak/HeppaBOT
|
c8c7b638f6f530d714219a319875bb43f113315d
|
[
"MIT"
] | null | null | null |
from modules.classes import *
from modules.utils import *
from modules import errors
from modules import constants
| 23
| 29
| 0.826087
| 16
| 115
| 5.9375
| 0.4375
| 0.463158
| 0.357895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13913
| 115
| 4
| 30
| 28.75
| 0.959596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f9977ecbfc076f12b454e209f20e5debc41ef143
| 200
|
py
|
Python
|
mysite/poorslaves/admin.py
|
andyil/nemala
|
433b53dbe96e3f8e0b26f09ad45adcd86ac08b6d
|
[
"MIT"
] | null | null | null |
mysite/poorslaves/admin.py
|
andyil/nemala
|
433b53dbe96e3f8e0b26f09ad45adcd86ac08b6d
|
[
"MIT"
] | null | null | null |
mysite/poorslaves/admin.py
|
andyil/nemala
|
433b53dbe96e3f8e0b26f09ad45adcd86ac08b6d
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from django.contrib import admin
from .models import Document, Answer
admin.site.register(Document)
admin.site.register(Answer)
| 22.222222
| 37
| 0.78
| 27
| 200
| 5.777778
| 0.444444
| 0.128205
| 0.217949
| 0.294872
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 200
| 9
| 38
| 22.222222
| 0.917647
| 0.13
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f9980fa61f1c25d056aa8ff40858aff87b643da0
| 11,778
|
py
|
Python
|
experiment/jobutil.py
|
shibaji7/Codebase_Kp_Prediction
|
431700a9ac0d9fc99bb3aed6cc86c261007f4a15
|
[
"Apache-2.0"
] | 1
|
2020-12-02T20:13:40.000Z
|
2020-12-02T20:13:40.000Z
|
experiment/jobutil.py
|
shibaji7/Codebase_Kp_Prediction
|
431700a9ac0d9fc99bb3aed6cc86c261007f4a15
|
[
"Apache-2.0"
] | null | null | null |
experiment/jobutil.py
|
shibaji7/Codebase_Kp_Prediction
|
431700a9ac0d9fc99bb3aed6cc86c261007f4a15
|
[
"Apache-2.0"
] | 2
|
2020-12-19T15:18:12.000Z
|
2022-03-31T13:42:44.000Z
|
import numpy as np
import pandas as pd
import datetime as dt
import database as db
from models import DetRegressor,Classifier,LSTMClassifier,LSTMRegressor,LSTMDataSource,GPRegressor,DeepGPR,DeepGPRegressor
run_without_goes_machine = True
if run_without_goes_machine:
###########################################################################################################
## Main body of the program to run storm events "without goes data".
###########################################################################################################
case = -1
reg_name_list = ["regression","elasticnet","bayesianridge","dtree","etree","knn","ada","bagging","etrees","gboost","randomforest"]
training_winows = range(1,108,7)#[14, 27, 54, 81, 108, 135, 162, 189, 216, 243, 270, 297, 324, 351, 378]
is_mix_model = False
dates = pd.read_csv("stormlist.csv")
dates.dates = pd.to_datetime(dates.dates)
_o, xparams, yparam_clf = db.load_data()
dataset = db.load_data(case=1)
#reg_name_list = ["dtree"]
## Case for all deterministic models
if case == 0:
source = None
if is_mix_model:
source = LSTMDataSource(_o, xparams, ["K_P_LT_delay"], yparam_clf)
clf = LSTMClassifier(source=source)
else: clf = Classifier()
for name in reg_name_list:
for trw in training_winows:
for date in dates.dates.tolist():
if date >= dt.datetime(2000,1,1) and date < dt.datetime(2014,1,1):
for h in range(8):
hour_date = date.to_pydatetime() + dt.timedelta(hours=h*3)
print "Execute outputs for one model DateTime - ",name,",",hour_date
try:
det_reg = DetRegressor(name, hour_date, dataset, clf, window=trw, alt_window=trw,#np.random.randint(28,56)+trw,
is_mix_model=is_mix_model, source=source)
det_reg.run_model()
except: pass
pass
pass
pass
pass
pass
pass
## Case for all deterministic LSTM model
elif case == 1:
source = LSTMDataSource(_o, xparams, ["K_P_LT_delay"], yparam_clf)
clf = LSTMClassifier(source=source)
loop_back = 3
for trw in training_winows:
for date in dates.dates.tolist():
if date >= dt.datetime(2000,1,1) and date < dt.datetime(2014,1,1):
for h in range(8):
hour_date = date.to_pydatetime() + dt.timedelta(hours=h*3)
print "Execute outputs for one model DateTime - LSTM",",",hour_date
try:
lstm_reg = LSTMRegressor(hour_date, source, clf, window=trw, alt_window=np.random.randint(28,56)+trw)
lstm_reg.run_model()
except: pass
pass
pass
pass
pass
pass
## Case for all GP model
elif case == 2:
source = LSTMDataSource(_o, xparams, ["K_P_LT_delay"], yparam_clf)
clf = LSTMClassifier(source=source)
k_type = "Matern"
for trw in training_winows:
for date in dates.dates.tolist():
if date >= dt.datetime(2000,1,1) and date < dt.datetime(2014,1,1):
for h in range(8):
hour_date = date.to_pydatetime() + dt.timedelta(hours=h*3)
print "Execute outputs for one model DateTime - GPR",",",hour_date
try:
gp_reg = GPRegressor(hour_date, dataset, clf, k_type, window=trw, source=source,alt_window=np.random.randint(28,56)+trw)
gp_reg.run_model()
except: pass
pass
pass
pass
pass
pass
## Case for all LSTM GP model
elif case == 3:
source = LSTMDataSource(_o, xparams, ["K_P_LT_delay"], yparam_clf)
clf = LSTMClassifier(source=source)
k_type = "RBF"
for trw in training_winows:
for date in dates.dates.tolist():
if date >= dt.datetime(2000,1,1) and date < dt.datetime(2014,1,1):
for h in range(8):
hour_date = date.to_pydatetime() + dt.timedelta(hours=h*3)
print "Execute outputs for one model DateTime - GPR",",",hour_date
#try:
deep_gp = DeepGPRegressor(hour_date, source, clf, k_type, window=trw, alt_window=np.random.randint(28,56)+trw)
success = deep_gp.run_model()
#except: pass
pass
pass
pass
pass
pass
## Case for all deep GPR
elif case == 4:
source = LSTMDataSource(_o, xparams, ["K_P_LT_delay"], yparam_clf)
clf = LSTMClassifier(source=source)
for name in reg_name_list:
for trw in training_winows:
for date in dates.dates.tolist():
if date >= dt.datetime(2000,1,1) and date < dt.datetime(2014,1,1):
for h in range(8):
hour_date = date.to_pydatetime() + dt.timedelta(hours=h*3)
print "Execute outputs for one model DateTime - ",name,",",hour_date
try:
dgp = DeepGPR(name, hour_date, dataset, clf, window=trw, alt_window=np.random.randint(28,56)+trw, source=source)
dgp.run_model()
except: pass
pass
pass
pass
pass
pass
pass
pass
else:
###########################################################################################################
## Main body of the program to run storm events "with goes data".
###########################################################################################################
case = -1
reg_name_list = ["regression","elasticnet","bayesianridge","dtree","etree","knn","ada","bagging","etrees","gboost","randomforest"]
training_winows = [14, 27, 54, 81, 108, 135, 162, 189, 216, 243, 270, 297, 324, 351, 378]
is_mix_model = True
dates = pd.read_csv("stormlist.csv")
dates.dates = pd.to_datetime(dates.dates)
#clf = Classifier(is_goes=True)
reg_name_list, training_winows = ["regression"], [14,27,54,81,108]
## Case for all deterministic models
if case == 0:
source = None
if is_mix_model:
_o, xparams, yparam_clf = db.load_data()
source = LSTMDataSource(_o, xparams, ["K_P_LT_delay"], yparam_clf)
clf = LSTMClassifier(source=source)
else: clf = Classifier(is_goes=True)
_o, xparams, yparam_clf = db.load_data_RB()
dataset = db.load_data_RB(case=1)
for name in reg_name_list:
for trw in training_winows:
for date in dates.dates.tolist():
if date >= dt.datetime(2000,1,1) and date < dt.datetime(2014,1,1):
for h in range(8):
hour_date = date.to_pydatetime() + dt.timedelta(hours=h*3)
print "Execute outputs for one model DateTime - ",name,",",hour_date
#try:
det_reg = DetRegressor(name, hour_date, dataset, clf, window=trw, alt_window=np.random.randint(28,56)+trw,
is_mix_model=is_mix_model, source=source)
det_reg.run_model()
#except: pass
pass
pass
pass
pass
pass
pass
## Case for all mix regression GP models
elif case == 1:
_o, xparams, yparam_clf = db.load_data()
source = LSTMDataSource(_o, xparams, ["K_P_LT_delay"], yparam_clf)
clf = LSTMClassifier(source=source)
_o, xparams, yparam_clf = db.load_data_RB()
dataset = db.load_data_RB(case=1)
for name in reg_name_list:
for trw in training_winows:
for date in dates.dates.tolist():
if date >= dt.datetime(2000,1,1) and date < dt.datetime(2014,1,1):
for h in range(8):
hour_date = date.to_pydatetime() + dt.timedelta(hours=h*3)
print "Execute outputs for one model DateTime - ",name,",",hour_date
try:
dgp = DeepGPR(name, hour_date, dataset, clf, window=trw, alt_window=np.random.randint(28,56)+trw, source=source)
dgp.run_model()
except: pass
pass
pass
pass
pass
pass
pass #EOD
## Case for all deep GPR 2 month periods
case = -1
if case == 0:
reg_name_list = ["regression","elasticnet","bayesianridge","dtree","etree","knn","ada","bagging","etrees","gboost","randomforest"]
training_winows = [14, 27, 54, 81]
dates = [dt.datetime(2004,7,1) + dt.timedelta(days=i) for i in range(62)]
_o, xparams, yparam_clf = db.load_data()
dataset = db.load_data_RB(case=1)
source = LSTMDataSource(_o, xparams, ["K_P_LT_delay"], yparam_clf)
clf = LSTMClassifier(source=source)
for name in reg_name_list:
for trw in training_winows:
for date in dates:
for h in range(8):
hour_date = date + dt.timedelta(hours=h*3)
print "Execute outputs for one model,trw DateTime - ",name,",",trw,",",hour_date
rnt = np.random.randint(28,56)+trw
det_reg = DetRegressor(name, hour_date, dataset, clf, window=trw, alt_window=rnt,
is_mix_model=True, source=source)
det_reg.run_model()
pass
pass
pass
pass
pass
elif case == 1:
_o, xparams, yparam_clf = db.load_data()
#dataset = db.load_data(case=1)
dataset = db.load_data_RB(case=1)
source = LSTMDataSource(_o, xparams, ["K_P_LT_delay"], yparam_clf)
clf = LSTMClassifier(source=source)
name= "regression"
fname = "../out/storm/prediction_RB_lstm_mixgp.csv"
training_winows = [27]
sdate, edate = dt.datetime(2001,1,1), dt.datetime(2011,1,1)
ndates = int((edate-sdate).total_seconds()/(60.*60.*24.))
print ndates*8
dates = [dt.datetime(2001,1,1) + dt.timedelta(days=i) for i in range(ndates)]
for trw in training_winows:
for date in dates:
for h in range(8):
hour_date = date + dt.timedelta(hours=h*3)
print "Execute outputs for one model,trw DateTime - ",name,",",trw,",",hour_date
rnt = np.random.randint(28,56) + trw
try:
dgp = DeepGPR(name, hour_date, dataset, clf, window=trw, alt_window=rnt, source=source, fname=fname)
dgp.run_model()
pass
except: pass
pass
pass
pass
pass
pass
| 41.914591
| 148
| 0.501189
| 1,351
| 11,778
| 4.210215
| 0.119171
| 0.068917
| 0.084388
| 0.087201
| 0.857243
| 0.837377
| 0.82507
| 0.819093
| 0.813643
| 0.791139
| 0
| 0.040054
| 0.366191
| 11,778
| 280
| 149
| 42.064286
| 0.721902
| 0.051112
| 0
| 0.807175
| 0
| 0
| 0.078581
| 0.003826
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.255605
| 0.022422
| null | null | 0.044843
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
f9b216d4dd0bc09944c4193be83f5e5cf4aa93b4
| 8,132
|
py
|
Python
|
skidl/libs/hc11_sklib.py
|
arjenroodselaar/skidl
|
0bf801bd3b74e6ef94bd9aa1b68eef756b568276
|
[
"MIT"
] | 700
|
2016-08-16T21:12:50.000Z
|
2021-10-10T02:15:18.000Z
|
skidl/libs/hc11_sklib.py
|
0dvictor/skidl
|
458709a10b28a864d25ae2c2b44c6103d4ddb291
|
[
"MIT"
] | 118
|
2016-08-16T20:51:05.000Z
|
2021-10-10T08:07:18.000Z
|
skidl/libs/hc11_sklib.py
|
0dvictor/skidl
|
458709a10b28a864d25ae2c2b44c6103d4ddb291
|
[
"MIT"
] | 94
|
2016-08-25T14:02:28.000Z
|
2021-09-12T05:17:08.000Z
|
from skidl import SKIDL, TEMPLATE, Part, Pin, SchLib
# Version tag of the SKiDL library format used by this (generated) file.
SKIDL_lib_version = '0.0.1'

# Schematic library for Motorola 68HC11-family microcontrollers.
# Each Part lists every package pin with its number, name, and electrical
# function (PWRIN/BIDIR/OUTPUT; input when omitted) so SKiDL's ERC can
# validate connections.  Pins are grouped by tens, not in numeric order.
hc11 = SchLib(tool=SKIDL).add_parts(*[
        # MC68HC11A8 (aliases A7/A1/A0): 8K ROM, 256B RAM, 512B EEPROM.
        Part(name='MC68HC11A8CC',dest=TEMPLATE,tool=SKIDL,keywords='HC11 MCU Microcotroller',description='8K ROM, 256B RAM, 512B EEPROM',ref_prefix='U',num_units=1,do_erc=True,aliases=['MC68HC11A7CC', 'MC68HC11A1CC', 'MC68HC11A0CC'],pins=[
            Pin(num='1',name='VSS',func=Pin.PWRIN,do_erc=True),
            Pin(num='2',name='MODB',do_erc=True),
            Pin(num='3',name='MODA',func=Pin.BIDIR,do_erc=True),
            Pin(num='4',name='AS',func=Pin.OUTPUT,do_erc=True),
            Pin(num='5',name='E',func=Pin.OUTPUT,do_erc=True),
            Pin(num='6',name='R/W',func=Pin.OUTPUT,do_erc=True),
            Pin(num='7',name='EXTAL',do_erc=True),
            Pin(num='8',name='XTAL',func=Pin.OUTPUT,do_erc=True),
            Pin(num='9',name='AD0/PC0',func=Pin.BIDIR,do_erc=True),
            Pin(num='10',name='AD1/PC1',func=Pin.BIDIR,do_erc=True),
            Pin(num='20',name='RXD/PD0',func=Pin.BIDIR,do_erc=True),
            Pin(num='30',name='PA4',func=Pin.OUTPUT,do_erc=True),
            Pin(num='40',name='A10/PB2',func=Pin.OUTPUT,do_erc=True),
            Pin(num='50',name='PE7',do_erc=True),
            Pin(num='11',name='AD2/PC2',func=Pin.BIDIR,do_erc=True),
            Pin(num='21',name='TXD/PD1',func=Pin.BIDIR,do_erc=True),
            Pin(num='31',name='PA3',func=Pin.OUTPUT,do_erc=True),
            Pin(num='41',name='A9/PB1',func=Pin.OUTPUT,do_erc=True),
            Pin(num='51',name='VRL',do_erc=True),
            Pin(num='12',name='AD3/PC3',func=Pin.BIDIR,do_erc=True),
            Pin(num='22',name='MIS/PD2',func=Pin.BIDIR,do_erc=True),
            Pin(num='32',name='PA2',do_erc=True),
            Pin(num='42',name='A8/PB0',func=Pin.OUTPUT,do_erc=True),
            Pin(num='52',name='VRH',do_erc=True),
            Pin(num='13',name='AD4/PC4',func=Pin.BIDIR,do_erc=True),
            Pin(num='23',name='MOS/PD3',func=Pin.BIDIR,do_erc=True),
            Pin(num='33',name='PA1',do_erc=True),
            Pin(num='43',name='PE0',do_erc=True),
            Pin(num='14',name='AD5/PC5',func=Pin.BIDIR,do_erc=True),
            Pin(num='24',name='SCK/PD4',func=Pin.BIDIR,do_erc=True),
            Pin(num='34',name='PA0',do_erc=True),
            Pin(num='44',name='PE4',do_erc=True),
            Pin(num='15',name='AD6/PC6',func=Pin.BIDIR,do_erc=True),
            Pin(num='25',name='SS/PD5',func=Pin.BIDIR,do_erc=True),
            Pin(num='35',name='A15/PB7',func=Pin.OUTPUT,do_erc=True),
            Pin(num='45',name='PE1',do_erc=True),
            Pin(num='16',name='AD7/PC7',func=Pin.BIDIR,do_erc=True),
            Pin(num='26',name='VDD',func=Pin.PWRIN,do_erc=True),
            Pin(num='36',name='A14/PB6',func=Pin.OUTPUT,do_erc=True),
            Pin(num='46',name='PE5',do_erc=True),
            Pin(num='17',name='~RESET',do_erc=True),
            Pin(num='27',name='PA7',func=Pin.BIDIR,do_erc=True),
            Pin(num='37',name='A13/PB5',func=Pin.OUTPUT,do_erc=True),
            Pin(num='47',name='PE2',do_erc=True),
            Pin(num='18',name='~XIRQ',do_erc=True),
            Pin(num='28',name='PA6',func=Pin.OUTPUT,do_erc=True),
            Pin(num='38',name='A12/PB4',func=Pin.OUTPUT,do_erc=True),
            Pin(num='48',name='PE6',do_erc=True),
            Pin(num='19',name='~IRQ',do_erc=True),
            Pin(num='29',name='PA5',func=Pin.OUTPUT,do_erc=True),
            Pin(num='39',name='A11/PB3',func=Pin.OUTPUT,do_erc=True),
            Pin(num='49',name='PE3',do_erc=True)]),
        # MC68HC11F1: ROMless, 1K RAM, 512B EEPROM, PLCC-68 footprint.
        Part(name='MC68HC11F1CC',dest=TEMPLATE,tool=SKIDL,keywords='HC11 MCU Microcontroller',description='ROMless, 1K RAM, 512B EEPROM, PLCC-68',ref_prefix='U',num_units=1,fplist=['PLCC-68*'],do_erc=True,pins=[
            Pin(num='1',name='VSS',func=Pin.PWRIN,do_erc=True),
            Pin(num='2',name='MODB',do_erc=True),
            Pin(num='3',name='MODA',func=Pin.BIDIR,do_erc=True),
            Pin(num='4',name='E',func=Pin.OUTPUT,do_erc=True),
            Pin(num='5',name='R/W',func=Pin.OUTPUT,do_erc=True),
            Pin(num='6',name='EXTAL',func=Pin.OUTPUT,do_erc=True),
            Pin(num='7',name='XTAL',do_erc=True),
            Pin(num='8',name='4XOUT',func=Pin.OUTPUT,do_erc=True),
            Pin(num='9',name='D0/PC0',func=Pin.BIDIR,do_erc=True),
            Pin(num='10',name='D1/PC1',func=Pin.BIDIR,do_erc=True),
            Pin(num='20',name='CSPROG/PG7',func=Pin.BIDIR,do_erc=True),
            Pin(num='30',name='MIS/PD2',func=Pin.BIDIR,do_erc=True),
            Pin(num='40',name='PA2',func=Pin.BIDIR,do_erc=True),
            Pin(num='50',name='A8/PB0',func=Pin.OUTPUT,do_erc=True),
            Pin(num='60',name='PE4',do_erc=True),
            Pin(num='11',name='D2/PC2',func=Pin.BIDIR,do_erc=True),
            Pin(num='21',name='CSGEN/PG6',func=Pin.BIDIR,do_erc=True),
            Pin(num='31',name='MOS/PD3',func=Pin.BIDIR,do_erc=True),
            Pin(num='41',name='PA1',func=Pin.BIDIR,do_erc=True),
            Pin(num='51',name='A7/PF7',func=Pin.OUTPUT,do_erc=True),
            Pin(num='61',name='PE1',do_erc=True),
            Pin(num='12',name='D3/PC3',func=Pin.BIDIR,do_erc=True),
            Pin(num='22',name='CSIO1/PG5',func=Pin.BIDIR,do_erc=True),
            Pin(num='32',name='SCK/PD4',func=Pin.BIDIR,do_erc=True),
            Pin(num='42',name='PA0',func=Pin.BIDIR,do_erc=True),
            Pin(num='52',name='A6/PF6',func=Pin.OUTPUT,do_erc=True),
            Pin(num='62',name='PE5',do_erc=True),
            Pin(num='13',name='D4/PC4',func=Pin.BIDIR,do_erc=True),
            Pin(num='23',name='CSIO2/PG4',func=Pin.BIDIR,do_erc=True),
            Pin(num='33',name='SS/PD5',func=Pin.BIDIR,do_erc=True),
            Pin(num='43',name='A15/PB7',func=Pin.OUTPUT,do_erc=True),
            Pin(num='53',name='A5/PF5',func=Pin.OUTPUT,do_erc=True),
            Pin(num='63',name='PE2',do_erc=True),
            Pin(num='14',name='D5/PC5',func=Pin.BIDIR,do_erc=True),
            Pin(num='24',name='PG3',func=Pin.BIDIR,do_erc=True),
            Pin(num='34',name='VDD',func=Pin.PWRIN,do_erc=True),
            Pin(num='44',name='A14/PB6',func=Pin.OUTPUT,do_erc=True),
            Pin(num='54',name='A4/PF4',func=Pin.OUTPUT,do_erc=True),
            Pin(num='64',name='PE6',do_erc=True),
            Pin(num='15',name='D6/PC6',func=Pin.BIDIR,do_erc=True),
            Pin(num='25',name='PG2',func=Pin.BIDIR,do_erc=True),
            Pin(num='35',name='PA7',func=Pin.BIDIR,do_erc=True),
            Pin(num='45',name='A13/PB5',func=Pin.OUTPUT,do_erc=True),
            Pin(num='55',name='A3/PF3',func=Pin.OUTPUT,do_erc=True),
            Pin(num='65',name='PE3',do_erc=True),
            Pin(num='16',name='D7/PC7',func=Pin.BIDIR,do_erc=True),
            Pin(num='26',name='PG1',func=Pin.BIDIR,do_erc=True),
            Pin(num='36',name='PA6',func=Pin.BIDIR,do_erc=True),
            Pin(num='46',name='A12/PB4',func=Pin.OUTPUT,do_erc=True),
            Pin(num='56',name='A2/PF2',func=Pin.OUTPUT,do_erc=True),
            Pin(num='66',name='PE7',do_erc=True),
            Pin(num='17',name='~RESET',do_erc=True),
            Pin(num='27',name='PG0',func=Pin.BIDIR,do_erc=True),
            Pin(num='37',name='PA5',func=Pin.BIDIR,do_erc=True),
            Pin(num='47',name='A11/PB3',func=Pin.OUTPUT,do_erc=True),
            Pin(num='57',name='A1/PF1',func=Pin.OUTPUT,do_erc=True),
            Pin(num='67',name='VRL',do_erc=True),
            Pin(num='18',name='~XIRQ',do_erc=True),
            Pin(num='28',name='RXD/PD0',func=Pin.BIDIR,do_erc=True),
            Pin(num='38',name='PA4',func=Pin.BIDIR,do_erc=True),
            Pin(num='48',name='A10/PB2',func=Pin.OUTPUT,do_erc=True),
            Pin(num='58',name='A0/PF0',func=Pin.OUTPUT,do_erc=True),
            Pin(num='68',name='VRH',do_erc=True),
            Pin(num='19',name='~IRQ',do_erc=True),
            Pin(num='29',name='TXD/PD1',func=Pin.BIDIR,do_erc=True),
            Pin(num='39',name='PA3',func=Pin.BIDIR,do_erc=True),
            Pin(num='49',name='A9/PB1',func=Pin.OUTPUT,do_erc=True),
            Pin(num='59',name='PE0',do_erc=True)])])
| 63.53125
| 239
| 0.57243
| 1,363
| 8,132
| 3.320616
| 0.168745
| 0.134777
| 0.242598
| 0.312859
| 0.878922
| 0.870526
| 0.86213
| 0.724039
| 0.602077
| 0.602077
| 0
| 0.064397
| 0.197983
| 8,132
| 127
| 240
| 64.031496
| 0.629561
| 0
| 0
| 0.096
| 0
| 0
| 0.122971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008
| 0
| 0.008
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ddcb83ce99bdc846995cc244959a274c04a2f639
| 7,653
|
py
|
Python
|
00 UNICEF/03 Data New/aedes-main/aedes/automl_utils.py
|
Cirrolytix/aedes_unicef_2022
|
23a26d57d5316ba44d573b4c1dcefcad4e10b157
|
[
"MIT"
] | null | null | null |
00 UNICEF/03 Data New/aedes-main/aedes/automl_utils.py
|
Cirrolytix/aedes_unicef_2022
|
23a26d57d5316ba44d573b4c1dcefcad4e10b157
|
[
"MIT"
] | null | null | null |
00 UNICEF/03 Data New/aedes-main/aedes/automl_utils.py
|
Cirrolytix/aedes_unicef_2022
|
23a26d57d5316ba44d573b4c1dcefcad4e10b157
|
[
"MIT"
] | null | null | null |
import pandas as pd
import matplotlib.pyplot as plt
import joblib
import warnings
import os
from tpot import TPOTClassifier, TPOTRegressor
from sklearn.cluster import KMeans as km
warnings.filterwarnings('ignore')
def perform_classification(X, y,
                           max_time_mins=10,
                           max_eval_time_mins=0.05,
                           folder_path="",
                           model_name="best_model.pkl",
                           pipeline_name="best_model_pipeline.py",
                           cv=10,
                           scoring='f1',
                           show_feature_importances=True
                           ):
    """
    Perform limited automl classification
    as described in this documentation https://epistasislab.github.io/tpot/.
    The output model follows sklearn-like modules like .score, .predict, etc

    Input
        X: dataframe of predictors
        y: Series or dataframe to be predicted (Classification)
        max_time_mins: float value in minutes for maximum training time
        max_eval_time_mins: float value in minutes for max time per pipeline
        folder_path: String for path to store files/models into
        model_name: String to name the best model's pickle file
        pipeline_name: String to name the best model pipeline python script
        cv: integer for number of cross-validations to perform
        scoring: classification scoring metric (sklearn scorer name)
        show_feature_importances: boolean that dictates showing/non-showing of feature importance plot

    Returns:
        extracted_best_model: final estimator extracted from the best TPOT pipeline
        feat_importances_df: dataframe of features and feature importances
    """
    # Configure the TPOT classification search.
    model = TPOTClassifier(generations=20,
                           population_size=50,
                           cv=cv,
                           scoring=scoring,
                           verbosity=2,
                           random_state=42,
                           n_jobs=-1,
                           max_time_mins=max_time_mins,
                           max_eval_time_mins=max_eval_time_mins
                           )
    # Fit X and y into model and find the best pipeline.
    model.fit(X, y)
    best_model_pipeline = model.fitted_pipeline_
    # Persist the fitted pipeline and its generated python script.
    joblib.dump(best_model_pipeline, os.path.join(folder_path, model_name))
    model.export(os.path.join(folder_path, pipeline_name))
    # Final estimator of the winning pipeline.
    extracted_best_model = model.fitted_pipeline_.steps[-1][1]
    # Refit the extracted best model on complete rows only; the boolean mask
    # keeps y aligned with X.dropna().
    extracted_best_model.fit(X.dropna(), y[X.isna().sum(axis=1)==0])
    # NOTE(review): assumes the final estimator exposes feature_importances_
    # (tree-based models); other estimator types would raise here — confirm.
    feat_importances_df = (pd.DataFrame({'Columns':X.columns,
                                         'Feature Importances': extracted_best_model.feature_importances_})
                           .set_index('Columns')
                           .sort_values('Feature Importances', ascending=False))
    # Show feature importances as dictated by the input flag.
    if show_feature_importances:
        feat_importances_df.plot(kind='barh', figsize=(20, 10))
        plt.gca().invert_yaxis()
    # Output prompt to locate the model and pipeline.
    if folder_path == "":
        prompt_path = 'the same directory as this code'
    else:
        prompt_path = folder_path
    print(f"Best model pickle file and best model pipeline saved to {prompt_path}.")
    return extracted_best_model, feat_importances_df
def perform_regression(X, y,
                       max_time_mins=10,
                       max_eval_time_mins=0.05,
                       folder_path="",
                       model_name="best_model.pkl",
                       pipeline_name="best_model_pipeline.py",
                       cv=10,
                       scoring='neg_mean_squared_error',
                       show_feature_importances=True
                       ):
    """
    Perform limited automl regression
    as described in this documentation https://epistasislab.github.io/tpot/.
    The output model follows sklearn-like modules like .score, .predict, etc

    Input
        X: dataframe of predictors
        y: Series or dataframe to be predicted (Regression)
        max_time_mins: float value in minutes for maximum training time
        max_eval_time_mins: float value in minutes for max time per pipeline
        folder_path: String for path to store files/models into
        model_name: String to name the best model's pickle file
        pipeline_name: String to name the best model pipeline python script
        cv: integer for number of cross-validations to perform
        scoring: regression scoring metric (sklearn scorer name)
        show_feature_importances: boolean that dictates showing/non-showing of feature importance plot

    Returns:
        extracted_best_model: final estimator extracted from the best TPOT pipeline
        feat_importances_df: dataframe of features and feature importances
    """
    # Configure the TPOT regression search.
    # BUGFIX: this function previously built a TPOTClassifier, which is wrong
    # for regression targets and rejects regression scorers such as
    # 'neg_mean_squared_error'; TPOTRegressor is the correct estimator.
    model = TPOTRegressor(generations=20,
                          population_size=50,
                          cv=cv,
                          scoring=scoring,
                          verbosity=2,
                          random_state=42,
                          n_jobs=-1,
                          max_time_mins=max_time_mins,
                          max_eval_time_mins=max_eval_time_mins
                          )
    # Fit X and y into model and find the best pipeline.
    model.fit(X, y)
    best_model_pipeline = model.fitted_pipeline_
    # Persist the fitted pipeline and its generated python script.
    joblib.dump(best_model_pipeline, os.path.join(folder_path, model_name))
    model.export(os.path.join(folder_path, pipeline_name))
    # Final estimator of the winning pipeline.
    extracted_best_model = model.fitted_pipeline_.steps[-1][1]
    # Refit the extracted best model on complete rows only; the boolean mask
    # keeps y aligned with X.dropna().
    extracted_best_model.fit(X.dropna(), y[X.isna().sum(axis=1)==0])
    # NOTE(review): assumes the final estimator exposes feature_importances_
    # (tree-based models); other estimator types would raise here — confirm.
    feat_importances_df = (pd.DataFrame({'Columns':X.columns,
                                         'Feature Importances': extracted_best_model.feature_importances_})
                           .set_index('Columns')
                           .sort_values('Feature Importances', ascending=False))
    # Show feature importances as dictated by the input flag.
    if show_feature_importances:
        feat_importances_df.plot(kind='barh', figsize=(20, 10))
        plt.gca().invert_yaxis()
    # Output prompt to locate the model and pipeline.
    if folder_path == "":
        prompt_path = 'the same directory as this code'
    else:
        prompt_path = folder_path
    print(f"Best model pickle file and best model pipeline saved to {prompt_path}.")
    return extracted_best_model, feat_importances_df
def perform_clustering(df,
                       features=['longitude', 'latitude', 'ndvi', 'ndbi', 'ndwi', 'ndmi',
                                 'surface_temperature', 'precipitation_rate', 'relative_humidity'],
                       n_clusters=5):
    """
    Fit a KMeans model on a preset list of feature columns of *df*
    and return the fitted clustering model.
    """
    # Keep only the requested features, discarding columns that are all-NaN.
    feature_frame = df[features].dropna(axis=1, how='all')
    # Fixed random_state keeps the cluster assignment reproducible.
    model = km(n_clusters=n_clusters, random_state=42)
    return model.fit(feature_frame)
| 39.246154
| 102
| 0.608912
| 890
| 7,653
| 5.041573
| 0.225843
| 0.07622
| 0.053042
| 0.026744
| 0.876309
| 0.876309
| 0.876309
| 0.876309
| 0.876309
| 0.850903
| 0
| 0.009872
| 0.324971
| 7,653
| 194
| 103
| 39.448454
| 0.858691
| 0.357507
| 0
| 0.77551
| 0
| 0
| 0.107477
| 0.014019
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030612
| false
| 0.020408
| 0.214286
| 0
| 0.27551
| 0.020408
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb004be1584c4f0b9463a5deb7447995052e57dc
| 4,401
|
py
|
Python
|
download_fonts.py
|
abhabongse/xelatex-tenth
|
5cfa505af760fd1f4474511cb699e04fd03338ea
|
[
"Apache-2.0"
] | 1
|
2017-10-15T09:31:43.000Z
|
2017-10-15T09:31:43.000Z
|
download_fonts.py
|
abhabongse/latex-tenth
|
5cfa505af760fd1f4474511cb699e04fd03338ea
|
[
"Apache-2.0"
] | 5
|
2017-04-26T01:14:12.000Z
|
2017-10-09T10:43:41.000Z
|
download_fonts.py
|
abhabongse/xelatex-tenth
|
5cfa505af760fd1f4474511cb699e04fd03338ea
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""
Run this script to download required fonts
from GitHub repository into local fonts directory.
"""
import os
import shutil
import sys
from urllib.parse import urlsplit
from urllib.request import urlopen
# Direct-download URLs for every required font face, fetched from each
# project's upstream GitHub repository by download_file().
FONT_URLS = [
    # Sarabun (Thai/Latin), all weights with italics, TTF.
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-Thin.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-ThinItalic.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-ExtraLight.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-ExtraLightItalic.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-Light.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-LightItalic.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-Regular.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-Italic.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-Medium.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-MediumItalic.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-SemiBold.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-SemiBoldItalic.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-Bold.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-BoldItalic.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-ExtraBold.ttf',
    'https://github.com/cadsondemak/Sarabun/raw/master/fonts/Sarabun-ExtraBoldItalic.ttf',
    # FiraGO, Roman and Italic cuts, OTF.
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Roman/FiraGO-ExtraLight.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Italic/FiraGO-ExtraLightItalic.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Roman/FiraGO-Light.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Italic/FiraGO-LightItalic.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Roman/FiraGO-Book.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Italic/FiraGO-BookItalic.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Roman/FiraGO-Regular.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Italic/FiraGO-Italic.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Roman/FiraGO-Medium.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Italic/FiraGO-MediumItalic.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Roman/FiraGO-SemiBold.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Italic/FiraGO-SemiBoldItalic.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Roman/FiraGO-Bold.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Italic/FiraGO-BoldItalic.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Roman/FiraGO-ExtraBold.otf',
    'https://github.com/bBoxType/FiraGO/raw/master/Fonts/FiraGO_OTF_1001/Italic/FiraGO-ExtraBoldItalic.otf',
    # Fira Code (monospace), TTF.
    'https://github.com/tonsky/FiraCode/raw/master/distr/ttf/FiraCode-Light.ttf',
    'https://github.com/tonsky/FiraCode/raw/master/distr/ttf/FiraCode-Regular.ttf',
    'https://github.com/tonsky/FiraCode/raw/master/distr/ttf/FiraCode-Medium.ttf',
    'https://github.com/tonsky/FiraCode/raw/master/distr/ttf/FiraCode-SemiBold.ttf',
    'https://github.com/tonsky/FiraCode/raw/master/distr/ttf/FiraCode-Bold.ttf',
    'https://github.com/tonsky/FiraCode/raw/master/distr/ttf/FiraCode-Retina.ttf',
]
def main():
    """Download every font listed in FONT_URLS into the fonts/ directory."""
    for url in FONT_URLS:
        download_file(url)
def download_file(url):
    """Fetch *url* into the ``fonts/`` directory next to this script."""
    # Target filename comes from the last component of the URL path.
    filename = os.path.basename(urlsplit(url).path)
    script_dir = os.path.dirname(os.path.abspath(__file__))
    local_path = os.path.join(script_dir, 'fonts', filename)
    # Make sure the fonts/ directory exists before writing into it.
    os.makedirs(os.path.dirname(local_path), exist_ok=True)
    # Progress goes to stderr so stdout stays clean.
    print(f"Downloading to fonts/ directory: {url}", file=sys.stderr)
    with urlopen(url) as response, open(local_path, 'wb') as fobj:
        shutil.copyfileobj(response, fobj)
# Script entry point: download all fonts when executed directly.
if __name__ == '__main__':
    main()
| 57.907895
| 109
| 0.760509
| 616
| 4,401
| 5.339286
| 0.159091
| 0.12709
| 0.161751
| 0.108544
| 0.728185
| 0.709942
| 0.709942
| 0.709942
| 0.709942
| 0.693828
| 0
| 0.016049
| 0.079755
| 4,401
| 75
| 110
| 58.68
| 0.796049
| 0.042263
| 0
| 0
| 0
| 0.655172
| 0.773787
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.086207
| 0
| 0.12069
| 0.017241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
34ab0611115c452a703abe24b8d689149e89de23
| 4,012
|
py
|
Python
|
amazsel/pages/base_page.py
|
Remek953/Amazsel
|
565b2b08553113bda7606d3f0844ffa039742186
|
[
"MIT"
] | null | null | null |
amazsel/pages/base_page.py
|
Remek953/Amazsel
|
565b2b08553113bda7606d3f0844ffa039742186
|
[
"MIT"
] | null | null | null |
amazsel/pages/base_page.py
|
Remek953/Amazsel
|
565b2b08553113bda7606d3f0844ffa039742186
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
class LocatorType:
    # String constants identifying how an element locator tuple's first
    # item should be interpreted by BasePage.  NOTE(review): values appear
    # to mirror selenium's By.* strings — confirm against the selenium
    # version in use.
    CLASS_NAME = 'class name'
    CSS_SELECTOR = 'css selector'
    ID = 'id'
    LINK_TEXT = 'link text'
    NAME = 'name'
    XPATH = 'xpath'
class BasePage(object):
    """Common page-object base: explicit waits keyed by locator type.

    A locator is a 2-tuple ``(locator_type, value)`` where ``locator_type``
    is one of the LocatorType string constants.  Both public methods return
    the located WebElement, or None on timeout / unknown locator type.
    """

    # Maps a locator-type string to (By attribute name, label used in the
    # timeout message).  Keys deliberately equal the LocatorType constants;
    # using literals keeps this class self-contained.  This table replaces
    # twelve copy-pasted try/except branches in the original implementation.
    _LOCATOR_DISPATCH = {
        'class name': ('CLASS_NAME', 'CLASS NAME'),
        'css selector': ('CSS_SELECTOR', 'CSS SELECTOR'),
        'id': ('ID', 'ID'),
        'link text': ('LINK_TEXT', 'LINK TEXT'),
        'name': ('NAME', 'NAME'),
        'xpath': ('XPATH', 'XPATH'),
    }

    def __init__(self, driver):
        # Selenium WebDriver instance used for all waits.
        self.driver = driver

    def _wait(self, locator, timeout, condition_name):
        """Shared wait implementation for both public methods.

        condition_name is the expected_conditions factory to use
        ('presence_of_element_located' or 'element_to_be_clickable').
        Returns the element, or None on unknown locator type / timeout.
        """
        entry = self._LOCATOR_DISPATCH.get(locator[0])
        if entry is None:
            print("Locator not found")
            return None
        by_attr, label = entry
        # Resolve By/EC attributes only after a successful match so the
        # unknown-locator path never touches selenium.
        condition = getattr(EC, condition_name)
        try:
            return WebDriverWait(self.driver, timeout).until(
                condition((getattr(By, by_attr), locator[1])))
        except TimeoutException:
            # Message text matches the original per-branch messages exactly.
            print("Loading took too much time or not found %s element" % label)
            return None

    def wait_for_element(self, locator, timeout=5):
        """Wait until the element described by *locator* is present in the DOM."""
        return self._wait(locator, timeout, 'presence_of_element_located')

    def wait_to_be_clickable(self, locator, timeout=5):
        """Wait until the element described by *locator* is clickable."""
        return self._wait(locator, timeout, 'element_to_be_clickable')
| 34
| 74
| 0.720588
| 548
| 4,012
| 5.164234
| 0.113139
| 0.04947
| 0.080565
| 0.114488
| 0.857951
| 0.857951
| 0.857951
| 0.857951
| 0.857951
| 0.857951
| 0
| 0.007876
| 0.177218
| 4,012
| 118
| 75
| 34
| 0.84944
| 0
| 0
| 0.701031
| 0
| 0
| 0.183404
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030928
| false
| 0
| 0.051546
| 0
| 0.185567
| 0.14433
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
34b46d989ce7d3bec124c50ddb2427f3d4dc808a
| 216
|
py
|
Python
|
src/scoring/jobtitle_scorer_strict.py
|
tiefenauer/ip7-python
|
512105ba39110ec77d2ea0961dd7c2a42d4ec26d
|
[
"MIT"
] | null | null | null |
src/scoring/jobtitle_scorer_strict.py
|
tiefenauer/ip7-python
|
512105ba39110ec77d2ea0961dd7c2a42d4ec26d
|
[
"MIT"
] | null | null | null |
src/scoring/jobtitle_scorer_strict.py
|
tiefenauer/ip7-python
|
512105ba39110ec77d2ea0961dd7c2a42d4ec26d
|
[
"MIT"
] | null | null | null |
from src.scoring.scorer_strict import StrictScorer
class StrictJobtitleScorer(StrictScorer):
    """Strict (all-or-nothing) scorer for job-title predictions."""

    def _calculate_similarity(self, actual_class, predicted_class):
        # Exact match scores 1, anything else scores 0.
        return 1 if actual_class == predicted_class else 0
| 27
| 67
| 0.800926
| 24
| 216
| 6.916667
| 0.708333
| 0.13253
| 0.240964
| 0.301205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134259
| 216
| 7
| 68
| 30.857143
| 0.887701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
34bf02c694003d900b32cca9a5b2f98a68fa625f
| 11,279
|
py
|
Python
|
app.py
|
iqvan/OCR_library
|
6f9dd6d7a239b5b712c66406e49231a43e154a5d
|
[
"MIT"
] | null | null | null |
app.py
|
iqvan/OCR_library
|
6f9dd6d7a239b5b712c66406e49231a43e154a5d
|
[
"MIT"
] | null | null | null |
app.py
|
iqvan/OCR_library
|
6f9dd6d7a239b5b712c66406e49231a43e154a5d
|
[
"MIT"
] | null | null | null |
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x07\x00\x42\x0d\x0d\x0a\x06\x28\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\xba\x0a\x00\x00\x00\x00\x00\x10\x8c\xe3\x5f\x01\xd0\x82\x28\xb0\xf7\x13\x9c\xd4\xab\x8b\x03\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x50\xff\xfe\x0c\xe6\xb4\xcd\x78\xf3\x8b\x4e\x9b\xd1\x2c\x07\x0e\x41\x27\xe3\xed\xbd\x78\x88\x5c\xc8\x54\xc1\x5c\xc5\x09\x22\x70\x13\x72\x03\x62\xc3\x26\x27\xfe\x81\xc3\xa1\xff\xb5\x48\xdf\xd4\x52\x7e\x31\x05\x21\xd6\xbf\x87\xe0\x0f\xd6\x2e\xd0\x66\x43\x5a\xf7\x9c\xa5\xcf\xbb\xa8\xbd\xc7\xf9\xf9\xf1\x86\x53\xb3\xa5\x78\x4c\xea\xaa\xa7\x3e\xe6\x84\x9a\xe2\xd2\x4a\xb2\xdf\xe2\xe9\xa9\x11\xac\x29\xbb\x64\x1a\xc4\x65\x36\xde\xfe\x48\x7a\xfb\x58\x6d\x8a\x86\x6c\xc5\x4f\x6d\x81\xf2\x33\x73\xcc\x30\x04\x7a\x10\x54\xdf\x74\x4d\xf6\xa2\xec\x15\x97\x66\x93\xaa\x00\xb8\x0b\xb2\x04\xe2\x53\x7c\x11\xb7\x14\xc7\xbd\x9b\x11\xa1\xee\x7e\x9e\xff\xe0\xc2\x13\xb8\x0c\x8f\x49\x41\xcc\x8b\x7c\xe1\xd8\x8f\x1f\x9b\x88\xe1\xe7\x4f\x9e\xc2\x3d\x4e\x21\x4e\x22\xa4\xe3\xf7\xc1\xe5\x2a\x7b\x56\x1a\x13\xde\x85\x55\xa2\x56\xe9\xdb\xfc\x0e\x27\xd7\xbe\xb6\x21\xac\x96\x86\xab\x94\x75\xd5\x34\x62\x4a\xb6\xc6\xfd\x6c\x3e\x2f\x9f\xfb\x63\x54\xce\x8c\x27\x49\xaa\x90\x76\x56\xe6\x34\x77\xb0\x42\x1b\xd6\xf4\xdc\x50\x72\xa1\x17\x23\x99\xe4\x75\xde\x54\x72\x26\x26\xae\x0d\x8c\x1c\x86\xd4\x21\x10\xec\x66\xa3\x03\xa7\x97\x2a\x6d\x3e\x3a\x75\x07\x08\xa2\x0e\x74\x43\xb2\x70\xd2\x9d\x33\x67\xba\x30\x5c\x0d\xa0\xf6\x24\x90\x2a\xd4\x36\x1a\xdc\xd8\x62\xdf\x24\x88\x13\x2b\x3b\x1d\x05\xeb\x9a\xf4\x3a\x74\x52\x8b\xc4\x81\xd6\xec\x03\x11\xf9\x0c\xf0\xef\xa8\xa9\xf8\x90\x66\xb4\xe2\xc2\x4f\xb5\x45\xc5\xa2\x00\xb5\xf2\x95\x0a\x0b\x3f\x00\xa2\x49\x13\x4d\x5a\x9b\xc3\xb5\xd7\x5a\xfd\x67\x80\xad\xee\xfe\xc4\x65\xbf\x0c\xa3\xd4\x45\xdc\xa7\x2c\xae\xf4\xac\x87\xb9\x56\x6c\x55\x77\x78\x48\xd9\x70\x6c\x12\x1a\x6d\x88\x71\xcd\x38\xec\xba\x7e\x1d\x36\x58\xbf\x77\xc7\xed\x31\xe0\x71\x8b\x95\x3d\xb8\xeb\x65\xa4\xce\x45\x1b\x93\xb3\x41\xd4\xce\x
b0\x83\xb3\xb7\xf9\x47\x16\xb6\x8a\xf0\x45\x73\xb1\x5f\x20\x45\xc7\xb9\x03\xc3\x14\x92\x9a\xbd\x0f\x8b\xaa\xdc\x84\x13\xf5\x59\xec\xc9\x22\xe6\x16\xfc\x74\xc0\x59\x40\x92\xa2\x6a\xd7\xc0\x70\xc2\xb2\x7a\xf8\x5a\xb0\x81\x47\xff\x5c\xbc\xba\x8c\x67\x50\x32\x1b\x1a\xe1\xcc\x39\xa0\x3d\x0a\xca\x66\x67\x2e\xc5\x91\x77\xe4\x17\x85\x89\xbf\xd4\xbd\xf7\x45\x30\x72\x50\x88\xda\x39\xb9\x77\xc2\xbe\x31\x46\x60\x43\x06\xfc\x06\xba\xb6\x16\x9e\xcc\x96\x78\xd4\xc5\x13\x56\xdd\xd4\xb7\x7f\x7d\xcd\xab\x28\xf5\x33\x03\xef\x65\xbd\x22\x19\xac\xba\x8f\xb9\xc9\x0b\x22\x99\xa7\x8b\x17\x98\xf6\xca\x8c\xc0\x78\x89\x24\xf7\xc7\xb5\x1d\xf0\x88\xb2\xf9\xeb\x94\xa0\x3e\x51\x7b\x89\x89\x4d\x0d\x96\x5e\x13\x2b\x27\xc3\xe4\x3f\xe4\x0b\xe2\x72\xae\x95\x77\x98\x67\x56\xed\x3b\x58\xbd\xb7\xef\xb3\x63\x09\x80\xd8\x7f\x35\x90\x8c\x53\xfd\x91\x0e\xf8\xa7\xe3\x43\x41\x72\x6f\x9c\xb6\x65\xfa\xb4\x2a\xd0\x72\x99\x75\x34\xbd\x87\xef\x21\x66\xd3\xea\x70\xb5\xd1\x39\x72\xb0\x4d\xad\x09\xb1\xed\xef\x92\x2e\x6a\xcb\x31\x80\x56\xb7\x34\x9a\x24\x60\x83\x72\x6f\x9e\x7a\x3a\x01\x25\xa9\x6c\x3f\xe6\xec\xd8\x2d\xab\x84\x8e\x02\xdd\xbb\xd7\x8e\xeb\xe7\x1e\x4c\xe7\x1f\xdf\x05\xd7\x80\x42\xc1\xa7\xe9\xc8\xd4\x67\x87\x08\xbd\x6b\x07\x21\x4d\x8f\x58\x4f\x73\xc4\x65\x42\xef\xf4\xff\x47\x72\x4b\xad\x54\xc2\xe2\x33\xbc\x5b\xdf\x32\x7f\x5d\xae\x57\x34\x0c\xa7\x5c\xd7\xe5\x4f\xa0\x8a\xdf\xab\x7c\x7f\xc0\x39\x38\x7d\xba\x84\x9f\xf3\x7e\xfb\xde\x2d\x31\x68\x8b\x5a\x8e\x84\x6d\x66\x61\xd8\x3d\x4a\x96\x16\x7d\xb6\x92\x90\xf8\xe5\x5e\x65\x91\x9f\x73\xcf\xe9\x13\x3f\x8b\x11\x05\xc3\x8e\x6b\xc3\xcf\xf6\xd1\x71\xff\x57\xb4\x53\xd6\xe4\x59\x34\x10\x15\x79\x54\x14\xa6\x59\x4f\xfa\x8d\xaf\x41\x90\x79\xa1\xfd\xbc\x3a\xd4\xf3\xaa\x87\x3a\xa7\xac\x9e\xd5\x7e\xca\x5f\x90\xd1\xd1\xb8\x8d\xa0\xd0\x7d\xda\x43\xf2\x50\xa8\x5b\xba\xf9\xfc\x9a\x79\x90\xdf\x72\xb3\x1b\x7b\xd3\x43\xdd\x3e\x05\x79\xc2\x22\xa0\x8e\x71\xd2\xe4\x44\x41\x0f\x16\x7c\x2f\xc5\x8a\xee\xe9\xd8\xc1\x1d\x2e\x52\x11\xc3\x53\xff\xf0\xce\x5c\xe3\xe6\xd4\xba\xfd\xfc\xb4\x5d\x2b\x
1b\x45\x34\x9b\xfa\xb2\x13\x84\x1e\x3a\x19\x54\x86\x41\x9d\xdb\xa8\xc7\xdd\x4d\x1f\x8f\x45\x16\x36\xd5\x0d\x3f\x91\x50\x39\x6a\xac\x47\xc4\x0d\xec\x90\x75\x10\x92\x1b\x5f\x3e\xb0\x40\x18\xd4\x16\x6e\x9e\x1b\x3f\x59\x77\xbf\x6d\x62\x36\x6d\x30\xc0\xa1\x9a\xfd\x5e\x8e\x6d\xea\x84\xe8\xed\x23\xfe\x8a\x62\x6b\x5c\xd0\x00\x2c\x08\xc6\x2b\xeb\x93\xa7\x27\x9d\x89\x79\x9b\xd9\xc1\xf2\xa8\x7a\x60\x54\xe8\x59\x5c\xd8\xbe\xe3\x10\x45\x09\xa9\x25\x88\x63\x9f\x34\xd5\x76\x9f\xdc\x53\xaf\x1c\x72\x7e\x88\x19\x0e\x88\xdd\xb6\xde\x4c\x7a\xd1\x20\xe8\xe7\x3f\xdb\x74\x87\x71\x52\x4d\x0f\xb8\x09\x3c\x41\xd1\x50\x9e\x52\xc2\x83\xae\xf0\x10\x90\x3f\x35\x9f\x1b\x20\x68\x67\xb0\x72\x69\x64\x4e\x81\x96\x30\x04\x45\x23\x64\x38\xdd\xe4\xd9\x5e\xc6\x1f\xe7\x72\xb7\x1d\xdb\xf5\xd5\x9e\x03\x8a\xa4\xda\x93\x8a\xc0\x06\xa9\x1a\x65\xdc\x0e\xde\xe6\xd6\x9f\xf1\xe5\x70\xff\x67\x65\xf9\xf6\x6b\x74\xf2\x63\x06\x67\x71\x0f\x44\xd8\x2f\xe5\xb9\x6e\x21\x5b\x32\x1f\xca\x3c\xdc\x7d\x09\xa7\xcc\x9d\x0f\x76\x97\x10\x87\x5a\x10\xf5\xd2\xbf\x3f\xeb\x88\x84\x40\xc5\x14\x5a\x4c\xe7\xf7\x77\x77\xf9\x44\x0f\x1a\x61\xd5\x40\x55\xa4\x79\x52\xd0\xfe\xe5\x5f\xf7\xae\x02\x7e\x09\x2f\x19\xb4\x46\xc8\x20\xec\xc0\x14\x58\x05\xae\x19\xe3\xf0\xe4\xc4\xcf\x9e\xce\xd8\xc0\xfd\x88\x52\x1b\x78\xec\xd6\xe5\xfd\x54\xe5\xac\x29\x46\x8e\xc7\xe1\x77\x47\x20\x8b\xb6\x8b\x32\xf2\xf3\xd3\x76\xd4\x4c\xc7\xa8\xca\x54\x0c\xb7\x9f\x2d\xeb\xd6\xb3\x56\x38\x5a\xd5\x2d\x0e\x87\x39\x90\x19\xf6\x71\x3e\xfe\x72\x41\x13\x5b\xfc\xf1\xb7\x14\xce\xa3\x5c\xc3\xff\x69\x17\x0f\x4c\x94\x78\x35\x2c\xd2\x59\xa6\xd1\x00\xbd\xd6\x19\x55\x82\x73\x5c\x37\xac\x23\x3d\xd8\xfa\x45\x31\x26\x40\xd6\x69\x68\x09\x6d\xda\xda\xc0\x15\xbf\x22\x2a\xdf\xdf\x1e\x22\x24\x31\xae\x7a\x1a\xd9\x2f\x07\x59\xf5\xec\x22\xa7\x9f\x7e\x2b\x62\x24\x35\x0f\xe0\xa5\x88\x72\x88\x70\xf6\xd3\x56\xe6\x5d\x56\xcc\x92\x21\x92\xef\x3f\x83\x71\xa7\x12\x1e\x46\x8f\x5c\xc8\xaa\x9e\x6c\x5e\x4c\x79\x7b\x86\xce\x1d\xd9\xa0\x04\x67\xf3\x0b\x40\xc2\x2b\x3f\xeb\xfd\x32\xc0\x40\x17\xd7\x96\x14\x32\xff\x
ef\x21\xe9\x75\x5f\xf5\x82\x8a\x0a\x8d\x65\x83\x1b\x6d\x20\x86\x2e\xde\x0d\x8b\x89\xe9\x4f\xe9\x17\x20\x78\x92\xa6\xf6\xfa\x1a\xd8\xea\x44\x89\xd1\x24\x75\x85\x3a\x87\x08\xde\x95\x65\xc6\x01\x15\x77\x45\xbc\x54\x1c\x91\xf2\xbc\x0b\xb9\xd7\x28\x88\x82\xd3\x95\x29\x6b\xf1\xa5\xf5\x93\x9a\x97\x2f\x69\x7d\x53\x71\x7b\x4e\x33\x01\x79\xb3\x6b\xc7\x30\xbd\x44\xca\x04\x36\x73\xdb\xda\x69\xfd\xe9\xff\x91\x8f\x01\x3d\xce\x29\x43\x98\x2c\x03\x2a\x6a\x66\x0a\x44\xc3\x52\xce\x82\x5d\xc9\x48\x7e\x41\xeb\x21\x43\x5e\x69\x37\x9a\x0f\x5b\x2c\x90\x70\x38\x75\xa0\x66\xe2\x5f\x01\x0d\x80\x8a\x14\x04\xa7\xe6\xcd\xfc\x7c\x30\x77\x68\xe4\xc5\xb0\x0c\x1a\x4f\x3d\xa0\xce\x92\x6e\xbf\xdc\x03\x48\xc1\x2a\x16\xe4\x88\xe7\x55\x47\x7a\x09\xa3\xa6\x37\x00\x2e\xcf\xa4\xc2\x1f\x2c\x5a\xdf\x0b\x2f\xb2\x75\x13\xa7\xf6\x43\xf9\xf3\xf7\xab\x86\xe8\x9b\xe7\x42\x4a\x5f\x04\x40\x74\xb5\xb8\x4b\x24\x7a\x1c\x0f\x52\xa7\x8b\xfd\xd7\x90\x91\x0b\x9f\xf1\x46\xba\x8b\x22\x89\x12\x3d\xda\xa2\x31\x75\x01\x00\x19\x60\x70\xa7\x1e\x3b\x1d\xc8\x0d\x85\x1c\xaa\x3f\x52\x2c\xc3\xf4\x33\xa4\x30\x80\x18\x1a\xa8\x90\x05\x32\x44\x04\x7a\x30\x81\x26\xea\x07\x88\x4a\x1b\xbc\x8f\x5d\x5d\x7e\x1e\x37\x61\xbc\x98\xc3\x4e\x60\x62\x93\x20\x79\x35\xbb\x8d\x17\x51\xd2\x50\xd8\xd7\xf4\xa8\xdd\x54\x44\xc2\xdb\x19\x82\xde\xea\x37\xfe\x44\x92\xc0\x9b\x19\x1b\xc6\xe7\xc5\x41\x24\x6a\xd1\x15\x81\x70\x04\xb7\x48\x51\x4b\x08\x5f\x76\x48\xd9\xd9\x7f\xe4\xe5\x42\xe5\x5a\x6c\xf6\x8c\x70\x9f\x59\xa5\xa7\x7e\x92\x59\xfe\x55\x57\x07\xa9\xf9\xb2\xea\xf0\x2b\xf1\xa7\x19\x85\x5e\x31\xd7\x91\xa1\x70\x7b\xa9\x0f\x4f\x1b\x8b\x8b\xfd\x37\xca\x60\xe1\x9d\x8a\xc2\xdc\x05\x1d\xf8\x3f\x1b\x02\x4f\xab\x85\x06\x63\x9b\xf0\x0b\x85\xe0\x3b\x93\xed\xb2\xe6\x6b\xce\x7d\x25\x06\x17\x64\x7a\x08\x83\xce\x60\xd0\xdb\x1d\xd9\xeb\xf0\x4f\x05\x87\xd2\x54\xcb\x54\xeb\x93\xf2\xe7\xae\x4c\x75\xd9\x03\x7e\xa8\x28\xb9\x5c\xe2\x7d\x97\xf2\xdd\x47\xc9\xd3\x7e\x04\xc6\x1e\xe7\xa0\x80\x94\xe5\xc0\x7f\xd5\x52\xf1\x36\x37\x0d\xd3\x38\xce\x1b\x89\x1e\x40\x9e\x28\x78\x06\xed\x2b\xc2\x
27\x3c\x54\xf0\x2e\x93\x50\x7e\x23\x61\xda\xf6\x53\x77\x7f\xe2\xdf\x22\xeb\x99\x4b\x53\xd9\x82\xac\x2d\xb4\x3f\x15\xc1\x69\x3e\x5a\x06\x17\xba\x45\x83\x40\x12\xf0\xf8\x6a\xea\x0b\x57\x98\x4a\x57\x59\xd5\x31\x35\x42\x9b\xca\x99\x98\x5c\xa9\xea\xe2\xe0\x17\x4c\x58\x11\x02\x66\xf2\xb8\x5b\x5c\x32\xa8\x23\x6c\x9b\x11\x2d\x34\xdb\x47\x12\x8f\x9e\x14\x74\xf6\x66\x1b\xff\x12\x6f\x16\x4e\xcb\x8d\x6a\x01\xe2\xd5\x9a\x56\x72\x8f\x16\xf5\x10\x98\x3b\x7e\x38\xfe\xb1\x9e\x5b\x41\x50\xaa\x6a\xd3\x4e\x46\xa5\x9b\x63\x17\x62\x7e\x60\x5b\x05\x2e\x43\x06\x26\x83\x88\x20\xfd\x6a\x9b\x0b\x2f\x9b\xce\x80\x57\xbd\xac\xd5\x0e\x81\x07\xe2\x4d\xba\xbc\xa7\xf8\x2f\xa7\xe2\x4f\x9a\xb1\x90\xb1\x8a\x4f\x16\xd5\x76\x5d\xd8\x73\x8c\x91\x34\x06\xfa\xeb\x1b\x7c\x05\x43\xe2\x3a\x45\xce\xb6\xa5\x35\xd9\x55\xaf\x1b\x8e\x44\x2e\x8c\xc0\x22\xe0\x72\xb1\x1d\x12\x6a\x52\xac\x69\x7f\xd0\x32\xd1\x9e\x69\x61\x0e\x04\xfd\x76\x17\x66\x80\x0b\x62\xe1\xea\xab\x7f\x68\x06\x8f\x59\x28\xec\x33\x0e\x1e\xf2\xd2\x28\x35\xdb\x77\xbe\x9b\xcd\x43\xeb\xca\xe8\x9d\xf1\xf9\xdf\xdf\x7a\xef\x17\x6e\xa4\xa8\x64\xfd\x25\x1a\xa0\xba\x46\x5c\xb2\x24\x76\x12\xd5\x7a\xef\x49\x84\xde\xe1\x37\xc0\x85\x3f\x41\xdf\xea\xbb\xf3\x07\x64\x86\x89\xd9\xd9\x40\x94\xa2\x53\x85\xaf\xa8\xa8\x7c\x5a\x2e\x63\xbd\xbe\xa9\x52\x9c\x7c\xa4\x8e\xce\xcc\xf0\xaf\x45\x8f\x36\x81\xda\xd1\xa3\x65\x85\x7b\xf3\xa4\x7d\x74\xed\xc2\x0a\xed\x44\x7d\x60\xa5\xbf\xc4\x5f\x75\x9b\x2c\xe2\xe8\x05\x0a\xe4\x51\xcf\x72\x90\xfb\xc2\x39\xa9\xc3\x2b\xe1\x73\xa3\x4f\x7a\x30\xd5\x41\x8d\xbd\xd0\xcd\xfa\x20\xc1\xb6\xa3\x30\xca\x57\x06\x73\xe9\x26\x74\x58\xdc\x40\x7c\x53\xed\xcb\x3e\x7b\x05\xae\x0a\xfb\x29\xfc\x99\x74\xca\x3c\xe1\xac\x7c\x17\xd5\x56\xc1\x9f\x93\x1c\xcd\xb9\xbb\x5b\x3d\x28\xbf\xe0\xf8\x5d\x88\xbe\x0f\x17\x05\xa5\x40\x12\xeb\x6a\x05\x54\xa2\xc0\xfc\x50\x4a\x10\xda\x20\x0f\xb5\x17\x23\x53\x03\x5e\x13\x12\xf2\xe3\x15\x3c\xb2\x80\x91\xc9\x78\xbc\x89\x05\xc8\xae\x08\xf8\xb4\x38\xac\x4f\x81\x8b\x96\x21\x54\xef\xf0\xfb\x12\x9e\x29\xf6\xc4\x55\x85\x08\xbc\xdc\xae\x5a\x
ff\x24\x0c\xcc\x90\x52\x10\xc2\x46\x2a\x70\x8a\x10\x91\xdb\xb1\x9d\x61\x52\x8e\xbe\x9f\xee\x7d\x39\x76\x15\x89\xcf\xbe\xc6\x23\xd6\x72\x03\x41\xff\x9b\x31\x40\xbf\xd3\x60\x9d\x8b\xf3\xcf\x87\xcc\x80\xf9\x19\xe2\x06\x70\xe5\xa6\xa8\xb0\x87\xef\x22\x3f\x09\x71\x40\x12\x6f\x80\x13\xd0\xfc\x38\x93\x58\xea\x1c\xb1\x44\xb2\x94\xd5\xa5\x2e\xc6\xea\x1e\x24\x20\x25\x86\x80\x8e\x32\xd1\xd1\xd6\x9f\xdb\xb5\xea\xc5\xa4\x93\xcc\x23\xac\x79\x4b\xab\x6e\x64\xa8\xca\x7a\x94\x30\x75\xc4\xe2\xe3\x12\x4f\xb3\x0f\xa9\x9c\xca\x86\x8b\xf7\xfe\x39\xf6\xa2\xf5\xab\x7a\x43\x46\x82\x42\x17\x71\xbe\x0c\xaf\x2f\x0e\xd5\xb3\x15\x1d\xd4\x76\x4a\x7c\x78\xaf\xfb\x7f\xb3\x7c\x00\x8f\x79\x9d\x50\xba\xf7\x48\xf0\x0e\x89\x3e\x6e\x79\x95\x1c\xe2\xca\xaa\x7d\x9c\x11\x4d\xff\xe2\xf1\x7d\xe5\xce\x89\xa1\xc9\x4d\x8c\x07\x4f\xf4\xea\x65\x4e\x18\x14\xcf\xd3\x69\x25\x38\xe3\xf4\x54\xb5\x6a\xf0\xf9\x2a\x48\x68\x37\xcd\xe3\x01\xdf\xa3\xcd\xbf\xac\xe7\xd3\x2a\x80\x52\xb1\x2f\xf1\xa8\x08\x6f\xc8\xf9\x93\xc8\xc0\x51\x4a\x58\x10\x1e\xde\xd6\x74\xf9\x29\xe5\x2a\x3f\xb2\x36\xe4\xf0\x1f\xe3\xaa\xa0\x59\xae\xfe\xd2\x06\xf4\xc1\x70\x79\xa0\x57\x91\x3b\xbd\x24\x60\xe5\xd2\x76\x84\x0a\xae\x60\x97\x7d\xb2\x26\x51\x0d\x94\x62\x61\x33\x70\x13\xb8\x06\xa4\x0e\xc5\xad\x10\x34\xb3\x57\x2a\x13\x4f\xd8\x50\x1b\x91\x28', 2)
| 11,279
| 11,279
| 0.749978
| 2,815
| 11,279
| 3.00071
| 0.092718
| 0.013496
| 0.013851
| 0.011365
| 0.004617
| 0.002841
| 0.002841
| 0
| 0
| 0
| 0
| 0.312966
| 0.000266
| 11,279
| 1
| 11,279
| 11,279
| 0.436148
| 0
| 0
| 0
| 0
| 1
| 0.996454
| 0.996454
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
9b4cf6e7d552d82240d01bec97b5959a37a1a6f3
| 6,177
|
py
|
Python
|
pyMKL/Tests/test_Pardiso.py
|
jcapriot/pyMKL
|
4b960585903bc1504dec2e37aa09d67849986322
|
[
"MIT"
] | 10
|
2016-05-18T09:33:39.000Z
|
2021-03-13T07:10:46.000Z
|
pyMKL/Tests/test_Pardiso.py
|
jcapriot/pyMKL
|
4b960585903bc1504dec2e37aa09d67849986322
|
[
"MIT"
] | 10
|
2016-04-29T16:07:21.000Z
|
2022-01-02T19:15:06.000Z
|
pyMKL/Tests/test_Pardiso.py
|
jcapriot/pyMKL
|
4b960585903bc1504dec2e37aa09d67849986322
|
[
"MIT"
] | 16
|
2016-04-29T14:14:38.000Z
|
2022-01-04T11:52:56.000Z
|
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
import numpy as np
import scipy.sparse as sp
from pyMKL import pardisoSolver
nSize = 100
class TestPardiso_oneRHS(unittest.TestCase):
    """Solve A x = rhs with a single right-hand side across PARDISO mtypes.

    Each test builds a sparse system whose true solution is known, runs the
    analyze+factor (phase 12) and solve (phase 33) steps, and checks the
    recovered solution to machine precision.  The previously copy-pasted
    solver boilerplate is factored into ``_run_solver``/``_assert_matches``,
    and the redundant local ``nSize = 100`` shadows of the module constant
    are removed (same value, one source of truth).
    """

    def _run_solver(self, A, rhs, mtype):
        # Analyze + factor (phase 12), solve (phase 33), then free memory.
        pSolve = pardisoSolver(A, mtype=mtype)
        pSolve.run_pardiso(12)
        x = pSolve.run_pardiso(33, rhs)
        pSolve.clear()
        return x

    def _assert_matches(self, x, xTrue):
        # Relative 2-norm error must be at machine-precision level.
        self.assertLess(np.linalg.norm(x - xTrue) / np.linalg.norm(xTrue), 1e-12)

    def test_RealNonSym(self):
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        A = A + sp.spdiags(np.ones(nSize), 0, nSize, nSize)
        A = A.tocsr()
        np.random.seed(1)
        xTrue = np.random.rand(nSize)
        rhs = A.dot(xTrue)
        self._assert_matches(self._run_solver(A, rhs, mtype=11), xTrue)

    def test_RealSPD(self):
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        A = A.T.dot(A) + sp.spdiags(np.ones(nSize), 0, nSize, nSize)
        A = A.tocsr()
        np.random.seed(1)
        xTrue = np.random.rand(nSize)
        rhs = A.dot(xTrue)
        self._assert_matches(self._run_solver(A, rhs, mtype=2), xTrue)

    def test_RealPosInd(self):
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        # Indefinite diagonal: +1 on the first half, -1 on the second.
        d = np.ones(nSize)
        d[nSize//2:] = -1.
        A = A.T.dot(A) + sp.spdiags(d, 0, nSize, nSize)
        A = A.tocsr()
        np.random.seed(1)
        xTrue = np.random.rand(nSize)
        rhs = A.dot(xTrue)
        self._assert_matches(self._run_solver(A, rhs, mtype=-2), xTrue)

    def test_ComplexNonSym(self):
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        A = A + sp.spdiags(np.ones(nSize), 0, nSize, nSize)
        A.data = A.data + 1j*np.random.rand(A.nnz)
        A = A.tocsr()
        np.random.seed(1)
        xTrue = np.random.rand(nSize) + 1j*np.random.rand(nSize)
        rhs = A.dot(xTrue)
        self._assert_matches(self._run_solver(A, rhs, mtype=13), xTrue)

    def test_ComplexNonSym_RealRHS(self):
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        A = A + sp.spdiags(np.ones(nSize), 0, nSize, nSize)
        A.data = A.data + 1j*np.random.rand(A.nnz)
        A = A.tocsr()
        np.random.seed(1)
        rhs = np.random.rand(nSize)
        x = self._run_solver(A, rhs, mtype=13)
        # No known xTrue here: check the residual of the recovered solution.
        self.assertLess(np.linalg.norm(A.dot(x) - rhs), 1e-12)

    def test_ComplexSym(self):
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        A.data = A.data + 1j*np.random.rand(A.nnz)
        A = A.T.dot(A) + sp.spdiags(np.ones(nSize), 0, nSize, nSize)
        A = A.tocsr()
        np.random.seed(1)
        xTrue = np.random.rand(nSize) + 1j*np.random.rand(nSize)
        rhs = A.dot(xTrue)
        self._assert_matches(self._run_solver(A, rhs, mtype=6), xTrue)

    def test_ComplexHerm(self):
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        A.data = A.data + 1j*np.random.rand(A.nnz)
        # A^T conj(A) is Hermitian by construction.
        A = A.T.dot(A.conj()) + sp.spdiags(np.ones(nSize), 0, nSize, nSize)
        A = A.tocsr()
        np.random.seed(1)
        xTrue = np.random.rand(nSize) + 1j*np.random.rand(nSize)
        rhs = A.dot(xTrue)
        self._assert_matches(self._run_solver(A, rhs, mtype=4), xTrue)
class TestPardiso_multipleRHS(unittest.TestCase):
    """Solve with several right-hand-side columns at once, plus factor()/solve().

    The repeated solver boilerplate and per-column assertion loop are factored
    into helpers; each test keeps its own matrix construction.
    """

    # Number of simultaneous right-hand-side columns.
    nRHS = 20

    def _run_solver(self, A, rhs, mtype):
        # Analyze + factor (phase 12), solve (phase 33), then free memory.
        pSolve = pardisoSolver(A, mtype=mtype)
        pSolve.run_pardiso(12)
        x = pSolve.run_pardiso(33, rhs)
        pSolve.clear()
        return x

    def _assert_columns_match(self, x, xTrue):
        # Every RHS column must be recovered to machine precision (rel. 2-norm).
        for i in range(self.nRHS):
            self.assertLess(
                np.linalg.norm(x[:, i] - xTrue[:, i]) / np.linalg.norm(xTrue[:, i]),
                1e-12)

    def test_RealNonSym(self):
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        A = A + sp.spdiags(np.ones(nSize), 0, nSize, nSize)
        A = A.tocsr()
        np.random.seed(1)
        xTrue = np.random.rand(nSize, self.nRHS)
        rhs = A.dot(xTrue)
        self._assert_columns_match(self._run_solver(A, rhs, mtype=11), xTrue)

    def test_ComplexHerm(self):
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        A.data = A.data + 1j*np.random.rand(A.nnz)
        # A^T conj(A) is Hermitian by construction.
        A = A.T.dot(A.conj()) + sp.spdiags(np.ones(nSize), 0, nSize, nSize)
        A = A.tocsr()
        np.random.seed(1)
        xTrue = np.random.rand(nSize, self.nRHS) + 1j*np.random.rand(nSize, self.nRHS)
        rhs = A.dot(xTrue)
        self._assert_columns_match(self._run_solver(A, rhs, mtype=4), xTrue)

    def test_FactorSolve(self):
        # Exercises the higher-level factor()/solve() wrappers (single RHS)
        # instead of the raw run_pardiso phase calls used above.
        A = sp.rand(nSize, nSize, 0.05, format='csr', random_state=100)
        A = A + sp.spdiags(np.ones(nSize), 0, nSize, nSize)
        A = A.tocsr()
        np.random.seed(1)
        xTrue = np.random.rand(nSize)
        rhs = A.dot(xTrue)
        pSolve = pardisoSolver(A, mtype=11)
        pSolve.factor()
        x = pSolve.solve(rhs)
        pSolve.clear()
        self.assertLess(np.linalg.norm(x - xTrue) / np.linalg.norm(xTrue), 1e-12)
if __name__ == '__main__':
    # Allow running this test module directly: python test_Pardiso.py
    unittest.main()
| 29.84058
| 96
| 0.585559
| 906
| 6,177
| 3.913907
| 0.099338
| 0.065426
| 0.064298
| 0.067118
| 0.86097
| 0.860406
| 0.860406
| 0.855894
| 0.855894
| 0.855894
| 0
| 0.044293
| 0.265339
| 6,177
| 206
| 97
| 29.985437
| 0.737109
| 0
| 0
| 0.782895
| 0
| 0
| 0.006152
| 0
| 0
| 0
| 0
| 0
| 0.065789
| 1
| 0.065789
| false
| 0
| 0.059211
| 0
| 0.144737
| 0.006579
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9b6385ca995dd8857905b28eaf267b1e2fcdba82
| 157
|
py
|
Python
|
glosysnet/nn/__init__.py
|
NareshAtnPLUS/glosysnet
|
e85df44727b8784766be7e728267e5699997e226
|
[
"MIT"
] | null | null | null |
glosysnet/nn/__init__.py
|
NareshAtnPLUS/glosysnet
|
e85df44727b8784766be7e728267e5699997e226
|
[
"MIT"
] | null | null | null |
glosysnet/nn/__init__.py
|
NareshAtnPLUS/glosysnet
|
e85df44727b8784766be7e728267e5699997e226
|
[
"MIT"
] | null | null | null |
from glosysnet.nn.activate import *
from glosysnet.nn.optimizations import *
from glosysnet.nn.loss_functions import *
from glosysnet.nn.classifier import *
| 31.4
| 41
| 0.821656
| 21
| 157
| 6.095238
| 0.428571
| 0.40625
| 0.46875
| 0.492188
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101911
| 157
| 4
| 42
| 39.25
| 0.907801
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
32daa01952c5df330e1705543790b4ca58387b6e
| 3,041
|
py
|
Python
|
tests/tests_decay_functions.py
|
lmbringas/NeuralMap
|
0a8787e79f3985bb188b1b041e3ec7e558c4a742
|
[
"MIT"
] | 4
|
2020-10-02T11:46:48.000Z
|
2021-05-14T18:20:41.000Z
|
tests/tests_decay_functions.py
|
lmbringas/NeuralMap
|
0a8787e79f3985bb188b1b041e3ec7e558c4a742
|
[
"MIT"
] | 1
|
2021-06-21T18:46:39.000Z
|
2021-06-21T18:46:39.000Z
|
tests/tests_decay_functions.py
|
lmbringas/NeuralMap
|
0a8787e79f3985bb188b1b041e3ec7e558c4a742
|
[
"MIT"
] | 1
|
2021-06-13T19:58:19.000Z
|
2021-06-13T19:58:19.000Z
|
import unittest
from ..neural_map import linear, exponential, rational, no_decay
TOLERANCE = 1e-8
class LinearTestCase(unittest.TestCase):
    """linear() should move from the initial to the final value in equal steps."""

    def setUp(self):
        self.tested_function = linear

    def test_initial_value(self):
        self.assertEqual(self.tested_function(2., 1., 10., 0.), 2., 'wrong first values')

    def test_final_value(self):
        self.assertEqual(self.tested_function(2., 1., 10., 9.), 1., 'wrong last values')

    def test_variations(self):
        # Sample the first three epochs; consecutive differences must be equal.
        epochs = [self.tested_function(2., 1., 10., float(i)) for i in range(3)]
        delta = abs((epochs[1] - epochs[0]) - (epochs[2] - epochs[1]))
        self.assertLessEqual(delta, TOLERANCE, 'wrong incremental values')
class ExponentialTestCase(unittest.TestCase):
    """exponential() should decay with a constant epoch-to-epoch ratio."""

    def setUp(self):
        self.tested_function = exponential

    def test_initial_value(self):
        self.assertEqual(self.tested_function(2., 1., 10., 0.), 2., 'wrong first values')

    def test_final_value(self):
        self.assertEqual(self.tested_function(2., 1., 10., 9.), 1., 'wrong last values')

    def test_variations(self):
        # Sample the first three epochs; consecutive ratios must be equal.
        epochs = [self.tested_function(2., 1., 10., float(i)) for i in range(3)]
        growth = abs((epochs[1] / epochs[0]) - (epochs[2] / epochs[1]))
        self.assertLessEqual(growth, TOLERANCE, 'wrong incremental values')
class RationalTestCase(unittest.TestCase):
    """rational() should decay so that the reciprocal changes by equal steps."""

    def setUp(self):
        self.tested_function = rational

    def test_initial_value(self):
        self.assertEqual(self.tested_function(2., 1., 10., 0.), 2., 'wrong first values')

    def test_final_value(self):
        self.assertEqual(self.tested_function(2., 1., 10., 9.), 1., 'wrong last values')

    def test_variations(self):
        # Sample the first three epochs; differences of reciprocals must be equal.
        epochs = [self.tested_function(2., 1., 10., float(i)) for i in range(3)]
        ratio = abs((1. / epochs[1] - 1. / epochs[0]) - (1. / epochs[2] - 1. / epochs[1]))
        self.assertLessEqual(ratio, TOLERANCE, 'wrong incremental values')
class NoDecayTestCase(unittest.TestCase):
    """no_decay() should always return the initial value, regardless of epoch."""

    def setUp(self):
        self.tested_function = no_decay

    def test_initial_value(self):
        self.assertEqual(self.tested_function(2., 1., 10., 0.), 2., 'wrong first values')

    def test_final_value(self):
        # Even though the requested final value (1.) differs from the initial
        # one, this function must still return the initial value at the end.
        self.assertEqual(self.tested_function(2., 1., 10., 9.), 2., 'wrong last values')

    def test_variations(self):
        # Two consecutive epochs must yield identical values.
        epochs = [self.tested_function(2., 1., 10., float(i)) for i in range(2)]
        delta = abs(epochs[1] - epochs[0])
        self.assertLessEqual(delta, TOLERANCE, 'wrong incremental values')
| 38.987179
| 100
| 0.657678
| 405
| 3,041
| 4.762963
| 0.145679
| 0.119233
| 0.214619
| 0.187144
| 0.817004
| 0.777087
| 0.762053
| 0.762053
| 0.6169
| 0.597201
| 0
| 0.045097
| 0.205196
| 3,041
| 77
| 101
| 39.493506
| 0.753
| 0.03387
| 0
| 0.666667
| 0
| 0
| 0.080409
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.296296
| false
| 0
| 0.037037
| 0
| 0.407407
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fd3f971717d3cd5d68d6385db9b809ee7a6091fb
| 7,040
|
py
|
Python
|
rollyourown/seo/migrations/0002_auto__chg_field_basicmetadatamodelinstance_description__chg_field_basi.py
|
winzard/django-seo
|
3b89043f5659e902d88a678b49dc835cc26fae3b
|
[
"BSD-3-Clause"
] | null | null | null |
rollyourown/seo/migrations/0002_auto__chg_field_basicmetadatamodelinstance_description__chg_field_basi.py
|
winzard/django-seo
|
3b89043f5659e902d88a678b49dc835cc26fae3b
|
[
"BSD-3-Clause"
] | null | null | null |
rollyourown/seo/migrations/0002_auto__chg_field_basicmetadatamodelinstance_description__chg_field_basi.py
|
winzard/django-seo
|
3b89043f5659e902d88a678b49dc835cc26fae3b
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Widen the 'description' CharField to max_length=305 on every table."""
    # Same four alter_column calls as before, in the same order; a fresh
    # field instance is created per call, matching the original behavior.
    for table in (u'seo_basicmetadatamodelinstance', u'seo_basicmetadatamodel',
                  u'seo_basicmetadatapath', u'seo_basicmetadataview'):
        db.alter_column(table, 'description',
                        self.gf('django.db.models.fields.CharField')(max_length=305))
def backwards(self, orm):
# Changing field 'BasicMetadataModelInstance.description'
db.alter_column(u'seo_basicmetadatamodelinstance', 'description', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'BasicMetadataModel.description'
db.alter_column(u'seo_basicmetadatamodel', 'description', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'BasicMetadataPath.description'
db.alter_column(u'seo_basicmetadatapath', 'description', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'BasicMetadataView.description'
db.alter_column(u'seo_basicmetadataview', 'description', self.gf('django.db.models.fields.CharField')(max_length=255))
models = {
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'seo.basicmetadatamodel': {
'Meta': {'unique_together': "(('_content_type',),)", 'object_name': 'BasicMetadataModel'},
'_content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '305', 'blank': 'True'}),
'heading': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'og_description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'og_title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
u'seo.basicmetadatamodelinstance': {
'Meta': {'unique_together': "(('_path',), ('_content_type', '_object_id'))", 'object_name': 'BasicMetadataModelInstance'},
'_content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'_object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'_path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '305', 'blank': 'True'}),
'heading': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'og_description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'og_title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
u'seo.basicmetadatapath': {
'Meta': {'unique_together': "(('_path',),)", 'object_name': 'BasicMetadataPath'},
'_path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '305', 'blank': 'True'}),
'heading': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'og_description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'og_title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
u'seo.basicmetadataview': {
'Meta': {'unique_together': "(('_view',),)", 'object_name': 'BasicMetadataView'},
'_view': ('django.db.models.fields.CharField', [], {'default': "''", 'unique': 'True', 'max_length': '255', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '305', 'blank': 'True'}),
'heading': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'og_description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'og_title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '511', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['seo']
| 74.893617
| 161
| 0.594318
| 716
| 7,040
| 5.701117
| 0.103352
| 0.092112
| 0.157766
| 0.22538
| 0.83513
| 0.822146
| 0.813327
| 0.813327
| 0.803283
| 0.78295
| 0
| 0.019839
| 0.16946
| 7,040
| 94
| 162
| 74.893617
| 0.678297
| 0.059233
| 0
| 0.464789
| 0
| 0
| 0.551557
| 0.299819
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028169
| false
| 0
| 0.056338
| 0
| 0.126761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b5d03d1051ad8694b9bdb4eb63c3fc63fc36d3f3
| 458
|
py
|
Python
|
temboo/core/Library/Disqus/OAuth/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Disqus/OAuth/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Disqus/OAuth/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Disqus.OAuth.FinalizeOAuth import FinalizeOAuth, FinalizeOAuthInputSet, FinalizeOAuthResultSet, FinalizeOAuthChoreographyExecution
from temboo.Library.Disqus.OAuth.InitializeOAuth import InitializeOAuth, InitializeOAuthInputSet, InitializeOAuthResultSet, InitializeOAuthChoreographyExecution
from temboo.Library.Disqus.OAuth.RefreshToken import RefreshToken, RefreshTokenInputSet, RefreshTokenResultSet, RefreshTokenChoreographyExecution
| 114.5
| 160
| 0.908297
| 33
| 458
| 12.606061
| 0.545455
| 0.072115
| 0.122596
| 0.165865
| 0.201923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045852
| 458
| 3
| 161
| 152.666667
| 0.951945
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bd01c18a54a25674523a21fb41e1a1cf263ec68c
| 2,797
|
py
|
Python
|
scripts/script_helpers.py
|
taodav/balloon-learning-environment
|
4beb45f52ab7127a4c051df25894738420e8e691
|
[
"Apache-2.0"
] | null | null | null |
scripts/script_helpers.py
|
taodav/balloon-learning-environment
|
4beb45f52ab7127a4c051df25894738420e8e691
|
[
"Apache-2.0"
] | null | null | null |
scripts/script_helpers.py
|
taodav/balloon-learning-environment
|
4beb45f52ab7127a4c051df25894738420e8e691
|
[
"Apache-2.0"
] | null | null | null |
from pathlib import Path
from itertools import product
from typing import List
from definitions import ROOT_DIR
def generate_runs(run_dict: dict, runs_dir: Path, runs_fname: str = 'runs.txt',
                  run_command: str = '-m balloon_learning_environment.train',
                  data_dir: Path = None,
                  sub_dir_keys: List[str] = None) -> None:
    """Write one launch command per hyperparameter combination to a runs file.

    Takes the cartesian product of the value lists in ``run_dict`` and writes
    a ``python <run_command> --k=v ...`` line for each combination. Boolean
    True values become bare ``--flag`` switches; False/None values are
    omitted entirely.

    :param run_dict: maps flag names to lists of candidate values
    :param runs_dir: directory to put the runs file in
    :param runs_fname: name of the runs file
    :param run_command: module/script argument passed to ``python``
    :param data_dir: base output directory (defaults to <ROOT_DIR>/data)
    :param sub_dir_keys: flags whose values become nested sub-directories of
        data_dir when building each run's ``--base_dir``
    :return: None (the runs file is the output)
    """
    # Resolve defaults lazily: avoids a mutable default argument and avoids
    # touching ROOT_DIR at import/definition time.
    if data_dir is None:
        data_dir = Path(ROOT_DIR) / 'data'
    if sub_dir_keys is None:
        sub_dir_keys = []
    runs_path = runs_dir / runs_fname
    if runs_path.is_file():
        runs_path.unlink()  # start from a clean file; we append below
    keys = list(run_dict)
    values = [run_dict[k] for k in keys]
    num_runs = 0
    # Context manager guarantees the file is closed even if writing fails.
    with open(runs_path, 'a+') as f:
        for args in product(*values):
            arg = dict(zip(keys, args))
            base_dir = data_dir
            for dir_key in sub_dir_keys:
                base_dir /= f"{dir_key}_{arg[dir_key]}"
            run_string = f"python {run_command}"
            for k, v in arg.items():
                if v is True:
                    run_string += f" --{k}"  # boolean switch: presence only
                elif v is False or v is None:
                    continue  # disabled/unset flags are omitted
                else:
                    run_string += f" --{k}={v}"
            run_string += f" --base_dir={base_dir}"
            run_string += "\n"
            f.write(run_string)
            num_runs += 1
            print(num_runs, run_string)
def generate_eval_runs(run_dict: dict, runs_dir: Path, runs_fname: str = 'runs.txt',
                       run_command: str = '-m balloon_learning_environment.eval.eval_checkpoints',
                       data_dir: Path = None,
                       sub_dir_keys: List[str] = []) -> None:
    """Write one evaluation command per hyperparameter combination to a runs file.

    Like :func:`generate_runs`, but each run's output directory additionally
    nests under the run's ``agent`` value, and the command line ends with
    ``--output_dir`` and ``--checkpoint_dir`` instead of ``--base_dir``.
    ``run_dict`` must therefore contain an ``'agent'`` key.

    :param run_dict: maps flag names to lists of candidate values
    :param runs_dir: directory to put the runs file in
    :param runs_fname: name of the runs file
    :param run_command: module/script argument passed to ``python``
    :param data_dir: base output directory (defaults to <ROOT_DIR>/data)
    :param sub_dir_keys: flags whose values become nested sub-directories of
        data_dir when building each run's output path
    :return: None (the runs file is the output)
    """
    # Resolve the data_dir default lazily instead of at definition time.
    if data_dir is None:
        data_dir = Path(ROOT_DIR) / 'data'
    runs_path = runs_dir / runs_fname
    if runs_path.is_file():
        runs_path.unlink()  # start from a clean file; we append below
    keys = list(run_dict)
    values = [run_dict[k] for k in keys]
    num_runs = 0
    # Context manager guarantees the file is closed even if writing fails.
    with open(runs_path, 'a+') as f:
        for args in product(*values):
            arg = dict(zip(keys, args))
            base_dir = data_dir
            for dir_key in sub_dir_keys:
                base_dir /= f"{dir_key}_{arg[dir_key]}"
            base_dir /= arg['agent']
            # base_dir /= str(arg['run_number'])
            run_string = f"python {run_command}"
            for k, v in arg.items():
                if v is True:
                    run_string += f" --{k}"  # boolean switch: presence only
                elif v is False or v is None:
                    continue  # disabled/unset flags are omitted
                else:
                    run_string += f" --{k}={v}"
            # TODO: refactor this
            run_string += f" --output_dir={base_dir}"
            run_string += f" --checkpoint_dir={base_dir}\n"
            f.write(run_string)
            num_runs += 1
            print(num_runs, run_string)
| 24.973214
| 98
| 0.601001
| 430
| 2,797
| 3.686047
| 0.204651
| 0.079495
| 0.069401
| 0.026498
| 0.842902
| 0.818927
| 0.818927
| 0.818927
| 0.818927
| 0.818927
| 0
| 0.001951
| 0.267072
| 2,797
| 111
| 99
| 25.198198
| 0.77122
| 0.129067
| 0
| 0.811594
| 1
| 0
| 0.13375
| 0.085417
| 0
| 0
| 0
| 0.009009
| 0
| 1
| 0.028986
| false
| 0
| 0.057971
| 0
| 0.086957
| 0.028986
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bd19b648f91d50d46ca1cd822aa7dd5720929043
| 1,620
|
py
|
Python
|
src/heos_remote/commands.py
|
jrderuiter/heos-remote
|
a25b2d3975cf5365e49cb6c6788102838bc84c9e
|
[
"MIT"
] | null | null | null |
src/heos_remote/commands.py
|
jrderuiter/heos-remote
|
a25b2d3975cf5365e49cb6c6788102838bc84c9e
|
[
"MIT"
] | null | null | null |
src/heos_remote/commands.py
|
jrderuiter/heos-remote
|
a25b2d3975cf5365e49cb6c6788102838bc84c9e
|
[
"MIT"
] | null | null | null |
class Command:
    """Base class for remote-control commands.

    Subclasses implement run(); an instance may also be invoked directly,
    which simply delegates to run().
    """

    def __call__(self):
        # Calling the command object is equivalent to calling run().
        return self.run()

    def run(self):
        # Abstract: concrete commands must override this.
        raise NotImplementedError()

    def __str__(self):
        return f"{type(self).__name__}()"
class IncreaseVolume(Command):
    """Raise the target player/group volume by a fixed step."""

    def __init__(self, player_or_group, step_size=2):
        self.player_or_group = player_or_group
        self.step_size = step_size

    def run(self):
        target = self.player_or_group
        target.volume = target.volume + self.step_size

    def __str__(self):
        return f"{type(self).__name__}(step_size={self.step_size})"
class DecreaseVolume(Command):
    """Lower the target player/group volume by a fixed step."""

    def __init__(self, player_or_group, step_size=2):
        self.player_or_group = player_or_group
        self.step_size = step_size

    def run(self):
        target = self.player_or_group
        target.volume = target.volume - self.step_size

    def __str__(self):
        return f"{type(self).__name__}(step_size={self.step_size})"
class ToggleMute(Command):
    """Invert the target player/group mute state."""

    def __init__(self, player_or_group):
        self.player_or_group = player_or_group

    def run(self):
        target = self.player_or_group
        target.mute = not target.mute
class TogglePlay(Command):
    """Toggle play/pause on the target player/group."""

    def __init__(self, player_or_group):
        self.player_or_group = player_or_group

    def run(self):
        target = self.player_or_group
        target.toggle_play()
class PlayNext(Command):
    """Skip to the next track on the target player/group."""

    def __init__(self, player_or_group):
        self.player_or_group = player_or_group

    def run(self):
        target = self.player_or_group
        target.play_next()
class PlayPrevious(Command):
    """Skip to the previous track on the target player/group."""

    def __init__(self, player_or_group):
        self.player_or_group = player_or_group

    def run(self):
        target = self.player_or_group
        target.play_previous()
| 21.891892
| 71
| 0.683333
| 222
| 1,620
| 4.405405
| 0.144144
| 0.204499
| 0.332311
| 0.330266
| 0.812883
| 0.787321
| 0.787321
| 0.787321
| 0.756646
| 0.756646
| 0
| 0.001579
| 0.217901
| 1,620
| 73
| 72
| 22.191781
| 0.770324
| 0
| 0
| 0.604651
| 0
| 0
| 0.082149
| 0.082149
| 0
| 0
| 0
| 0
| 0
| 1
| 0.395349
| false
| 0
| 0
| 0.093023
| 0.651163
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
1f9bb36c6ba45c57c587dafa0b306ca06e93ca65
| 138
|
py
|
Python
|
app/utils/print.py
|
hsnuonly/SuperPanda
|
5c590f625110f712caf046a6ca40bcc93173a005
|
[
"MIT"
] | 3
|
2020-12-18T21:58:55.000Z
|
2020-12-26T09:21:14.000Z
|
app/utils/print.py
|
hsnuonly/SuperPanda
|
5c590f625110f712caf046a6ca40bcc93173a005
|
[
"MIT"
] | null | null | null |
app/utils/print.py
|
hsnuonly/SuperPanda
|
5c590f625110f712caf046a6ca40bcc93173a005
|
[
"MIT"
] | 1
|
2021-05-29T15:53:46.000Z
|
2021-05-29T15:53:46.000Z
|
import json
from .json import default
def jdump(x, default=default, indent=2):
    """Pretty-print *x* to stdout as indented JSON.

    Uses the project's ``default`` serializer hook unless one is supplied.
    """
    rendered = json.dumps(x, default=default, indent=indent)
    print(rendered)
| 19.714286
| 56
| 0.73913
| 21
| 138
| 4.857143
| 0.52381
| 0.156863
| 0.294118
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008403
| 0.137681
| 138
| 6
| 57
| 23
| 0.84874
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0.25
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1fccdaa4f1fdfa8befc3cb938cca4e0fad9d8e33
| 3,401
|
py
|
Python
|
ktapp/sqls.py
|
cu2/KT
|
8a0964b77dce150358637faa679d969a07e42f07
|
[
"CC-BY-3.0"
] | 5
|
2015-04-13T09:44:31.000Z
|
2017-10-19T01:07:58.000Z
|
ktapp/sqls.py
|
cu2/KT
|
8a0964b77dce150358637faa679d969a07e42f07
|
[
"CC-BY-3.0"
] | 49
|
2015-02-15T07:12:05.000Z
|
2022-03-11T23:11:43.000Z
|
ktapp/sqls.py
|
cu2/KT
|
8a0964b77dce150358637faa679d969a07e42f07
|
[
"CC-BY-3.0"
] | null | null | null |
# SQL query constants for the ktapp rating/recommendation features.
# Queries using %s placeholders are meant for parameterized execution;
# NOTE(review): RECOMMENDED_FILMS* use str.format-style {film_id}/{user_id}
# placeholders instead — callers must substitute trusted integer values
# (string formatting into SQL is injection-prone; verify call sites).

# Overall similarity between two users (no genre keyword).
# Parameters: user_1_id, user_2_id.
SIMILARITY = '''
SELECT
uur.number_of_ratings,
uur.similarity AS sim
FROM ktapp_useruserrating uur
WHERE uur.user_1_id = %s AND uur.user_2_id = %s AND uur.keyword_id IS NULL
'''

# Per-genre similarity between two users, best-matching genres first.
# Parameters: user_1_id, user_2_id.
SIMILARITY_PER_GENRE = '''
SELECT
uur.number_of_ratings,
uur.similarity AS sim,
k.id, k.name, k.slug_cache
FROM ktapp_useruserrating uur
INNER JOIN ktapp_keyword k ON k.id = uur.keyword_id
WHERE uur.user_1_id = %s AND uur.user_2_id = %s
ORDER BY sim DESC
'''

# Users most similar to a given user overall (excluding one user id),
# requiring a minimum number of shared ratings.
# Parameters: user_1_id, excluded user_2_id, min number_of_ratings.
SIMILAR_USERS = '''
SELECT
uur.number_of_ratings,
uur.similarity AS sim,
u.id, u.username, u.slug_cache
FROM ktapp_useruserrating uur
INNER JOIN ktapp_ktuser u ON u.id = uur.user_2_id
WHERE uur.user_1_id = %s AND uur.user_2_id != %s
AND uur.number_of_ratings >= %s
AND uur.keyword_id IS NULL
ORDER BY sim DESC, uur.number_of_ratings DESC, u.username, u.id
'''

# Same as SIMILAR_USERS but restricted to one genre keyword.
# Parameters: user_1_id, excluded user_2_id, min number_of_ratings, keyword_id.
SIMILAR_USERS_PER_GENRE = '''
SELECT
uur.number_of_ratings,
uur.similarity AS sim,
u.id, u.username, u.slug_cache
FROM ktapp_useruserrating uur
INNER JOIN ktapp_ktuser u ON u.id = uur.user_2_id
WHERE uur.user_1_id = %s AND uur.user_2_id != %s
AND uur.number_of_ratings >= %s
AND uur.keyword_id = %s
ORDER BY sim DESC, uur.number_of_ratings DESC, u.username, u.id
'''

# Top 10 films recommended for a film (anonymous visitor).
# Format placeholders: film_id.
RECOMMENDED_FILMS = '''
SELECT f.*
FROM ktapp_film f
INNER JOIN ktapp_filmfilmrecommendation ffr ON ffr.film_1_id = {film_id} AND ffr.film_2_id = f.id
ORDER BY ffr.score DESC, f.number_of_ratings DESC
LIMIT 10
'''

# Top 10 recommended films, excluding films the user has already voted on.
# Format placeholders: film_id, user_id.
RECOMMENDED_FILMS_LOGGED_IN = '''
SELECT f.*
FROM ktapp_film f
INNER JOIN ktapp_filmfilmrecommendation ffr ON ffr.film_1_id = {film_id} AND ffr.film_2_id = f.id
LEFT JOIN ktapp_vote v ON v.film_id = f.id AND v.user_id = {user_id}
WHERE v.id IS NULL
ORDER BY ffr.score DESC, f.number_of_ratings DESC
LIMIT 10
'''

# Winning film of the Vapiti gold award (round 2, type 'G') for a year,
# weighted by core members' vapiti_weight. Parameter: year.
VAPITI_WINNER_GOLD = '''
SELECT v.film_id, f.orig_title, SUM(u.vapiti_weight) AS sum_vapiti_weight, COUNT(distinct u.id) AS user_count, f.number_of_ratings, f.average_rating
FROM ktapp_vapitivote v
INNER JOIN ktapp_ktuser u ON u.id = v.user_id
INNER JOIN ktapp_film f ON f.id = v.film_id
WHERE u.core_member=1
AND v.year = %s
AND v.vapiti_round = 2
AND v.vapiti_type = 'G'
GROUP BY v.film_id
ORDER BY user_count DESC, sum_vapiti_weight DESC
LIMIT 1
'''

# Winning (film, artist) pair for the silver award, female category
# (vapiti_type 'F'). Parameter: year.
VAPITI_WINNER_SILVER_FEMALE = '''
SELECT r.id, a.name, f.orig_title, SUM(u.vapiti_weight) AS sum_vapiti_weight, COUNT(distinct u.id) AS user_count, f.number_of_ratings, f.average_rating
FROM ktapp_vapitivote v
INNER JOIN ktapp_ktuser u ON u.id = v.user_id
INNER JOIN ktapp_film f ON f.id = v.film_id
INNER JOIN ktapp_artist a ON a.id = v.artist_id
INNER JOIN ktapp_filmartistrelationship r ON r.film_id = v.film_id AND r.artist_id = v.artist_id
WHERE u.core_member=1
AND v.year = %s
AND v.vapiti_round = 2
AND v.vapiti_type = 'F'
GROUP BY f.id, a.id
ORDER BY user_count DESC, sum_vapiti_weight DESC
LIMIT 1
'''

# Winning (film, artist) pair for the silver award, male category
# (vapiti_type 'M'). Parameter: year.
VAPITI_WINNER_SILVER_MALE = '''
SELECT r.id, a.name, f.orig_title, SUM(u.vapiti_weight) AS sum_vapiti_weight, COUNT(distinct u.id) AS user_count, f.number_of_ratings, f.average_rating
FROM ktapp_vapitivote v
INNER JOIN ktapp_ktuser u ON u.id = v.user_id
INNER JOIN ktapp_film f ON f.id = v.film_id
INNER JOIN ktapp_artist a ON a.id = v.artist_id
INNER JOIN ktapp_filmartistrelationship r ON r.film_id = v.film_id AND r.artist_id = v.artist_id
WHERE u.core_member=1
AND v.year = %s
AND v.vapiti_round = 2
AND v.vapiti_type = 'M'
GROUP BY f.id, a.id
ORDER BY user_count DESC, sum_vapiti_weight DESC
LIMIT 1
'''
| 29.068376
| 151
| 0.764187
| 672
| 3,401
| 3.625
| 0.11756
| 0.059113
| 0.086207
| 0.059113
| 0.898194
| 0.89491
| 0.888752
| 0.880131
| 0.880131
| 0.842775
| 0
| 0.009272
| 0.143781
| 3,401
| 116
| 152
| 29.318966
| 0.827266
| 0
| 0
| 0.76
| 0
| 0.03
| 0.915907
| 0.083505
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1fecdd426464cfde8c94820f1cb78f1c7b86e238
| 5,792
|
py
|
Python
|
tests/test_density_SphereVoxelization.py
|
pbuslaev/freud
|
ed83bdc63ee8fddba78f070ce9ddf2a0021d67b8
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_density_SphereVoxelization.py
|
pbuslaev/freud
|
ed83bdc63ee8fddba78f070ce9ddf2a0021d67b8
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_density_SphereVoxelization.py
|
pbuslaev/freud
|
ed83bdc63ee8fddba78f070ce9ddf2a0021d67b8
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import freud
import unittest
from SphereVoxelization_fft import compute_3d, compute_2d
class TestSphereVoxelization(unittest.TestCase):
    """Tests for freud.density.SphereVoxelization.

    Cross-checks the library's voxelization against an independent
    FFT-based reference (compute_2d / compute_3d from the local
    SphereVoxelization_fft module) and exercises attribute access,
    box-dimensionality errors, repr round-tripping, and plotting.
    """

    def test_random_points_2d(self):
        width = 100
        r_max = 10.0
        num_points = 10
        box_size = r_max*10
        box, points = freud.data.make_random_system(
            box_size, num_points, is2D=True)
        # Width may be passed as a scalar, a tuple, or a list.
        for w in (width, (width, width), [width, width]):
            vox = freud.density.SphereVoxelization(w, r_max)
            # Test access: properties must raise before compute() is called.
            with self.assertRaises(AttributeError):
                vox.box
            with self.assertRaises(AttributeError):
                vox.voxels
            vox.compute(system=(box, points))
            # Test access: after compute() the same properties are readable.
            vox.box
            vox.voxels
            # Verify the output dimensions are correct
            self.assertEqual(vox.voxels.shape, (width, width))
            self.assertEqual(np.prod(vox.voxels.shape), np.prod(vox.width))
            # Verify the calculation is correct
            # here we assert that the calculations (from two different methods)
            # are the same up to rounding error
            fft_vox = compute_2d(box_size, width, points, r_max)
            num_same = len(np.where(np.isclose(vox.voxels - fft_vox,
                                               np.zeros(fft_vox.shape)))[0])
            total_num = np.prod(fft_vox.shape)
            # At least 95% of voxels must match the FFT reference.
            self.assertGreater(num_same / total_num, .95)
            # Verify that the voxels are all 1's and 0's
            num_zeros = len(np.where(np.isclose(
                vox.voxels, np.zeros(vox.voxels.shape)))[0])
            num_ones = len(np.where(np.isclose(
                vox.voxels, np.ones(vox.voxels.shape)))[0])
            self.assertGreater(num_zeros, 0)
            self.assertGreater(num_ones, 0)
            self.assertEqual(num_zeros + num_ones, np.prod(vox.voxels.shape))

    def test_random_points_3d(self):
        # Same checks as the 2D test, with a 3D box and 3-element widths.
        width = 100
        r_max = 10.0
        num_points = 10
        box_size = r_max*10
        box, points = freud.data.make_random_system(
            box_size, num_points, is2D=False)
        for w in (width, (width, width, width), [width, width, width]):
            vox = freud.density.SphereVoxelization(w, r_max)
            # Test access: properties must raise before compute() is called.
            with self.assertRaises(AttributeError):
                vox.box
            with self.assertRaises(AttributeError):
                vox.voxels
            vox.compute(system=(box, points))
            # Test access: after compute() the same properties are readable.
            vox.box
            vox.voxels
            # Verify the output dimensions are correct
            self.assertEqual(vox.voxels.shape, (width, width, width))
            # Verify the calculation is correct
            # here we assert that the calculations (from two different methods)
            # are the same up to rounding error
            fft_vox = compute_3d(box_size, width, points, r_max)
            num_same = len(np.where(np.isclose(vox.voxels - fft_vox,
                                               np.zeros(fft_vox.shape)))[0])
            total_num = np.prod(fft_vox.shape)
            # At least 95% of voxels must match the FFT reference.
            self.assertGreater(num_same / total_num, .95)
            # Verify that the voxels are all 1's and 0's
            num_zeros = len(np.where(np.isclose(
                vox.voxels, np.zeros(vox.voxels.shape)))[0])
            num_ones = len(np.where(np.isclose(
                vox.voxels, np.ones(vox.voxels.shape)))[0])
            self.assertGreater(num_zeros, 0)
            self.assertGreater(num_ones, 0)
            self.assertEqual(num_zeros + num_ones, np.prod(vox.voxels.shape))

    def test_change_box_dimension(self):
        width = 100
        r_max = 10.0
        num_points = 100
        box_size = r_max*3.1
        # test that computing a 3D system after computing a 2D system will fail
        box, points = freud.data.make_random_system(
            box_size, num_points, is2D=True)
        vox = freud.density.SphereVoxelization(width, r_max)
        vox.compute(system=(box, points))
        test_box, test_points = freud.data.make_random_system(
            box_size, num_points, is2D=False)
        with self.assertRaises(ValueError):
            vox.compute((test_box, test_points))
        # test that computing a 2D system after computing a 3D system will fail
        box, points = freud.data.make_random_system(
            box_size, num_points, is2D=False)
        vox = freud.density.SphereVoxelization(width, r_max)
        vox.compute(system=(box, points))
        test_box, test_points = freud.data.make_random_system(
            box_size, num_points, is2D=True)
        with self.assertRaises(ValueError):
            vox.compute((test_box, test_points))

    def test_repr(self):
        # repr() must evaluate back to an equivalent object.
        vox = freud.density.SphereVoxelization(100, 10.0)
        self.assertEqual(str(vox), str(eval(repr(vox))))
        # Use both signatures
        vox3 = freud.density.SphereVoxelization((98, 99, 100), 10.0)
        self.assertEqual(str(vox3), str(eval(repr(vox3))))

    def test_repr_png(self):
        width = 100
        r_max = 10.0
        num_points = 100
        box_size = r_max*3.1
        box, points = freud.data.make_random_system(
            box_size, num_points, is2D=True)
        vox = freud.density.SphereVoxelization(width, r_max)
        # Plotting before compute() must fail; _repr_png_ returns None then.
        with self.assertRaises(AttributeError):
            vox.plot()
        self.assertEqual(vox._repr_png_(), None)
        vox.compute((box, points))
        vox.plot()
        vox = freud.density.SphereVoxelization(width, r_max)
        test_box = freud.box.Box.cube(box_size)
        vox.compute((test_box, points))
        vox.plot()
        # NOTE(review): _repr_png_ still returns None after plotting a 3D
        # system here — presumably plotting is unsupported for 3D; confirm.
        self.assertEqual(vox._repr_png_(), None)


if __name__ == '__main__':
    unittest.main()
| 37.128205
| 79
| 0.593405
| 733
| 5,792
| 4.519782
| 0.144611
| 0.051615
| 0.040749
| 0.040145
| 0.833384
| 0.817084
| 0.802596
| 0.789919
| 0.761545
| 0.761545
| 0
| 0.023952
| 0.308011
| 5,792
| 155
| 80
| 37.367742
| 0.802645
| 0.111015
| 0
| 0.763636
| 0
| 0
| 0.001559
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.045455
| false
| 0
| 0.036364
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1f3229c8942f530b65cdd25fd7a4f14330f824a7
| 5,700
|
py
|
Python
|
ccws/test/test_bitfinex.py
|
applezjm/testsub
|
051348bb852d8e3cefe764a6315f53da66cd413e
|
[
"MIT"
] | null | null | null |
ccws/test/test_bitfinex.py
|
applezjm/testsub
|
051348bb852d8e3cefe764a6315f53da66cd413e
|
[
"MIT"
] | null | null | null |
ccws/test/test_bitfinex.py
|
applezjm/testsub
|
051348bb852d8e3cefe764a6315f53da66cd413e
|
[
"MIT"
] | null | null | null |
from interruptingcow import timeout
from ccws.bitfinex import Bitfinex
from ccws.test.test_base import Test
from ccws.configs import HOME_PATH
class TestBitfinex(Test, Bitfinex):
    """Regression tests for Bitfinex order-book / trade CSV generation.

    Each test replays a recorded raw feed out of Redis through the
    collector pipeline and compares the produced CSV with a stored
    expected-output fixture. The six original copy-pasted test bodies are
    factored into the single _run_case helper.
    """

    def __init__(self, *args, **kwargs):
        # Initialise both bases explicitly: Bitfinex takes no extra args,
        # Test receives the unittest constructor arguments.
        Bitfinex.__init__(self)
        Test.__init__(self, *args, **kwargs)

    def _run_case(self, pair, data_type, date, input_file, expected_output, wait):
        """Replay one recorded feed and diff the generated CSV.

        :param pair: currency pair, e.g. 'BTC/USD'
        :param data_type: 'order' or 'trade'
        :param date: recording date 'YYYY/MM/DD', also the output sub-dir
        :param input_file: gzipped raw-feed fixture pushed into Redis
        :param expected_output: gzipped expected CSV fixture
        :param wait: seconds to allow each processing stage before it is
            interrupted (matches the per-case timeouts of the original tests)
        """
        self.initialization(pair, data_type, date)
        input_key = self.Config['RedisCollectKey']
        self.write_into_redis(input_key, self.RedisConnection, input_file)
        for stage in (self.process_data, self.write_data_csv):
            try:
                # Bound each stage with a timeout and treat the interruption
                # as normal completion (same behaviour as the original tests).
                with timeout(wait, exception=RuntimeWarning):
                    stage()
            except RuntimeWarning:
                pass
        fn1 = expected_output
        fn2 = '%s/%s/%s' % (HOME_PATH, date, self.Config['FileName'])
        self.compare_two_csv(fn1, fn2)
        self.delete_tmp_file(fn2)

    def test_BTC_USD_order(self):
        self._run_case('BTC/USD', 'order', '2018/06/28',
                       'BTC_USD-bitfinex_order.gz',
                       'BTC_USD-bitfinex.book.csv.gz', 15)

    def test_BTC_USD_trade(self):
        self._run_case('BTC/USD', 'trade', '2018/06/28',
                       'BTC_USD-bitfinex_ticker.gz',
                       'BTC_USD-bitfinex.trade.csv.gz', 10)

    def test_BCH_USD_order(self):
        self._run_case('BCH/USD', 'order', '2018/06/29',
                       'BCH_USD-bitfinex_order.gz',
                       'BCH_USD-bitfinex.book.csv.gz', 15)

    def test_BCH_USD_trade(self):
        self._run_case('BCH/USD', 'trade', '2018/06/29',
                       'BCH_USD-bitfinex_ticker.gz',
                       'BCH_USD-bitfinex.trade.csv.gz', 10)

    def test_ETH_USD_order(self):
        self._run_case('ETH/USD', 'order', '2018/06/29',
                       'ETH_USD-bitfinex_order.gz',
                       'ETH_USD-bitfinex.book.csv.gz', 15)

    def test_ETH_USD_trade(self):
        self._run_case('ETH/USD', 'trade', '2018/06/29',
                       'ETH_USD-bitfinex_ticker.gz',
                       'ETH_USD-bitfinex.trade.csv.gz', 10)
| 31.666667
| 82
| 0.570702
| 615
| 5,700
| 5.091057
| 0.102439
| 0.053657
| 0.045992
| 0.022996
| 0.929096
| 0.917598
| 0.871607
| 0.808368
| 0.808368
| 0.722134
| 0
| 0.025654
| 0.302456
| 5,700
| 179
| 83
| 31.843575
| 0.761821
| 0
| 0
| 0.739726
| 0
| 0
| 0.154764
| 0.056852
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047945
| false
| 0.082192
| 0.027397
| 0
| 0.082192
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
1f87b1162a95040302529277147878273814b4f5
| 3,154
|
py
|
Python
|
2020/day11/main.py
|
AlinMH/advent-of-code
|
8ff9cc16884f99a5e227ad0cfec2e12072f105c6
|
[
"MIT"
] | null | null | null |
2020/day11/main.py
|
AlinMH/advent-of-code
|
8ff9cc16884f99a5e227ad0cfec2e12072f105c6
|
[
"MIT"
] | null | null | null |
2020/day11/main.py
|
AlinMH/advent-of-code
|
8ff9cc16884f99a5e227ad0cfec2e12072f105c6
|
[
"MIT"
] | null | null | null |
import os
from copy import deepcopy
INPUT_FILE = os.path.join(os.path.dirname(__file__), "input")
def num_adj_occ1(i, j, mat):
    """Count occupied ('#') seats among the 8 immediate neighbours of (i, j).

    :param i: row index
    :param j: column index
    :param mat: rectangular grid of '.', 'L', '#' cells (rows are indexable)
    :return: number of directly adjacent occupied seats
    """
    # Fix: the original carried dead `xp += dx; yp += dy` statements copied
    # from the line-of-sight variant (num_adj_occ2); without a loop they had
    # no effect, so they are removed here.
    max_i = len(mat) - 1
    max_j = len(mat[0]) - 1
    directions = ((-1, 0), (1, 0), (0, -1), (0, 1),
                  (-1, -1), (-1, 1), (1, -1), (1, 1))
    num = 0
    for dx, dy in directions:
        xp, yp = i + dx, j + dy
        if 0 <= xp <= max_i and 0 <= yp <= max_j and mat[xp][yp] == "#":
            num += 1
    return num
def num_adj_occ2(i, j, mat):
    """Count occupied seats visible from (i, j) in the 8 directions.

    Walks each direction until the first seat (anything that is not floor
    '.') or the edge of the grid; an occupied seat ('#') counts, an empty
    seat ('L') blocks the line of sight.
    """
    rows = len(mat)
    cols = len(mat[0])
    occupied = 0
    for dx, dy in ((-1, 0), (1, 0), (0, -1), (0, 1),
                   (-1, -1), (-1, 1), (1, -1), (1, 1)):
        x, y = i + dx, j + dy
        while 0 <= x < rows and 0 <= y < cols:
            seat = mat[x][y]
            if seat != ".":
                # First seat along this ray decides: count if occupied,
                # then stop looking further in this direction.
                if seat == "#":
                    occupied += 1
                break
            x += dx
            y += dy
    return occupied
def part1():
    """Solve part 1: run the adjacent-neighbour seating simulation to a
    fixed point and return the number of occupied seats.

    Rules: an empty seat ('L') with no occupied neighbours becomes occupied;
    an occupied seat ('#') with 4 or more occupied neighbours is vacated;
    floor ('.') never changes.
    """
    # Fix: dropped the `current_round` counter (incremented but never read)
    # and replaced the manual nested counting loops with sum()/count().
    with open(INPUT_FILE, "r") as fp:
        prev = list(map(list, fp.read().splitlines()))
    current = []
    while True:
        if not current:
            current = deepcopy(prev)
        for i, row in enumerate(prev):
            for j, column in enumerate(row):
                if column == ".":
                    continue  # floor never changes
                elif column == "L" and num_adj_occ1(i, j, prev) == 0:
                    current[i][j] = "#"
                elif column == "#" and num_adj_occ1(i, j, prev) >= 4:
                    current[i][j] = "L"
        if prev == current:
            # Fixed point reached: count occupied seats.
            return sum(row.count("#") for row in current)
        prev = deepcopy(current)
def part2():
    """Solve part 2: simulate seating rounds using line-of-sight neighbours.

    A free seat ('L') that sees no occupied seats becomes occupied; an
    occupied seat ('#') that sees five or more occupied seats is vacated.

    Returns:
        The number of occupied seats once the grid stops changing.
    """
    with open(INPUT_FILE, "r") as fp:
        prev = list(map(list, fp.read().splitlines()))
    # Copy once up front; the original re-created this lazily in the loop
    # and tracked an unused round counter.
    current = deepcopy(prev)
    while True:
        for i, row in enumerate(prev):
            for j, column in enumerate(row):
                if column == "L" and num_adj_occ2(i, j, prev) == 0:
                    current[i][j] = "#"
                elif column == "#" and num_adj_occ2(i, j, prev) >= 5:
                    current[i][j] = "L"
        if prev == current:
            # Fixed point reached: count the occupied seats.
            return sum(row.count("#") for row in current)
        prev = deepcopy(current)
if __name__ == "__main__":
    # Run both puzzle parts in order and print each answer on its own line.
    for solver in (part1, part2):
        print(solver())
| 27.189655
| 87
| 0.403297
| 376
| 3,154
| 3.279255
| 0.170213
| 0.025953
| 0.034063
| 0.038929
| 0.87429
| 0.85807
| 0.85807
| 0.80292
| 0.80292
| 0.80292
| 0
| 0.040024
| 0.461319
| 3,154
| 115
| 88
| 27.426087
| 0.685697
| 0
| 0
| 0.808989
| 0
| 0
| 0.009829
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044944
| false
| 0
| 0.022472
| 0
| 0.11236
| 0.022472
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2f159244826ca5db07bd57c24695f58468690a77
| 240
|
py
|
Python
|
repositories/permission.py
|
jsmsalt/jobs-api
|
a2c26522bf7f997558f6c6608524187785c830e5
|
[
"MIT"
] | null | null | null |
repositories/permission.py
|
jsmsalt/jobs-api
|
a2c26522bf7f997558f6c6608524187785c830e5
|
[
"MIT"
] | 5
|
2021-11-29T04:40:14.000Z
|
2021-11-29T12:33:44.000Z
|
repositories/permission.py
|
jsmsalt/jobs-api
|
a2c26522bf7f997558f6c6608524187785c830e5
|
[
"MIT"
] | null | null | null |
from repositories.base import BaseRepository
from models.permission import Permission, PermissionCreate, PermissionUpdate
class PermissionRepository(BaseRepository[Permission, PermissionCreate, PermissionUpdate]):
    """Repository exposing CRUD operations for Permission entities.

    Concrete specialisation of the generic BaseRepository, parameterised
    with the Permission model and its create/update schemas.
    """

    # Model class the generic BaseRepository operates on.
    entity = Permission
| 34.285714
| 91
| 0.854167
| 20
| 240
| 10.25
| 0.6
| 0.253659
| 0.409756
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095833
| 240
| 6
| 92
| 40
| 0.9447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2f4531d537895cfc1fbd5362b57631b0f3379c8c
| 11,003
|
py
|
Python
|
src/wellsfargo/tests/api/test_prequal.py
|
robertdev5/django-oscar
|
35c8554c988ad3a901de1d4772c4372c190df1e3
|
[
"0BSD"
] | null | null | null |
src/wellsfargo/tests/api/test_prequal.py
|
robertdev5/django-oscar
|
35c8554c988ad3a901de1d4772c4372c190df1e3
|
[
"0BSD"
] | 8
|
2021-03-18T22:26:27.000Z
|
2022-02-10T11:28:57.000Z
|
src/wellsfargo/tests/api/test_prequal.py
|
supertech-999/django-oscar
|
35c8554c988ad3a901de1d4772c4372c190df1e3
|
[
"0BSD"
] | null | null | null |
from rest_framework import status
from rest_framework.reverse import reverse
from wellsfargo.tests.base import BaseTest
from wellsfargo.tests import responses
import mock
from wellsfargo.models import PreQualificationRequest
class PreQualificationRequestTest(BaseTest):
    """API tests for the Wells Fargo pre-qualification endpoints.

    Each test posts applicant data to a prequal endpoint (the SOAP
    transport is mocked where network calls would occur) and asserts on
    the serialized response; most also verify that the requesting
    client's IP address was persisted on the stored
    PreQualificationRequest row.
    """

    def setUp(self):
        # Run BaseTest fixtures, then fix the client IP used by every
        # request in these tests.
        super().setUp()
        # Test IPAddress to use
        self.ip_address = '127.0.0.1'

    @mock.patch('soap.get_transport')
    def test_prequal_successful(self, get_transport):
        """Approved prescreen, then an approved application completion."""
        # SOAP transport returns a canned "prequal approved" reply.
        get_transport.return_value = self._build_transport_with_reply(responses.prequal_successful)
        url = reverse('wfrs-api-prequal')
        data = {
            'first_name': 'Joe',
            'last_name': 'Schmoe',
            'line1': '123 Evergreen Terrace',
            'city': 'Springfield',
            'state': 'NY',
            'postcode': '10001',
            'phone': '+1 (212) 209-1333',
        }
        response = self.client.post(url, data, format='json', REMOTE_ADDR=self.ip_address)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['status'], 'A')
        self.assertEqual(response.data['is_approved'], True)
        self.assertEqual(response.data['message'], 'approved')
        self.assertEqual(response.data['credit_limit'], '8500.00')
        self.assertEqual(response.data['customer_response'], '')
        # Complete the application: OTB lookup succeeds and the rendered
        # page reports approval, account number and credit limit.
        get_transport.return_value = self._build_transport_with_reply(responses.otb_successful)
        url = "{}?A=41".format(reverse('wfrs-api-prequal-app-complete'))
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertInHTML('<h1>Your Wells Fargo application was approved.</h1>', response.content.decode())
        self.assertInHTML('<p>Your account number is 9999999999999999.</p>', response.content.decode())
        self.assertInHTML('<p>Your credit limit is 9000.00.</p>', response.content.decode())
        # Check if IPAddress was stored
        prequal_request = PreQualificationRequest.objects.first()
        self.assertEqual(prequal_request.ip_address, self.ip_address)

    @mock.patch('soap.get_transport')
    def test_prequal_failed_prescreen(self, get_transport):
        """Failed prescreen surfaces a 400 with the SOAP error message."""
        get_transport.return_value = self._build_transport_with_reply(responses.prequal_failed)
        url = reverse('wfrs-api-prequal')
        data = {
            'first_name': 'Joe',
            'last_name': 'Schmoe',
            'line1': '123 Evergreen Terrace',
            'city': 'Springfield',
            'state': 'NY',
            'postcode': '10001',
            'phone': '+1 (212) 209-1333',
        }
        response = self.client.post(url, data, format='json', REMOTE_ADDR=self.ip_address)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data['non_field_errors'], ['Phone is blank or invalid.'])
        # Check if IPAddress was stored
        prequal_request = PreQualificationRequest.objects.first()
        self.assertEqual(prequal_request.ip_address, self.ip_address)

    @mock.patch('soap.get_transport')
    def test_prequal_failed_application(self, get_transport):
        """Approved prescreen, but the full application is denied (OTB)."""
        get_transport.return_value = self._build_transport_with_reply(responses.prequal_successful)
        url = reverse('wfrs-api-prequal')
        data = {
            'first_name': 'Joe',
            'last_name': 'Schmoe',
            'line1': '123 Evergreen Terrace',
            'city': 'Springfield',
            'state': 'NY',
            'postcode': '10001',
            'phone': '+1 (212) 209-1333',
        }
        response = self.client.post(url, data, format='json', REMOTE_ADDR=self.ip_address)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['status'], 'A')
        self.assertEqual(response.data['is_approved'], True)
        self.assertEqual(response.data['message'], 'approved')
        self.assertEqual(response.data['credit_limit'], '8500.00')
        self.assertEqual(response.data['customer_response'], '')
        # Application completion comes back denied; page renders the
        # not-approved message (note: no ?A= query parameter here).
        get_transport.return_value = self._build_transport_with_reply(responses.otb_denied)
        url = reverse('wfrs-api-prequal-app-complete')
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertInHTML('<h1>Your Wells Fargo application was not approved.</h1>', response.content.decode())
        # Check if IPAddress was stored
        prequal_request = PreQualificationRequest.objects.first()
        self.assertEqual(prequal_request.ip_address, self.ip_address)

    @mock.patch('soap.get_transport')
    def test_prequal_failed_error(self, get_transport):
        """Approved prescreen, then an OTB error during completion."""
        get_transport.return_value = self._build_transport_with_reply(responses.prequal_successful)
        url = reverse('wfrs-api-prequal')
        data = {
            'first_name': 'Joe',
            'last_name': 'Schmoe',
            'line1': '123 Evergreen Terrace',
            'city': 'Springfield',
            'state': 'NY',
            'postcode': '10001',
            'phone': '+1 (212) 209-1333',
        }
        response = self.client.post(url, data, format='json', REMOTE_ADDR=self.ip_address)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['status'], 'A')
        self.assertEqual(response.data['is_approved'], True)
        self.assertEqual(response.data['message'], 'approved')
        self.assertEqual(response.data['credit_limit'], '8500.00')
        self.assertEqual(response.data['customer_response'], '')
        # OTB error is treated the same as a denial by the completion page.
        get_transport.return_value = self._build_transport_with_reply(responses.otb_error)
        url = "{}?A=41".format(reverse('wfrs-api-prequal-app-complete'))
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertInHTML('<h1>Your Wells Fargo application was not approved.</h1>', response.content.decode())
        # Check if IPAddress was stored
        prequal_request = PreQualificationRequest.objects.first()
        self.assertEqual(prequal_request.ip_address, self.ip_address)

    def test_sdk_prequal_response(self):
        """Record an SDK-originated prequal result, then the customer response."""
        url = reverse('wfrs-api-prequal-sdk-response')
        data = {
            'first_name': 'Joe',
            'last_name': 'Schmoe',
            'line1': '123 Evergreen Terrace',
            'city': 'Springfield',
            'state': 'NY',
            'postcode': '10001',
            'status': 'A',
            'credit_limit': '7500.00',
            'response_id': 'ABC123',
        }
        response = self.client.post(url, data, format='json', REMOTE_ADDR=self.ip_address)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['status'], 'A')
        self.assertEqual(response.data['is_approved'], True)
        self.assertEqual(response.data['message'], '')
        self.assertEqual(response.data['credit_limit'], '7500.00')
        self.assertEqual(response.data['customer_response'], '')
        # Post the customer's response; the serialized record now carries it.
        url = reverse('wfrs-api-prequal-customer-response')
        data = {
            'customer_response': 'SDKPRESENTED',
        }
        response = self.client.post(url, data, format='json', REMOTE_ADDR=self.ip_address)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['status'], 'A')
        self.assertEqual(response.data['is_approved'], True)
        self.assertEqual(response.data['message'], '')
        self.assertEqual(response.data['credit_limit'], '7500.00')
        self.assertEqual(response.data['customer_response'], 'SDKPRESENTED')
        # Check if IPAddress was stored
        prequal_request = PreQualificationRequest.objects.first()
        self.assertEqual(prequal_request.ip_address, self.ip_address)

    def test_sdk_resume_prequal(self):
        """Resume a stored SDK prequal offer and fetch its full response data."""
        # Set-up a prequal response
        url = reverse('wfrs-api-prequal-sdk-response')
        data = {
            'first_name': 'Joe',
            'last_name': 'Schmoe',
            'line1': '123 Evergreen Terrace',
            'city': 'Springfield',
            'state': 'NY',
            'postcode': '10001',
            'status': 'A',
            'credit_limit': '7500.00',
            'response_id': 'ABC123',
        }
        response = self.client.post(url, data, format='json', REMOTE_ADDR=self.ip_address)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['status'], 'A')
        self.assertEqual(response.data['is_approved'], True)
        self.assertEqual(response.data['message'], '')
        self.assertEqual(response.data['credit_limit'], '7500.00')
        self.assertEqual(response.data['customer_response'], '')
        url = reverse('wfrs-api-prequal-customer-response')
        data = {
            'customer_response': 'SDKPRESENTED',
        }
        response = self.client.post(url, data, format='json', REMOTE_ADDR=self.ip_address)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['status'], 'A')
        self.assertEqual(response.data['is_approved'], True)
        self.assertEqual(response.data['message'], '')
        self.assertEqual(response.data['credit_limit'], '7500.00')
        self.assertEqual(response.data['customer_response'], 'SDKPRESENTED')
        # Hit the resume view
        prequal_request = PreQualificationRequest.objects.first()
        url = prequal_request.get_resume_offer_url(next_url='/my-redirect/')
        response = self.client.get(url)
        self.assertRedirects(response, '/my-redirect/', fetch_redirect_response=False)
        # Fetch the response data
        url = reverse('wfrs-api-prequal-sdk-response')
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['request']['first_name'], 'Joe')
        self.assertEqual(response.data['request']['last_name'], 'Schmoe')
        self.assertEqual(response.data['request']['line1'], '123 Evergreen Terrace')
        self.assertEqual(response.data['request']['line2'], None)
        self.assertEqual(response.data['request']['city'], 'Springfield')
        self.assertEqual(response.data['request']['state'], 'NY')
        self.assertEqual(response.data['request']['postcode'], '10001')
        self.assertEqual(response.data['status'], 'A')
        self.assertEqual(response.data['is_approved'], True)
        self.assertEqual(response.data['message'], '')
        self.assertEqual(response.data['credit_limit'], '7500.00')
        self.assertEqual(response.data['customer_response'], 'SDKPRESENTED')
        self.assertEqual(response.data['full_application_url'], '&mn=1111111111')
        self.assertEqual(response.data['offer_indicator'], '')
        self.assertEqual(response.data['response_id'], 'ABC123')
        self.assertEqual(response.data['sdk_application_result'], None)
| 45.094262
| 111
| 0.647005
| 1,219
| 11,003
| 5.66776
| 0.11977
| 0.149805
| 0.213055
| 0.203213
| 0.860327
| 0.820813
| 0.815603
| 0.798379
| 0.792734
| 0.792734
| 0
| 0.03018
| 0.214033
| 11,003
| 243
| 112
| 45.279835
| 0.768733
| 0.021903
| 0
| 0.762626
| 0
| 0
| 0.212984
| 0.026507
| 0
| 0
| 0
| 0
| 0.378788
| 1
| 0.035354
| false
| 0
| 0.030303
| 0
| 0.070707
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2f5ea4e7f4886b735f3ffdb282394fc8b9faffcf
| 5,240
|
py
|
Python
|
app/forms.py
|
steve-njuguna-k/Flask-Blog
|
7ee0d74ed9af0f5dd092a4e0d7907fcc738bfd45
|
[
"MIT"
] | 1
|
2022-02-01T17:52:42.000Z
|
2022-02-01T17:52:42.000Z
|
app/forms.py
|
steve-njuguna-k/Flask-Blog
|
7ee0d74ed9af0f5dd092a4e0d7907fcc738bfd45
|
[
"MIT"
] | null | null | null |
app/forms.py
|
steve-njuguna-k/Flask-Blog
|
7ee0d74ed9af0f5dd092a4e0d7907fcc738bfd45
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField, SelectField, TextAreaField, BooleanField
from wtforms.validators import DataRequired, Email, Length, EqualTo
from flask_ckeditor import CKEditorField
class LoginForm(FlaskForm):
    """Sign-in form: email + password with a show-password toggle."""

    email = StringField(label='Email Address', validators=[DataRequired(), Email(message='⚠️ Enter A Valid Email Address!')], render_kw={"placeholder": "Email Address"})
    password = PasswordField(label='Password', validators=[DataRequired(), Length(min=6, max=255, message='⚠️ Password strength must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "Password"})
    # Checkbox wired to client-side JS (id='check') that reveals the password.
    show_password = BooleanField('Show password', id='check')
    submit = SubmitField(label=('Log In'))
class RegisterForm(FlaskForm):
    """Account sign-up form; confirm_password must match password."""

    first_name = StringField(label='First Name', validators=[DataRequired(), Length(min=3, max=255, message='⚠️ Name length must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "First Name"})
    last_name = StringField(label='Last Name', validators=[DataRequired(), Length(min=3, max=255, message='⚠️ Name length must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "Last Name"})
    email = StringField(label='Email Address', validators=[DataRequired(), Email(message='⚠️ Enter A Valid Email Address!')], render_kw={"placeholder": "Email Address"})
    password = PasswordField(label='Password', validators=[DataRequired(), Length(min=6, max=255, message='⚠️ Password strength must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "Password"})
    # EqualTo ties this field to 'password' so mismatches fail validation.
    confirm_password = PasswordField(label='Confirm Password', validators=[DataRequired(), EqualTo('password', message='⚠️ The Passwords Entered Do Not Match!')], render_kw={"placeholder": "Confirm Password"})
    submit = SubmitField(label=('Sign Up'))
class BlogPostsForm(FlaskForm):
    """New blog post form: title, rich-text body, category and tags."""

    title = StringField(label='Post Title', validators=[DataRequired(), Length(min=3, max=100, message='⚠️ Title length must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "New Post Title Here..."})
    # CKEditor rich-text body; content capped at 20000 characters.
    description = CKEditorField(label='Description',validators=[DataRequired(), Length(min=6, max=20000, message='⚠️ Content length must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "Write Your Post Content Here...", 'rows': 20})
    # Fixed category list; (value, label) pairs are identical strings.
    category = SelectField(label='Select Category',choices=[
    ('AI & Machine Learning', 'AI & Machine Learning'),
    ('Big Data', 'Big Data'),
    ('Blockchain & Cryptocurrency', 'Blockchain & Cryptocurrency'),
    ('Career Development', 'Career Development'),
    ('Cloud Computing', 'Cloud Computing'),
    ('Cybersecurity', 'Cybersecurity'),
    ('Design + UX', 'Design + UX'),
    ('DevOps', 'DevOps'),
    ('Fintech', 'Fintech'),
    ('IoT: The Internet of Things', 'IoT: The Internet of Things'),
    ('Robotics', 'Robotics'),
    ('SaaS', 'SaaS'),
    ('Software Development', 'Software Development')
    ], render_kw={"placeholder": "Choose Category"})
    tags = StringField(label='Tags', validators=[DataRequired(), Length(min=3, max=50, message='⚠️ Tag length must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "Tags (Add up to 5 short tags)"})
    submit = SubmitField('Submit')
class EditBlogPostsForm(FlaskForm):
    """Edit-post form: same fields as BlogPostsForm minus tags.

    Fix: the description max length was 10000 here but 20000 on
    BlogPostsForm, so a post created at full length could not be
    re-saved when edited; the limits are now aligned at 20000.
    """

    title = StringField(label='Post Title', validators=[DataRequired(), Length(min=3, max=100, message='⚠️ Title length must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "New Post Title Here..."})
    # Max length matches the create form (BlogPostsForm) so existing
    # long posts remain editable.
    description = CKEditorField(label='Description',validators=[DataRequired(), Length(min=6, max=20000, message='⚠️ Content length must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "Write Your Post Content Here...", 'rows': 20})
    # Fixed category list; (value, label) pairs are identical strings.
    category = SelectField(label='Select Category',choices=[
    ('AI & Machine Learning', 'AI & Machine Learning'),
    ('Big Data', 'Big Data'),
    ('Blockchain & Cryptocurrency', 'Blockchain & Cryptocurrency'),
    ('Career Development', 'Career Development'),
    ('Cloud Computing', 'Cloud Computing'),
    ('Cybersecurity', 'Cybersecurity'),
    ('Design + UX', 'Design + UX'),
    ('DevOps', 'DevOps'),
    ('Fintech', 'Fintech'),
    ('IoT: The Internet of Things', 'IoT: The Internet of Things'),
    ('Robotics', 'Robotics'),
    ('SaaS', 'SaaS'),
    ('Software Development', 'Software Development')
    ], render_kw={"placeholder": "Choose Category"})
    submit = SubmitField('Submit')
class CommentsForm(FlaskForm):
    """Single-field comment form (6-1000 characters)."""

    comment = TextAreaField(label = 'Comment',validators=[DataRequired(), Length(min=6, max=1000, message='⚠️ Comment length must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "Your Comment", 'rows': 5})
    submit= SubmitField('Submit')
class SearchForm(FlaskForm):
    """Blog search box (query of 3-100 characters)."""

    search = StringField(label='Search Blog Articles', validators=[DataRequired(), Length(min=3, max=100, message='⚠️ Search length must be between %(min)d and %(max)d characters!')], render_kw={"placeholder": "Search Blog Articles"})
    submit= SubmitField('Submit')
| 78.208955
| 258
| 0.679389
| 606
| 5,240
| 5.884488
| 0.19637
| 0.035895
| 0.08525
| 0.095625
| 0.727987
| 0.727987
| 0.708357
| 0.708357
| 0.708357
| 0.695177
| 0
| 0.012132
| 0.150573
| 5,240
| 67
| 259
| 78.208955
| 0.782745
| 0
| 0
| 0.655738
| 0
| 0
| 0.429498
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.081967
| 0.065574
| 0
| 0.540984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
c803dfe8937264dc5a3fabeb8c5da6050d74120a
| 214
|
py
|
Python
|
ignite/contrib/metrics/__init__.py
|
kagrze/ignite
|
18708a76f86623545311d35bc48673eac9e55591
|
[
"BSD-3-Clause"
] | 1
|
2020-03-04T20:07:45.000Z
|
2020-03-04T20:07:45.000Z
|
ignite/contrib/metrics/__init__.py
|
hefv57/ignite
|
a22a0f5e909ac70d2a1f76a60b6e84b2134f196c
|
[
"BSD-3-Clause"
] | null | null | null |
ignite/contrib/metrics/__init__.py
|
hefv57/ignite
|
a22a0f5e909ac70d2a1f76a60b6e84b2134f196c
|
[
"BSD-3-Clause"
] | null | null | null |
from ignite.contrib.metrics.average_precision import AveragePrecision
from ignite.contrib.metrics.roc_auc import ROC_AUC
import ignite.contrib.metrics.regression
from ignite.contrib.metrics.gpu_info import GpuInfo
| 42.8
| 69
| 0.878505
| 30
| 214
| 6.133333
| 0.466667
| 0.282609
| 0.434783
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065421
| 214
| 4
| 70
| 53.5
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c82eabaf2aacdf3ded15fd14514e8d1e3363348d
| 13,663
|
py
|
Python
|
wagtailmenus/tests/migrations/0010_auto_20161211_2116.py
|
pierremanceaux/wagtailmenus
|
8e2e81e74fa37a9d4139c6be7158cd3d1429ac1d
|
[
"MIT"
] | 329
|
2016-01-28T16:20:16.000Z
|
2022-01-31T03:43:54.000Z
|
wagtailmenus/tests/migrations/0010_auto_20161211_2116.py
|
pierremanceaux/wagtailmenus
|
8e2e81e74fa37a9d4139c6be7158cd3d1429ac1d
|
[
"MIT"
] | 337
|
2016-04-15T11:09:44.000Z
|
2022-01-31T10:01:32.000Z
|
wagtailmenus/tests/migrations/0010_auto_20161211_2116.py
|
pierremanceaux/wagtailmenus
|
8e2e81e74fa37a9d4139c6be7158cd3d1429ac1d
|
[
"MIT"
] | 105
|
2016-06-17T15:45:07.000Z
|
2022-01-21T21:23:56.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-11 21:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
import wagtailmenus.models.menuitems
import wagtailmenus.models.menus
class Migration(migrations.Migration):
    """Auto-generated Django migration (do not edit by hand).

    Creates custom menu/menu-item test models (with de/fr translated
    fields), adds translated fields to TopLevelPage, and enforces
    (site, handle) uniqueness on CustomFlatMenu.
    """

    dependencies = [
        ('wagtailmenus', '0020_auto_20161210_0004'),
        ('tests', '0009_typicalpage'),
    ]

    operations = [
        # Custom flat menu with translated heading fields (de/fr).
        migrations.CreateModel(
            name='CustomFlatMenu',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(help_text='For internal reference only.', max_length=255, verbose_name='title')),
                ('handle', models.SlugField(help_text='Used to reference this menu in templates etc. Must be unique for the selected site.', max_length=100, verbose_name='handle')),
                ('heading', models.CharField(blank=True, help_text='If supplied, appears above the menu when rendered.', max_length=255, verbose_name='heading')),
                ('max_levels', models.PositiveSmallIntegerField(choices=[(1, '1: No sub-navigation (flat)'), (2, '2: Allow 1 level of sub-navigation'), (3, '3: Allow 2 levels of sub-navigation'), (4, '4: Allow 3 levels of sub-navigation')], default=1, help_text='The maximum number of levels to display when rendering this menu. The value can be overidden by supplying a different <code>max_levels</code> value to the <code>{% flat_menu %}</code> tag in your templates.', verbose_name='maximum levels')),
                ('use_specific', models.PositiveSmallIntegerField(choices=[(0, 'Off (most efficient)'), (1, 'Auto'), (2, 'Top level'), (3, 'Always (least efficient)')], default=1, help_text="Controls how 'specific' pages objects are fetched and used when rendering this menu. This value can be overidden by supplying a different <code>use_specific</code> value to the <code>{% flat_menu %}</code> tag in your templates.", verbose_name='specific page usage')),
                ('heading_de', models.CharField(blank=True, max_length=255, verbose_name='heading (de)')),
                ('heading_fr', models.CharField(blank=True, max_length=255, verbose_name='heading (fr)')),
                ('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Site', verbose_name='site')),
            ],
            options={
                'abstract': False,
                'verbose_name': 'flat menu',
                'verbose_name_plural': 'flat menus',
            },
            bases=(models.Model, wagtailmenus.models.menus.Menu),
        ),
        # Item rows for CustomFlatMenu (ParentalKey back to the menu).
        migrations.CreateModel(
            name='CustomFlatMenuItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('link_url', models.CharField(blank=True, max_length=255, null=True, verbose_name='link to a custom URL')),
                ('link_text', models.CharField(blank=True, help_text="Provide the text to use for a custom URL, or set on an internal page link to use instead of the page's title.", max_length=255, verbose_name='link text')),
                ('handle', models.CharField(blank=True, help_text='Use this field to optionally specify an additional value for each menu item, which you can then reference in custom menu templates.', max_length=100, verbose_name='handle')),
                ('url_append', models.CharField(blank=True, help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='append to URL')),
                ('allow_subnav', models.BooleanField(default=False, help_text="NOTE: The sub-menu might not be displayed, even if checked. It depends on how the menu is used in this project's templates.", verbose_name='allow sub-menu for this item')),
                ('link_text_de', models.CharField(blank=True, max_length=255, verbose_name='link text (de)')),
                ('link_text_fr', models.CharField(blank=True, max_length=255, verbose_name='link text (fr)')),
                ('link_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Page', verbose_name='link to an internal page')),
                ('menu', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='menu_items', to='tests.CustomFlatMenu')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model, wagtailmenus.models.menuitems.MenuItem),
        ),
        # Custom main menu (one per site via OneToOneField).
        migrations.CreateModel(
            name='CustomMainMenu',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('max_levels', models.PositiveSmallIntegerField(choices=[(1, '1: No sub-navigation (flat)'), (2, '2: Allow 1 level of sub-navigation'), (3, '3: Allow 2 levels of sub-navigation'), (4, '4: Allow 3 levels of sub-navigation')], default=2, help_text='The maximum number of levels to display when rendering this menu. The value can be overidden by supplying a different <code>max_levels</code> value to the <code>{% main_menu %}</code> tag in your templates.', verbose_name='maximum levels')),
                ('use_specific', models.PositiveSmallIntegerField(choices=[(0, 'Off (most efficient)'), (1, 'Auto'), (2, 'Top level'), (3, 'Always (least efficient)')], default=1, help_text="Controls how 'specific' pages objects are fetched and used when rendering this menu. This value can be overidden by supplying a different <code>use_specific</code> value to the <code>{% main_menu %}</code> tag in your templates.", verbose_name='specific page usage')),
                ('site', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Site', verbose_name='site')),
            ],
            options={
                'abstract': False,
                'verbose_name': 'main menu',
                'verbose_name_plural': 'main menu',
            },
            bases=(models.Model, wagtailmenus.models.menus.Menu),
        ),
        # Item rows for CustomMainMenu (allow_subnav defaults to True here).
        migrations.CreateModel(
            name='CustomMainMenuItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('link_url', models.CharField(blank=True, max_length=255, null=True, verbose_name='link to a custom URL')),
                ('link_text', models.CharField(blank=True, help_text="Provide the text to use for a custom URL, or set on an internal page link to use instead of the page's title.", max_length=255, verbose_name='link text')),
                ('handle', models.CharField(blank=True, help_text='Use this field to optionally specify an additional value for each menu item, which you can then reference in custom menu templates.', max_length=100, verbose_name='handle')),
                ('url_append', models.CharField(blank=True, help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='append to URL')),
                ('allow_subnav', models.BooleanField(default=True, help_text="NOTE: The sub-menu might not be displayed, even if checked. It depends on how the menu is used in this project's templates.", verbose_name='allow sub-menu for this item')),
                ('link_text_de', models.CharField(blank=True, max_length=255, verbose_name='link text (de)')),
                ('link_text_fr', models.CharField(blank=True, max_length=255, verbose_name='link text (fr)')),
                ('link_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Page', verbose_name='link to an internal page')),
                ('menu', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='menu_items', to='tests.CustomMainMenu')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model, wagtailmenus.models.menuitems.MenuItem),
        ),
        # Custom item type attached to the stock wagtailmenus FlatMenu.
        migrations.CreateModel(
            name='FlatMenuCustomMenuItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('link_url', models.CharField(blank=True, max_length=255, null=True, verbose_name='link to a custom URL')),
                ('link_text', models.CharField(blank=True, help_text="Provide the text to use for a custom URL, or set on an internal page link to use instead of the page's title.", max_length=255, verbose_name='link text')),
                ('handle', models.CharField(blank=True, help_text='Use this field to optionally specify an additional value for each menu item, which you can then reference in custom menu templates.', max_length=100, verbose_name='handle')),
                ('url_append', models.CharField(blank=True, help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='append to URL')),
                ('allow_subnav', models.BooleanField(default=False, help_text="NOTE: The sub-menu might not be displayed, even if checked. It depends on how the menu is used in this project's templates.", verbose_name='allow sub-menu for this item')),
                ('link_text_de', models.CharField(blank=True, max_length=255, verbose_name='link text (de)')),
                ('link_text_fr', models.CharField(blank=True, max_length=255, verbose_name='link text (fr)')),
                ('link_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Page', verbose_name='link to an internal page')),
                ('menu', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_menu_items', to='wagtailmenus.FlatMenu')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model, wagtailmenus.models.menuitems.MenuItem),
        ),
        # Custom item type attached to the stock wagtailmenus MainMenu.
        migrations.CreateModel(
            name='MainMenuCustomMenuItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('link_url', models.CharField(blank=True, max_length=255, null=True, verbose_name='link to a custom URL')),
                ('link_text', models.CharField(blank=True, help_text="Provide the text to use for a custom URL, or set on an internal page link to use instead of the page's title.", max_length=255, verbose_name='link text')),
                ('handle', models.CharField(blank=True, help_text='Use this field to optionally specify an additional value for each menu item, which you can then reference in custom menu templates.', max_length=100, verbose_name='handle')),
                ('url_append', models.CharField(blank=True, help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='append to URL')),
                ('allow_subnav', models.BooleanField(default=True, help_text="NOTE: The sub-menu might not be displayed, even if checked. It depends on how the menu is used in this project's templates.", verbose_name='allow sub-menu for this item')),
                ('link_text_de', models.CharField(blank=True, max_length=255, verbose_name='link text (de)')),
                ('link_text_fr', models.CharField(blank=True, max_length=255, verbose_name='link text (fr)')),
                ('link_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Page', verbose_name='link to an internal page')),
                ('menu', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_menu_items', to='wagtailmenus.MainMenu')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model, wagtailmenus.models.menuitems.MenuItem),
        ),
        # Translated (de/fr) fields added to the existing TopLevelPage model.
        migrations.AddField(
            model_name='toplevelpage',
            name='repeated_item_text_de',
            field=models.CharField(blank=True, max_length=255, verbose_name='repeated item link text (de)'),
        ),
        migrations.AddField(
            model_name='toplevelpage',
            name='repeated_item_text_fr',
            field=models.CharField(blank=True, max_length=255, verbose_name='repeated item link text (fr)'),
        ),
        migrations.AddField(
            model_name='toplevelpage',
            name='title_de',
            field=models.CharField(blank=True, max_length=255, verbose_name='title (de)'),
        ),
        migrations.AddField(
            model_name='toplevelpage',
            name='title_fr',
            field=models.CharField(blank=True, max_length=255, verbose_name='title (fr)'),
        ),
        # A flat menu handle must be unique per site.
        migrations.AlterUniqueTogether(
            name='customflatmenu',
            unique_together=set([('site', 'handle')]),
        ),
    ]
| 85.39375
| 504
| 0.656664
| 1,743
| 13,663
| 5.018359
| 0.113597
| 0.071682
| 0.070881
| 0.085058
| 0.894249
| 0.894249
| 0.880645
| 0.869212
| 0.869212
| 0.856865
| 0
| 0.015839
| 0.214448
| 13,663
| 159
| 505
| 85.930818
| 0.799124
| 0.004977
| 0
| 0.638158
| 1
| 0.105263
| 0.369776
| 0.018172
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.039474
| 0
| 0.059211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c84726272f5ccce872fa9f251d5064eaed566127
| 1,302
|
py
|
Python
|
Graded/G3/slam/solution/__init__.py
|
chrstrom/TTK4250
|
f453c3a59597d3fe6cff7d35b790689919798b94
|
[
"Unlicense"
] | null | null | null |
Graded/G3/slam/solution/__init__.py
|
chrstrom/TTK4250
|
f453c3a59597d3fe6cff7d35b790689919798b94
|
[
"Unlicense"
] | null | null | null |
Graded/G3/slam/solution/__init__.py
|
chrstrom/TTK4250
|
f453c3a59597d3fe6cff7d35b790689919798b94
|
[
"Unlicense"
] | null | null | null |
# PyArmor bootstrap for an obfuscated module: import the bundled runtime
# helper from the sibling `pytransform` package and initialize it before any
# protected code runs.
from .pytransform import pyarmor_runtime
pyarmor_runtime()
# Execute this module's encrypted bytecode. `__pyarmor__` is made available
# by the pyarmor_runtime() call above; the bytes literal is the opaque
# PyArmor-encrypted payload (starts with the b'PYARMOR' magic) and must not
# be edited by hand — regenerate it with the pyarmor tool instead.
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x09\x00\x61\x0d\x0d\x0a\x08\x2d\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\xed\x00\x00\x00\x00\x00\x00\x18\x3d\x71\xc5\x03\x9e\x68\x9a\xa0\x37\x72\x21\xef\xad\x8a\xf4\x10\x00\x00\x00\x00\x00\x00\x00\x00\xb4\x8c\x82\x42\x16\x77\xe5\x90\x93\xcb\xad\x1f\x2f\x25\x62\x6c\xf5\x02\xd8\xd5\xa2\x5e\x70\x77\xac\xd7\x78\x2f\xbe\x60\x40\x8f\x2b\x57\x02\x4f\xa0\x4f\xb9\x5f\x3f\x67\x56\x7c\x8c\x15\x95\x26\xdf\xaf\x5d\x30\xf2\xbc\x4b\x06\x6d\x66\x77\x1d\xf1\xd6\x67\x18\x5f\xe5\x7f\x4a\x8d\x4e\x82\x97\x42\x19\xfa\xff\x42\xe3\x1b\xe7\xa1\x36\x46\x2b\x63\x0b\x2b\x4a\x53\x6e\x1b\x06\xf1\x8d\xc9\xf5\x16\x5c\xcd\xd0\xc8\xd3\xaf\x08\x86\x5e\x20\xc7\xad\x33\x4a\x8c\x06\x71\x4d\x9a\x1e\xbe\xa7\xe8\x08\x3f\xf1\x6b\x6e\x54\x4e\x6f\x4b\xe3\x3b\x98\x9a\x2a\x3a\x01\xfa\x52\xc3\xf6\x64\x3c\xeb\xa6\xbf\x4c\xb6\x5e\xf4\x59\x40\xd3\xb9\x02\x01\x63\x0f\xa8\x5a\x9f\x60\x26\xc4\xdc\xa6\xb6\xe6\xf8\xac\xea\xaa\x04\xa4\x23\x1a\x50\xb2\x67\x91\xf9\xee\xed\xbc\x35\x18\xff\x1f\x5a\xab\x0b\xbe\x95\xc6\x72\x12\x2d\x31\xf9\x4a\x52\x60\x1f\x42\x0f\x5d\xcc\xf1\x4c\xa0\xed\xc5\x2b\x49\x68\x71\xa4\x0f\x7b\x76\x16\x50\xe6\xdb\x83\xd7\x2f\xc4\x57\xc7\x12\x02\x30\xc8\xef\xe8\x38\xf6', 2)
| 434
| 1,243
| 0.754992
| 313
| 1,302
| 3.095847
| 0.546326
| 0.123839
| 0.130031
| 0.111455
| 0.043344
| 0.043344
| 0.024768
| 0
| 0
| 0
| 0
| 0.308346
| 0.006144
| 1,302
| 3
| 1,243
| 434
| 0.440495
| 0
| 0
| 0
| 0
| 0.333333
| 0.924021
| 0.924021
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
c864907a58b06cc8f7ca6e13b0c4b69adb47f06b
| 34,978
|
py
|
Python
|
molecule/python/molecule_api/api/token_api.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
molecule/python/molecule_api/api/token_api.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
molecule/python/molecule_api/api/token_api.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Molecule API Documentation
The Hydrogen Molecule API # noqa: E501
OpenAPI spec version: 1.3.0
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from molecule_api.api_client import ApiClient
class TokenApi(object):
    """Client for the Molecule API `/token` endpoints.

    NOTE: This class was originally auto generated by the swagger code
    generator program (Ref: https://github.com/swagger-api/swagger-codegen);
    the repetitive per-endpoint boilerplate has been factored into private
    helpers while keeping every public method signature, error message and
    ApiClient.call_api invocation identical.

    Each endpoint is exposed as a convenience method (returns the response
    data only) plus a ``*_with_http_info`` variant. Both accept the standard
    transport keyword arguments ``async_req``, ``_return_http_data_only``,
    ``_preload_content`` and ``_request_timeout``.
    """

    # Transport keyword arguments accepted by every endpoint method in
    # addition to the endpoint-specific parameters.
    _COMMON_PARAMS = ('async_req', '_return_http_data_only',
                      '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        # Fall back to a default-configured client when none is supplied.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    # ------------------------------------------------------------------
    # Shared request plumbing (private helpers)
    # ------------------------------------------------------------------

    def _collect_params(self, named, kwargs, all_params, method_name):
        """Merge positional endpoint arguments with ``**kwargs``.

        :param dict named: endpoint parameters received positionally.
        :param dict kwargs: remaining keyword arguments from the caller.
        :param list all_params: endpoint-specific parameter names.
        :param str method_name: public method name used in error messages.
        :return: dict of all effective parameters.
        :raises TypeError: if ``kwargs`` contains an unsupported key.
        """
        allowed = set(all_params)
        allowed.update(self._COMMON_PARAMS)
        params = dict(named)
        for key, val in kwargs.items():
            if key not in allowed:
                # Same message the generated code produced.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name))
            params[key] = val
        return params

    @staticmethod
    def _require(params, name, method_name):
        """Raise ValueError when required parameter *name* is absent or None."""
        if params.get(name) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `%s`"
                % (name, method_name))

    def _call_endpoint(self, resource_path, http_method, params,
                       path_names=(), query_names=(), body_name=None,
                       response_type=None):
        """Dispatch one request through the shared ApiClient.

        :param str resource_path: endpoint path template, e.g. ``/token/{token_id}``.
        :param str http_method: HTTP verb ('GET', 'POST', ...).
        :param dict params: effective parameters from :meth:`_collect_params`.
        :param tuple path_names: parameter names substituted into the path.
        :param tuple query_names: parameter names sent on the query string,
            in the order they should appear.
        :param str body_name: parameter name holding the request body, if any.
        :param str response_type: swagger model name to deserialize into.
        :return: deserialized response data, or the request thread when
            ``async_req`` is set.
        """
        path_params = {n: params[n] for n in path_names if n in params}
        query_params = [(n, params[n]) for n in query_names if n in params]
        body_params = params.get(body_name) if body_name else None
        # All token endpoints produce JSON and authenticate via OAuth2.
        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),
        }
        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=['oauth2'],
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    # ------------------------------------------------------------------
    # Public endpoint methods
    # ------------------------------------------------------------------

    def burn_token_using_post(self, token_burn_params, **kwargs):  # noqa: E501
        """Burn tokens.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.burn_token_using_post(token_burn_params, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TokenBurnParams token_burn_params: Burns provided amount of existing tokens (required)
        :return: TransactionSuccessResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.burn_token_using_post_with_http_info(token_burn_params, **kwargs)  # noqa: E501

    def burn_token_using_post_with_http_info(self, token_burn_params, **kwargs):  # noqa: E501
        """Burn tokens (full-control variant of :meth:`burn_token_using_post`).

        :param TokenBurnParams token_burn_params: Burns provided amount of existing tokens (required)
        :return: TransactionSuccessResponse, or the request thread when
                 async_req is set.
        """
        params = self._collect_params(
            {'token_burn_params': token_burn_params}, kwargs,
            ['token_burn_params'], 'burn_token_using_post')
        self._require(params, 'token_burn_params', 'burn_token_using_post')
        return self._call_endpoint(
            '/token/burn', 'POST', params,
            body_name='token_burn_params',
            response_type='TransactionSuccessResponse')

    def create_token_using_post(self, token_params, **kwargs):  # noqa: E501
        """Creates a new Token.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.create_token_using_post(token_params, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TokenParams token_params: It enables a user to create a Token (required)
        :return: TokenResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.create_token_using_post_with_http_info(token_params, **kwargs)  # noqa: E501

    def create_token_using_post_with_http_info(self, token_params, **kwargs):  # noqa: E501
        """Creates a new Token (full-control variant of :meth:`create_token_using_post`).

        :param TokenParams token_params: It enables a user to create a Token (required)
        :return: TokenResponse, or the request thread when async_req is set.
        """
        params = self._collect_params(
            {'token_params': token_params}, kwargs,
            ['token_params'], 'create_token_using_post')
        self._require(params, 'token_params', 'create_token_using_post')
        return self._call_endpoint(
            '/token', 'POST', params,
            body_name='token_params',
            response_type='TokenResponse')

    def delete_token_using_delete(self, token_id, **kwargs):  # noqa: E501
        """Delete Token.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_token_using_delete(token_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str token_id: Token ID (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.delete_token_using_delete_with_http_info(token_id, **kwargs)  # noqa: E501

    def delete_token_using_delete_with_http_info(self, token_id, **kwargs):  # noqa: E501
        """Delete Token (full-control variant of :meth:`delete_token_using_delete`).

        :param str token_id: Token ID (required)
        :return: None, or the request thread when async_req is set.
        """
        params = self._collect_params(
            {'token_id': token_id}, kwargs,
            ['token_id'], 'delete_token_using_delete')
        self._require(params, 'token_id', 'delete_token_using_delete')
        return self._call_endpoint(
            '/token/{token_id}', 'DELETE', params,
            path_names=('token_id',),
            response_type=None)

    def deploy_token_using_post(self, token_deploy_params, **kwargs):  # noqa: E501
        """Deploys provided Token to network.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.deploy_token_using_post(token_deploy_params, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TokenDeployParams token_deploy_params: Deploys provided Token to network (required)
        :return: TransactionSuccessResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.deploy_token_using_post_with_http_info(token_deploy_params, **kwargs)  # noqa: E501

    def deploy_token_using_post_with_http_info(self, token_deploy_params, **kwargs):  # noqa: E501
        """Deploys provided Token to network (full-control variant of :meth:`deploy_token_using_post`).

        :param TokenDeployParams token_deploy_params: Deploys provided Token to network (required)
        :return: TransactionSuccessResponse, or the request thread when
                 async_req is set.
        """
        params = self._collect_params(
            {'token_deploy_params': token_deploy_params}, kwargs,
            ['token_deploy_params'], 'deploy_token_using_post')
        self._require(params, 'token_deploy_params', 'deploy_token_using_post')
        return self._call_endpoint(
            '/token/deploy', 'POST', params,
            body_name='token_deploy_params',
            response_type='TransactionSuccessResponse')

    def get_token_all_using_get(self, **kwargs):  # noqa: E501
        """Fetch Token list.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_token_all_using_get(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int page: To filter response Token list by page number
        :param int size: Number of records per page
        :param str order_by: Field to sort record list
        :param bool ascending: Sorting order
        :param bool get_latest: To fetch latest (one) record
        :return: PageTokenResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.get_token_all_using_get_with_http_info(**kwargs)  # noqa: E501

    def get_token_all_using_get_with_http_info(self, **kwargs):  # noqa: E501
        """Fetch Token list (full-control variant of :meth:`get_token_all_using_get`).

        Accepts the same optional paging/sorting keyword arguments:
        ``page``, ``size``, ``order_by``, ``ascending``, ``get_latest``.
        :return: PageTokenResponse, or the request thread when async_req is set.
        """
        # Query-string parameters are emitted in this fixed order.
        query_names = ('page', 'size', 'order_by', 'ascending', 'get_latest')
        params = self._collect_params(
            {}, kwargs, list(query_names), 'get_token_all_using_get')
        return self._call_endpoint(
            '/token', 'GET', params,
            query_names=query_names,
            response_type='PageTokenResponse')

    def get_token_using_get(self, token_id, **kwargs):  # noqa: E501
        """Fetch Token details.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_token_using_get(token_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str token_id: Token ID (required)
        :return: TokenResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.get_token_using_get_with_http_info(token_id, **kwargs)  # noqa: E501

    def get_token_using_get_with_http_info(self, token_id, **kwargs):  # noqa: E501
        """Fetch Token details (full-control variant of :meth:`get_token_using_get`).

        :param str token_id: Token ID (required)
        :return: TokenResponse, or the request thread when async_req is set.
        """
        params = self._collect_params(
            {'token_id': token_id}, kwargs,
            ['token_id'], 'get_token_using_get')
        self._require(params, 'token_id', 'get_token_using_get')
        return self._call_endpoint(
            '/token/{token_id}', 'GET', params,
            path_names=('token_id',),
            response_type='TokenResponse')

    def mint_token_using_post(self, token_mint_params, **kwargs):  # noqa: E501
        """Mint new tokens.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.mint_token_using_post(token_mint_params, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TokenMintParams token_mint_params: Mint new tokens (required)
        :return: TransactionSuccessResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.mint_token_using_post_with_http_info(token_mint_params, **kwargs)  # noqa: E501

    def mint_token_using_post_with_http_info(self, token_mint_params, **kwargs):  # noqa: E501
        """Mint new tokens (full-control variant of :meth:`mint_token_using_post`).

        :param TokenMintParams token_mint_params: Mint new tokens (required)
        :return: TransactionSuccessResponse, or the request thread when
                 async_req is set.
        """
        params = self._collect_params(
            {'token_mint_params': token_mint_params}, kwargs,
            ['token_mint_params'], 'mint_token_using_post')
        self._require(params, 'token_mint_params', 'mint_token_using_post')
        return self._call_endpoint(
            '/token/mint', 'POST', params,
            body_name='token_mint_params',
            response_type='TransactionSuccessResponse')

    def update_token_using_put(self, token_id, **kwargs):  # noqa: E501
        """Update Token details.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.update_token_using_put(token_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str token_id: Token ID (required)
        :param TokenUpdateParams token_update_params: Token details to be updated
        :return: TokenResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.update_token_using_put_with_http_info(token_id, **kwargs)  # noqa: E501

    def update_token_using_put_with_http_info(self, token_id, **kwargs):  # noqa: E501
        """Update Token details (full-control variant of :meth:`update_token_using_put`).

        :param str token_id: Token ID (required)
        :param TokenUpdateParams token_update_params: Token details to be updated
        :return: TokenResponse, or the request thread when async_req is set.
        """
        params = self._collect_params(
            {'token_id': token_id}, kwargs,
            ['token_id', 'token_update_params'], 'update_token_using_put')
        self._require(params, 'token_id', 'update_token_using_put')
        return self._call_endpoint(
            '/token/{token_id}', 'PUT', params,
            path_names=('token_id',),
            body_name='token_update_params',
            response_type='TokenResponse')

    def whitelist_token_using_post(self, token_whitelist_params, **kwargs):  # noqa: E501
        """Whitelist token for provided wallet.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.whitelist_token_using_post(token_whitelist_params, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param TokenWhitelistParams token_whitelist_params: Whitelist token for provided wallet (required)
        :return: TransactionSuccessResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.whitelist_token_using_post_with_http_info(token_whitelist_params, **kwargs)  # noqa: E501

    def whitelist_token_using_post_with_http_info(self, token_whitelist_params, **kwargs):  # noqa: E501
        """Whitelist token for provided wallet (full-control variant of :meth:`whitelist_token_using_post`).

        :param TokenWhitelistParams token_whitelist_params: Whitelist token for provided wallet (required)
        :return: TransactionSuccessResponse, or the request thread when
                 async_req is set.
        """
        params = self._collect_params(
            {'token_whitelist_params': token_whitelist_params}, kwargs,
            ['token_whitelist_params'], 'whitelist_token_using_post')
        self._require(params, 'token_whitelist_params', 'whitelist_token_using_post')
        return self._call_endpoint(
            '/token/whitelist', 'POST', params,
            body_name='token_whitelist_params',
            response_type='TransactionSuccessResponse')
| 39.38964
| 143
| 0.62019
| 4,051
| 34,978
| 5.055295
| 0.048877
| 0.041799
| 0.027345
| 0.031642
| 0.948142
| 0.931002
| 0.92402
| 0.893794
| 0.88393
| 0.87934
| 0
| 0.013733
| 0.294271
| 34,978
| 887
| 144
| 39.434047
| 0.81588
| 0.318915
| 0
| 0.75
| 1
| 0
| 0.190351
| 0.055962
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039256
| false
| 0
| 0.008264
| 0
| 0.105372
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c06f475e0839c2d750b0723b7f7ecbfd860e2fa7
| 9,761
|
py
|
Python
|
tests/otus/snapshots/snap_test_db.py
|
ColeVoelpel/virtool
|
859c8d2516f07343bde47f3bae0247dedd76e6c4
|
[
"MIT"
] | 1
|
2019-08-23T00:19:00.000Z
|
2019-08-23T00:19:00.000Z
|
tests/otus/snapshots/snap_test_db.py
|
ColeVoelpel/virtool
|
859c8d2516f07343bde47f3bae0247dedd76e6c4
|
[
"MIT"
] | null | null | null |
tests/otus/snapshots/snap_test_db.py
|
ColeVoelpel/virtool
|
859c8d2516f07343bde47f3bae0247dedd76e6c4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
# Snapshot data for the OTU database tests (test_create / test_edit).
# NOTE(review): this file is generated by snapshottest; comments added here
# will be lost the next time snapshots are regenerated.
from __future__ import unicode_literals

from snapshottest import GenericRepr, Snapshot

snapshots = Snapshot()

# OTU document created with explicit abbreviation "TMV".
snapshots['test_create[uvloop-TMV] 1'] = {
    '_id': '9pfsom1b',
    'abbreviation': 'TMV',
    'isolates': [
    ],
    'last_indexed_version': None,
    'lower_name': 'bar',
    'name': 'Bar',
    'reference': {
        'id': 'foo'
    },
    'schema': [
    ],
    'verified': False,
    'version': 0
}

# History record written alongside the OTU above ('diff' holds the whole
# new document for a creation).
snapshots['test_create[uvloop-TMV] 2'] = {
    '_id': '9pfsom1b.0',
    'created_at': GenericRepr('datetime.datetime(2015, 10, 6, 20, 0)'),
    'description': 'Created Bar (TMV)',
    'diff': {
        '_id': '9pfsom1b',
        'abbreviation': 'TMV',
        'isolates': [
        ],
        'last_indexed_version': None,
        'lower_name': 'bar',
        'name': 'Bar',
        'reference': {
            'id': 'foo'
        },
        'schema': [
        ],
        'verified': False,
        'version': 0
    },
    'index': {
        'id': 'unbuilt',
        'version': 'unbuilt'
    },
    'method_name': 'create',
    'otu': {
        'id': '9pfsom1b',
        'name': 'Bar',
        'version': 0
    },
    'reference': {
        'id': 'foo'
    },
    'user': {
        'id': 'bob'
    }
}

# OTU document created without an abbreviation.
snapshots['test_create[uvloop] 1'] = {
    '_id': '9pfsom1b',
    'abbreviation': '',
    'isolates': [
    ],
    'last_indexed_version': None,
    'lower_name': 'bar',
    'name': 'Bar',
    'reference': {
        'id': 'foo'
    },
    'schema': [
    ],
    'verified': False,
    'version': 0
}

# History record for the no-abbreviation creation.
snapshots['test_create[uvloop] 2'] = {
    '_id': '9pfsom1b.0',
    'created_at': GenericRepr('datetime.datetime(2015, 10, 6, 20, 0)'),
    'description': 'Created Bar',
    'diff': {
        '_id': '9pfsom1b',
        'abbreviation': '',
        'isolates': [
        ],
        'last_indexed_version': None,
        'lower_name': 'bar',
        'name': 'Bar',
        'reference': {
            'id': 'foo'
        },
        'schema': [
        ],
        'verified': False,
        'version': 0
    },
    'index': {
        'id': 'unbuilt',
        'version': 'unbuilt'
    },
    'method_name': 'create',
    'otu': {
        'id': '9pfsom1b',
        'name': 'Bar',
        'version': 0
    },
    'reference': {
        'id': 'foo'
    },
    'user': {
        'id': 'bob'
    }
}

# Edit with abbreviation=None: only the name changes; 'diff' is a list of
# [op, field, [old, new]] entries.
snapshots['test_edit[uvloop-None] history'] = {
    '_id': '6116cba1.1',
    'created_at': GenericRepr('datetime.datetime(2015, 10, 6, 20, 0)'),
    'description': 'Changed name to Foo Virus',
    'diff': [
        [
            'change',
            'version',
            [
                0,
                1
            ]
        ],
        [
            'change',
            'lower_name',
            [
                'prunus virus f',
                'foo virus'
            ]
        ],
        [
            'change',
            'name',
            [
                'Prunus virus F',
                'Foo Virus'
            ]
        ]
    ],
    'index': {
        'id': 'unbuilt',
        'version': 'unbuilt'
    },
    'method_name': 'edit',
    'otu': {
        'id': '6116cba1',
        'name': 'Prunus virus F',
        'version': 1
    },
    'reference': {
        'id': 'hxn167'
    },
    'user': {
        'id': 'bob'
    }
}

# Resulting OTU document after the abbreviation=None edit (abbreviation
# untouched, still 'PVF').
snapshots['test_edit[uvloop-None] otu'] = {
    '_id': '6116cba1',
    'abbreviation': 'PVF',
    'imported': True,
    'isolates': [
        {
            'default': True,
            'id': 'cab8b360',
            'source_name': '8816-v2',
            'source_type': 'isolate'
        }
    ],
    'last_indexed_version': 0,
    'lower_name': 'foo virus',
    'name': 'Foo Virus',
    'reference': {
        'id': 'hxn167'
    },
    'schema': [
    ],
    'verified': False,
    'version': 1
}

# Edit with abbreviation='TMV': name and abbreviation both change.
snapshots['test_edit[uvloop-TMV] history'] = {
    '_id': '6116cba1.1',
    'created_at': GenericRepr('datetime.datetime(2015, 10, 6, 20, 0)'),
    'description': 'Changed name to Foo Virus and changed abbreviation to TMV',
    'diff': [
        [
            'change',
            'version',
            [
                0,
                1
            ]
        ],
        [
            'change',
            'abbreviation',
            [
                'PVF',
                'TMV'
            ]
        ],
        [
            'change',
            'lower_name',
            [
                'prunus virus f',
                'foo virus'
            ]
        ],
        [
            'change',
            'name',
            [
                'Prunus virus F',
                'Foo Virus'
            ]
        ]
    ],
    'index': {
        'id': 'unbuilt',
        'version': 'unbuilt'
    },
    'method_name': 'edit',
    'otu': {
        'id': '6116cba1',
        'name': 'Prunus virus F',
        'version': 1
    },
    'reference': {
        'id': 'hxn167'
    },
    'user': {
        'id': 'bob'
    }
}

# Resulting OTU document after the abbreviation='TMV' edit.
snapshots['test_edit[uvloop-TMV] otu'] = {
    '_id': '6116cba1',
    'abbreviation': 'TMV',
    'imported': True,
    'isolates': [
        {
            'default': True,
            'id': 'cab8b360',
            'source_name': '8816-v2',
            'source_type': 'isolate'
        }
    ],
    'last_indexed_version': 0,
    'lower_name': 'foo virus',
    'name': 'Foo Virus',
    'reference': {
        'id': 'hxn167'
    },
    'schema': [
    ],
    'verified': False,
    'version': 1
}

# Default edit: name changes and the abbreviation is cleared ('PVF' -> '').
snapshots['test_edit[uvloop] history'] = {
    '_id': '6116cba1.1',
    'created_at': GenericRepr('datetime.datetime(2015, 10, 6, 20, 0)'),
    'description': 'Changed name to Foo Virus and removed abbreviation PVF',
    'diff': [
        [
            'change',
            'version',
            [
                0,
                1
            ]
        ],
        [
            'change',
            'abbreviation',
            [
                'PVF',
                ''
            ]
        ],
        [
            'change',
            'lower_name',
            [
                'prunus virus f',
                'foo virus'
            ]
        ],
        [
            'change',
            'name',
            [
                'Prunus virus F',
                'Foo Virus'
            ]
        ]
    ],
    'index': {
        'id': 'unbuilt',
        'version': 'unbuilt'
    },
    'method_name': 'edit',
    'otu': {
        'id': '6116cba1',
        'name': 'Prunus virus F',
        'version': 1
    },
    'reference': {
        'id': 'hxn167'
    },
    'user': {
        'id': 'bob'
    }
}

# Resulting OTU document after the abbreviation-clearing edit.
snapshots['test_edit[uvloop] otu'] = {
    '_id': '6116cba1',
    'abbreviation': '',
    'imported': True,
    'isolates': [
        {
            'default': True,
            'id': 'cab8b360',
            'source_name': '8816-v2',
            'source_type': 'isolate'
        }
    ],
    'last_indexed_version': 0,
    'lower_name': 'foo virus',
    'name': 'Foo Virus',
    'reference': {
        'id': 'hxn167'
    },
    'schema': [
    ],
    'verified': False,
    'version': 1
}

# Creation with abbreviation='' — same result as the no-abbreviation case.
snapshots['test_create[uvloop-] 1'] = {
    '_id': '9pfsom1b',
    'abbreviation': '',
    'isolates': [
    ],
    'last_indexed_version': None,
    'lower_name': 'bar',
    'name': 'Bar',
    'reference': {
        'id': 'foo'
    },
    'schema': [
    ],
    'verified': False,
    'version': 0
}

# History record for the abbreviation='' creation.
snapshots['test_create[uvloop-] 2'] = {
    '_id': '9pfsom1b.0',
    'created_at': GenericRepr('datetime.datetime(2015, 10, 6, 20, 0)'),
    'description': 'Created Bar',
    'diff': {
        '_id': '9pfsom1b',
        'abbreviation': '',
        'isolates': [
        ],
        'last_indexed_version': None,
        'lower_name': 'bar',
        'name': 'Bar',
        'reference': {
            'id': 'foo'
        },
        'schema': [
        ],
        'verified': False,
        'version': 0
    },
    'index': {
        'id': 'unbuilt',
        'version': 'unbuilt'
    },
    'method_name': 'create',
    'otu': {
        'id': '9pfsom1b',
        'name': 'Bar',
        'version': 0
    },
    'reference': {
        'id': 'foo'
    },
    'user': {
        'id': 'bob'
    }
}

# Resulting OTU document after an edit with abbreviation='' (clears 'PVF').
snapshots['test_edit[uvloop-] otu'] = {
    '_id': '6116cba1',
    'abbreviation': '',
    'imported': True,
    'isolates': [
        {
            'default': True,
            'id': 'cab8b360',
            'source_name': '8816-v2',
            'source_type': 'isolate'
        }
    ],
    'last_indexed_version': 0,
    'lower_name': 'foo virus',
    'name': 'Foo Virus',
    'reference': {
        'id': 'hxn167'
    },
    'schema': [
    ],
    'verified': False,
    'version': 1
}

# History record for the abbreviation='' edit.
snapshots['test_edit[uvloop-] history'] = {
    '_id': '6116cba1.1',
    'created_at': GenericRepr('datetime.datetime(2015, 10, 6, 20, 0)'),
    'description': 'Changed name to Foo Virus and removed abbreviation PVF',
    'diff': [
        [
            'change',
            'version',
            [
                0,
                1
            ]
        ],
        [
            'change',
            'abbreviation',
            [
                'PVF',
                ''
            ]
        ],
        [
            'change',
            'lower_name',
            [
                'prunus virus f',
                'foo virus'
            ]
        ],
        [
            'change',
            'name',
            [
                'Prunus virus F',
                'Foo Virus'
            ]
        ]
    ],
    'index': {
        'id': 'unbuilt',
        'version': 'unbuilt'
    },
    'method_name': 'edit',
    'otu': {
        'id': '6116cba1',
        'name': 'Prunus virus F',
        'version': 1
    },
    'reference': {
        'id': 'hxn167'
    },
    'user': {
        'id': 'bob'
    }
}
| 20.084362
| 79
| 0.394017
| 757
| 9,761
| 4.959049
| 0.101717
| 0.042621
| 0.047949
| 0.051145
| 0.941396
| 0.93154
| 0.926478
| 0.926478
| 0.913692
| 0.913692
| 0
| 0.046168
| 0.425264
| 9,761
| 485
| 80
| 20.125773
| 0.622995
| 0.006352
| 0
| 0.703863
| 0
| 0
| 0.370153
| 0.030219
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012876
| 0
| 0.012876
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c0c9b0ac7c362a47a2449fa2dc154c115a309e90
| 88
|
py
|
Python
|
python/testData/optimizeImports/sameNameImportedWithDifferentAliases.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/optimizeImports/sameNameImportedWithDifferentAliases.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/optimizeImports/sameNameImportedWithDifferentAliases.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# NOTE(review): appears to be an IDE "optimize imports" test fixture — the
# exact text of this file may be significant; confirm before reformatting.
# `bar`, `a` and `b` are three names bound to the same imported object.
from foo import bar
from foo import bar as a
from foo import bar as b
print(bar, a, b)
| 14.666667
| 24
| 0.727273
| 20
| 88
| 3.2
| 0.4
| 0.328125
| 0.609375
| 0.75
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 88
| 5
| 25
| 17.6
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c0d4477c9e8cb8c1d850c9e195b6d620bdecd97c
| 17,496
|
py
|
Python
|
vplexapi-7.0.0/vplexapi/api/logical_units_api.py
|
dell/python-vplex
|
02c5df5e7f9ed61a13a2838f21ca6467a25dd392
|
[
"Apache-2.0"
] | 3
|
2020-12-01T11:22:13.000Z
|
2021-02-16T17:38:42.000Z
|
vplexapi-7.0.0/vplexapi/api/logical_units_api.py
|
dell/python-vplex
|
02c5df5e7f9ed61a13a2838f21ca6467a25dd392
|
[
"Apache-2.0"
] | null | null | null |
vplexapi-7.0.0/vplexapi/api/logical_units_api.py
|
dell/python-vplex
|
02c5df5e7f9ed61a13a2838f21ca6467a25dd392
|
[
"Apache-2.0"
] | 3
|
2021-01-01T21:07:55.000Z
|
2021-02-20T07:07:40.000Z
|
# coding: utf-8
"""
VPlex REST API
A definition for the next-gen VPlex API # noqa: E501
OpenAPI spec version: 0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from vplexapi.api_client import ApiClient
class LogicalUnitsApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default ApiClient when the caller does not inject one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def forget_logical_units(self, cluster_name, storagearray_name, **kwargs):  # noqa: E501
        """Forgets logicalUnits in an array  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.forget_logical_units(cluster_name, storagearray_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str cluster_name: The name of the cluster (required)
        :param str storagearray_name: The name of the storage array (required)
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        # Convenience wrapper: always return only the response payload, not
        # the (data, status, headers) tuple.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.forget_logical_units_with_http_info(cluster_name, storagearray_name, **kwargs)  # noqa: E501
        else:
            (data) = self.forget_logical_units_with_http_info(cluster_name, storagearray_name, **kwargs)  # noqa: E501
            return data

    def forget_logical_units_with_http_info(self, cluster_name, storagearray_name, **kwargs):  # noqa: E501
        """Forgets logicalUnits in an array  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.forget_logical_units_with_http_info(cluster_name, storagearray_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str cluster_name: The name of the cluster (required)
        :param str storagearray_name: The name of the storage array (required)
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint.
        all_params = ['cluster_name', 'storagearray_name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject unknown keyword arguments and merge the known ones into
        # `params` alongside the positional arguments captured by locals().
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method forget_logical_units" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'cluster_name' is set
        if ('cluster_name' not in params or
                params['cluster_name'] is None):
            raise ValueError("Missing the required parameter `cluster_name` when calling `forget_logical_units`")  # noqa: E501
        # verify the required parameter 'storagearray_name' is set
        if ('storagearray_name' not in params or
                params['storagearray_name'] is None):
            raise ValueError("Missing the required parameter `storagearray_name` when calling `forget_logical_units`")  # noqa: E501

        collection_formats = {}

        # Both names are substituted into the URL template below.
        path_params = {}
        if 'cluster_name' in params:
            path_params['cluster_name'] = params['cluster_name']  # noqa: E501
        if 'storagearray_name' in params:
            path_params['storagearray_name'] = params['storagearray_name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

        return self.api_client.call_api(
            '/clusters/{cluster_name}/storage_arrays/{storagearray_name}/logical_units', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_logical_unit(self, cluster_name, storagearray_name, name, **kwargs):  # noqa: E501
        """Returns a single LogicalUnit by name  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_logical_unit(cluster_name, storagearray_name, name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str cluster_name: The name of the cluster (required)
        :param str storagearray_name: The name of the storage array (required)
        :param str name: The name of a specific instance of the resource (required)
        :param str fields: Select which fields are included in the response. 'name' is always included. See FieldSelectionExpression for details.
        :return: LogicalUnit
            If the method is called asynchronously,
            returns the request thread.
        """
        # Convenience wrapper: always return only the response payload.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_logical_unit_with_http_info(cluster_name, storagearray_name, name, **kwargs)  # noqa: E501
        else:
            (data) = self.get_logical_unit_with_http_info(cluster_name, storagearray_name, name, **kwargs)  # noqa: E501
            return data

    def get_logical_unit_with_http_info(self, cluster_name, storagearray_name, name, **kwargs):  # noqa: E501
        """Returns a single LogicalUnit by name  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_logical_unit_with_http_info(cluster_name, storagearray_name, name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str cluster_name: The name of the cluster (required)
        :param str storagearray_name: The name of the storage array (required)
        :param str name: The name of a specific instance of the resource (required)
        :param str fields: Select which fields are included in the response. 'name' is always included. See FieldSelectionExpression for details.
        :return: LogicalUnit
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint.
        all_params = ['cluster_name', 'storagearray_name', 'name', 'fields']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject unknown keyword arguments and merge the known ones into `params`.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_logical_unit" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'cluster_name' is set
        if ('cluster_name' not in params or
                params['cluster_name'] is None):
            raise ValueError("Missing the required parameter `cluster_name` when calling `get_logical_unit`")  # noqa: E501
        # verify the required parameter 'storagearray_name' is set
        if ('storagearray_name' not in params or
                params['storagearray_name'] is None):
            raise ValueError("Missing the required parameter `storagearray_name` when calling `get_logical_unit`")  # noqa: E501
        # verify the required parameter 'name' is set
        if ('name' not in params or
                params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `get_logical_unit`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'cluster_name' in params:
            path_params['cluster_name'] = params['cluster_name']  # noqa: E501
        if 'storagearray_name' in params:
            path_params['storagearray_name'] = params['storagearray_name']  # noqa: E501
        if 'name' in params:
            path_params['name'] = params['name']  # noqa: E501

        # 'fields' is the only query-string parameter for this endpoint.
        query_params = []
        if 'fields' in params:
            query_params.append(('fields', params['fields']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

        return self.api_client.call_api(
            '/clusters/{cluster_name}/storage_arrays/{storagearray_name}/logical_units/{name}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='LogicalUnit',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_logical_units(self, cluster_name, storagearray_name, **kwargs):  # noqa: E501
        """Returns a list of LogicalUnits  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_logical_units(cluster_name, storagearray_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str cluster_name: The name of the cluster (required)
        :param str storagearray_name: The name of the storage array (required)
        :param str name: Filter results by name. See LexicalQueryExpression for details.
        :param str storage_volume: Filter results by storage_volume. See LexicalQueryExpression for details.
        :param int offset: Index of the first element to include in paginated results.<br> <b>'limit' must also be specified.</b>
        :param int limit: <p>Maximum number of elements to include in paginated results.<br> <b>'offset' must also be specified.<b>
        :param str sort_by: Specify the field priority order and direction for sorting. See SortingOrderExpression for details.
        :param str fields: Select which fields are included in the response. 'name' is always included. See FieldSelectionExpression for details.
        :return: list[LogicalUnit]
            If the method is called asynchronously,
            returns the request thread.
        """
        # Convenience wrapper: always return only the response payload.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_logical_units_with_http_info(cluster_name, storagearray_name, **kwargs)  # noqa: E501
        else:
            (data) = self.get_logical_units_with_http_info(cluster_name, storagearray_name, **kwargs)  # noqa: E501
            return data

    def get_logical_units_with_http_info(self, cluster_name, storagearray_name, **kwargs):  # noqa: E501
        """Returns a list of LogicalUnits  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_logical_units_with_http_info(cluster_name, storagearray_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str cluster_name: The name of the cluster (required)
        :param str storagearray_name: The name of the storage array (required)
        :param str name: Filter results by name. See LexicalQueryExpression for details.
        :param str storage_volume: Filter results by storage_volume. See LexicalQueryExpression for details.
        :param int offset: Index of the first element to include in paginated results.<br> <b>'limit' must also be specified.</b>
        :param int limit: <p>Maximum number of elements to include in paginated results.<br> <b>'offset' must also be specified.<b>
        :param str sort_by: Specify the field priority order and direction for sorting. See SortingOrderExpression for details.
        :param str fields: Select which fields are included in the response. 'name' is always included. See FieldSelectionExpression for details.
        :return: list[LogicalUnit]
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint.
        all_params = ['cluster_name', 'storagearray_name', 'name', 'storage_volume', 'offset', 'limit', 'sort_by', 'fields']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject unknown keyword arguments and merge the known ones into `params`.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_logical_units" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'cluster_name' is set
        if ('cluster_name' not in params or
                params['cluster_name'] is None):
            raise ValueError("Missing the required parameter `cluster_name` when calling `get_logical_units`")  # noqa: E501
        # verify the required parameter 'storagearray_name' is set
        if ('storagearray_name' not in params or
                params['storagearray_name'] is None):
            raise ValueError("Missing the required parameter `storagearray_name` when calling `get_logical_units`")  # noqa: E501

        # Pagination bounds: offset >= 0, 1 <= limit <= 100.
        if 'offset' in params and params['offset'] < 0:  # noqa: E501
            raise ValueError("Invalid value for parameter `offset` when calling `get_logical_units`, must be a value greater than or equal to `0`")  # noqa: E501
        if 'limit' in params and params['limit'] > 100:  # noqa: E501
            raise ValueError("Invalid value for parameter `limit` when calling `get_logical_units`, must be a value less than or equal to `100`")  # noqa: E501
        if 'limit' in params and params['limit'] < 1:  # noqa: E501
            raise ValueError("Invalid value for parameter `limit` when calling `get_logical_units`, must be a value greater than or equal to `1`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'cluster_name' in params:
            path_params['cluster_name'] = params['cluster_name']  # noqa: E501
        if 'storagearray_name' in params:
            path_params['storagearray_name'] = params['storagearray_name']  # noqa: E501

        # Optional filter / pagination / sorting query-string parameters.
        query_params = []
        if 'name' in params:
            query_params.append(('name', params['name']))  # noqa: E501
        if 'storage_volume' in params:
            query_params.append(('storage_volume', params['storage_volume']))  # noqa: E501
        if 'offset' in params:
            query_params.append(('offset', params['offset']))  # noqa: E501
        if 'limit' in params:
            query_params.append(('limit', params['limit']))  # noqa: E501
        if 'sort_by' in params:
            query_params.append(('sort_by', params['sort_by']))  # noqa: E501
        if 'fields' in params:
            query_params.append(('fields', params['fields']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

        return self.api_client.call_api(
            '/clusters/{cluster_name}/storage_arrays/{storagearray_name}/logical_units', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[LogicalUnit]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 47.543478
| 161
| 0.644719
| 2,112
| 17,496
| 5.12642
| 0.098958
| 0.040639
| 0.044611
| 0.052369
| 0.937379
| 0.922324
| 0.911148
| 0.904129
| 0.892583
| 0.877805
| 0
| 0.014292
| 0.268176
| 17,496
| 367
| 162
| 47.673025
| 0.831303
| 0.370142
| 0
| 0.693467
| 1
| 0.015075
| 0.263354
| 0.045784
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035176
| false
| 0
| 0.020101
| 0
| 0.105528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d0600870e07d7dab6eaa8624eaf002d9127c2cb
| 182
|
py
|
Python
|
scripts/hostnames.py
|
maplepoet/masterthesis
|
ce2d7fbf4f3c10484b8aedd2c1c74215f9f650b6
|
[
"MIT"
] | null | null | null |
scripts/hostnames.py
|
maplepoet/masterthesis
|
ce2d7fbf4f3c10484b8aedd2c1c74215f9f650b6
|
[
"MIT"
] | null | null | null |
scripts/hostnames.py
|
maplepoet/masterthesis
|
ce2d7fbf4f3c10484b8aedd2c1c74215f9f650b6
|
[
"MIT"
] | null | null | null |
# IP addresses of the experiment hosts.
# NOTE(review): `hostip` and `hostmach` previously held two byte-identical
# literal lists; `hostmach` is now derived from `hostip` so the address list
# only has to be maintained in one place. `list(...)` keeps them distinct
# list objects, exactly as before.
hostip = [
    "172.24.5.121",
    "172.24.5.183",
    "172.24.3.200",
    "172.24.3.70",
    "172.31.7.2",
]

# Same addresses as `hostip` (was a duplicated literal).
hostmach = list(hostip)
| 12.133333
| 15
| 0.538462
| 42
| 182
| 2.333333
| 0.309524
| 0.408163
| 0.244898
| 0.183673
| 0.857143
| 0.857143
| 0.857143
| 0.857143
| 0.857143
| 0.857143
| 0
| 0.512195
| 0.098901
| 182
| 14
| 16
| 13
| 0.085366
| 0
| 0
| 0.714286
| 0
| 0
| 0.626374
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
23a09c8055a5bf80413837cfb2d8206d2c9e6fec
| 294
|
py
|
Python
|
pygismeteo_base/models/__init__.py
|
monosans/pygismeteo-base
|
b8148e3c97b8ca627e9c9b87cee67b2e034f0039
|
[
"MIT"
] | 2
|
2021-12-10T09:53:54.000Z
|
2021-12-25T11:40:30.000Z
|
pygismeteo_base/models/__init__.py
|
monosans/pygismeteo-base
|
b8148e3c97b8ca627e9c9b87cee67b2e034f0039
|
[
"MIT"
] | null | null | null |
pygismeteo_base/models/__init__.py
|
monosans/pygismeteo-base
|
b8148e3c97b8ca627e9c9b87cee67b2e034f0039
|
[
"MIT"
] | null | null | null |
# Re-export the individual model modules so callers can write
# `from pygismeteo_base.models import current`, etc.
from pygismeteo_base.models import (
    current,
    search_by_coordinates,
    search_by_ip,
    search_by_query,
    step3,
    step6,
    step24,
)

# Public API of this package — mirrors the import list above.
__all__ = (
    "current",
    "search_by_coordinates",
    "search_by_ip",
    "search_by_query",
    "step3",
    "step6",
    "step24",
)
| 14.7
| 36
| 0.612245
| 32
| 294
| 5.09375
| 0.46875
| 0.294479
| 0.184049
| 0.319018
| 0.797546
| 0.797546
| 0.797546
| 0.797546
| 0.797546
| 0.797546
| 0
| 0.037383
| 0.272109
| 294
| 19
| 37
| 15.473684
| 0.724299
| 0
| 0
| 0
| 0
| 0
| 0.241497
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f1a045113df93937a6605bcf2bab7ef2c7ba2564
| 159,768
|
py
|
Python
|
TEST3D/GUI/0011210_page_bc/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 31
|
2015-04-01T15:59:36.000Z
|
2022-03-18T20:21:47.000Z
|
TEST3D/GUI/0011210_page_bc/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 3
|
2015-02-06T19:30:24.000Z
|
2017-05-25T14:14:31.000Z
|
TEST3D/GUI/0011210_page_bc/log.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 7
|
2015-01-23T15:19:22.000Z
|
2021-06-09T09:03:59.000Z
|
# -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# oof_manager@nist.gov.
import tests
#Testing Boundary Condition creation using Floating on Displacement Field
#The equation here is Force_Balance
#The field component is x
findWidget('OOF3D').resize(550, 350)
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Microstructure')
checkpoint page installed Microstructure
findWidget('OOF3D:Microstructure Page:Pane').set_position(225)
findWidget('OOF3D:Microstructure Page:Pane').set_position(156)
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
findMenu(findWidget('OOF3D:MenuBar'), 'File:Load:Data').activate()
checkpoint toplevel widget mapped Dialog-Data
findWidget('Dialog-Data').resize(190, 67)
findWidget('Dialog-Data:filename').set_text('TEST_DATA/two_walls.skeleton')
findWidget('Dialog-Data:gtk-ok').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint named analysis chooser set
checkpoint active area status updated
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint Solver page sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint named analysis chooser set
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint Solver page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint pinnodes page sensitized
findWidget('OOF3D Activity Viewer').resize(400, 300)
checkpoint pinnodes page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint boundary page updated
checkpoint OOF.File.Load.Data
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'FE Mesh')
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint page installed FE Mesh
findWidget('OOF3D:FE Mesh Page:Pane').set_position(304)
findWidget('OOF3D:FE Mesh Page:New').clicked()
checkpoint toplevel widget mapped Dialog-Create a new mesh
findWidget('Dialog-Create a new mesh').resize(345, 153)
findWidget('Dialog-Create a new mesh:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Mesh.New
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Fields & Equations')
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint page installed Fields & Equations
findWidget('OOF3D').resize(675, 350)
findWidget('OOF3D:Fields & Equations Page:HPane').set_position(301)
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Displacement defined').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Field.Define
findWidget('OOF3D:Fields & Equations Page:HPane:Fields:Displacement active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Field.Activate
findWidget('OOF3D:Fields & Equations Page:HPane:Equations:Force_Balance active').clicked()
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Subproblem.Equation.Activate
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Boundary Conditions')
checkpoint page installed Boundary Conditions
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 289)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Chooser'), 'Floating')
findWidget('Dialog-New Boundary Condition:name:Auto').clicked()
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
findWidget('OOF3D Messages 1').resize(1063, 200)
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
assert tests.boundaryConditionCheck(['floating_fieldx_componentx_constantXmax'])
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Xmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
assert tests.boundaryConditionCheck(['floating_fieldx_componentx_constantXmax','floating_fieldx_componentx_constantXmin'])
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Ymax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Ymin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantYmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Zmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Zmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmaxYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXmaxYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmaxYmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXmaxYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXminYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXminYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXminYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXminYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmaxYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXmaxYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmaxYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXmaxYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmaxYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXmaxYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXmaxYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXmaxYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXminYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXminYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXminYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXminYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXminYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXminYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_constantXminYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentx_continuumXminYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 289)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Xmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:eqn_component'), 'y')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Xmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Ymax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Ymin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantYmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
# ---------------------------------------------------------------------------
# Recorded OOF3D GUI session fragment (replay log).
# NOTE(review): the non-"checkpoint" lines are Python statements executed by
# the GUI replay harness (findWidget/setComboBox are harness helpers); the
# bare "checkpoint ..." lines are synchronization markers matched against
# emitted GUI events rather than executed as Python — confirm against the
# gtklogger replay machinery.  The statements below are left byte-identical:
# this log is machine-recorded and order-sensitive.
#
# This fragment repeatedly opens the "New Boundary Condition" dialog and
# creates Floating boundary conditions, two per mesh boundary: one with a
# Constant Profile ("...constant<Boundary>") and one with a Continuum
# Profile ("...continuum<Boundary>"), covering face, edge, and corner
# boundaries (Zmax, Zmin, XmaxYmax, ..., XminYmaxZmax).
# ---------------------------------------------------------------------------
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: Continuum Profile -> "floating_fieldx_componenty_continuumYmin"
# (boundary selection carried over from the previous dialog invocation).
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
# Post-creation checkpoints: after each new BC the harness waits for the
# analysis chooser and the Solver/Field pages to re-sensitize.  The same
# 8-line checkpoint group follows every gtk-ok click below.
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary Zmax, Constant Profile -> "floating_fieldx_componenty_constantZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Zmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary Zmax, Continuum Profile -> "floating_fieldx_componenty_continuumZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary Zmin, Constant Profile -> "floating_fieldx_componenty_constantZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Zmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary Zmin, Continuum Profile -> "floating_fieldx_componenty_continuumZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Edge boundaries begin here.
# BC: boundary XmaxYmax, Constant Profile -> "floating_fieldx_componenty_constantXmaxYmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmaxYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYmax, Continuum Profile -> "floating_fieldx_componenty_continuumXmaxYmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXmaxYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYmin, Constant Profile -> "floating_fieldx_componenty_constantXmaxYmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmaxYmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYmin, Continuum Profile -> "floating_fieldx_componenty_continuumXmaxYmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXmaxYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxZmax, Constant Profile -> "floating_fieldx_componenty_constantXmaxZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxZmax, Continuum Profile -> "floating_fieldx_componenty_continuumXmaxZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxZmin, Constant Profile -> "floating_fieldx_componenty_constantXmaxZmin"
# (recorded click order differs slightly here: profile chosen before boundary)
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxZmin, Continuum Profile -> "floating_fieldx_componenty_continuumXmaxZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminYmax, Constant Profile -> "floating_fieldx_componenty_constantXminYmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXminYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminYmax, Continuum Profile -> "floating_fieldx_componenty_continuumXminYmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXminYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminYmin, Constant Profile -> "floating_fieldx_componenty_constantXminYmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXminYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminYmin, Continuum Profile -> "floating_fieldx_componenty_continuumXminYmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXminYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminZmax, Constant Profile -> "floating_fieldx_componenty_constantXminZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminZmax, Continuum Profile -> "floating_fieldx_componenty_continuumXminZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminZmin, Constant Profile -> "floating_fieldx_componenty_constantXminZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminZmin, Continuum Profile -> "floating_fieldx_componenty_continuumXminZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary YmaxZmax, Constant Profile -> "floating_fieldx_componenty_constantYmaxZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary YmaxZmax, Continuum Profile -> "floating_fieldx_componenty_continuumYmaxZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary YmaxZmin, Constant Profile -> "floating_fieldx_componenty_constantYmaxZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary YmaxZmin, Continuum Profile -> "floating_fieldx_componenty_continuumYmaxZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary YminZmax, Constant Profile -> "floating_fieldx_componenty_constantYminZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary YminZmax, Continuum Profile -> "floating_fieldx_componenty_continuumYminZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary YminZmin, Constant Profile -> "floating_fieldx_componenty_constantYminZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary YminZmin, Continuum Profile -> "floating_fieldx_componenty_continuumYminZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# Corner boundaries begin here.
# BC: boundary XmaxYmaxZmax, Constant Profile -> "floating_fieldx_componenty_constantXmaxYmaxZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmaxYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYmaxZmax, Continuum Profile -> "floating_fieldx_componenty_continuumXmaxYmaxZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXmaxYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYmaxZmin, Constant Profile -> "floating_fieldx_componenty_constantXmaxYmaxZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmaxYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYmaxZmin, Continuum Profile -> "floating_fieldx_componenty_continuumXmaxYmaxZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXmaxYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYminZmax, Constant Profile -> "floating_fieldx_componenty_constantXmaxYminZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmaxYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYminZmax, Continuum Profile -> "floating_fieldx_componenty_continuumXmaxYminZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXmaxYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYminZmin, Constant Profile -> "floating_fieldx_componenty_constantXmaxYminZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXmaxYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XmaxYminZmin, Continuum Profile -> "floating_fieldx_componenty_continuumXmaxYminZmin"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXmaxYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminYmaxZmax, Constant Profile -> "floating_fieldx_componenty_constantXminYmaxZmax"
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXminYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
# BC: boundary XminYmaxZmax, Continuum Profile -> "floating_fieldx_componenty_continuumXminYmaxZmax"
# (log fragment is truncated below, mid-checkpoint-group)
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXminYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXminYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXminYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXminYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXminYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_constantXminYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componenty_continuumXminYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 289)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Xmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:eqn_component'), 'z')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Xmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Ymax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Ymin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantYmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Zmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'Zmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmaxYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXmaxYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmaxYmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXmaxYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXminYmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXminYmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXminYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXminYmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'YminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmaxZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmaxYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXmaxYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYmaxZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmaxYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXmaxYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYminZmax')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmaxYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXmaxYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XmaxYminZmin')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXmaxYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXmaxYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmaxZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXminYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXminYmaxZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXminYmaxZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXminYmaxZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYminZmax')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXminYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXminYminZmax')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(467, 309)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:boundary'), 'XminYminZmin')
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Constant Profile')
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_constantXminYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
findWidget('OOF3D Messages 1').resize(983, 200)
findWidget('OOF3D:Boundary Conditions Page:Condition:New').clicked()
checkpoint toplevel widget mapped Dialog-New Boundary Condition
findWidget('Dialog-New Boundary Condition').resize(412, 265)
setComboBox(findWidget('Dialog-New Boundary Condition:condition:Floating:profile:Chooser'), 'Continuum Profile')
findWidget('Dialog-New Boundary Condition').resize(467, 309)
findWidget('Dialog-New Boundary Condition:name:Text').set_text('floating_fieldx_componentz_continuumXminYminZmin')
findWidget('Dialog-New Boundary Condition:gtk-ok').clicked()
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint named analysis chooser set
checkpoint Solver page sensitized
checkpoint OOF.Mesh.Boundary_Conditions.New
assert tests.boundaryConditionCheck(['floating_fieldx_componentx_constantXmax', 'floating_fieldx_componentx_constantXmaxYmax', 'floating_fieldx_componentx_constantXmaxYmaxZmax', 'floating_fieldx_componentx_constantXmaxYmaxZmin', 'floating_fieldx_componentx_constantXmaxYmin', 'floating_fieldx_componentx_constantXmaxYminZmax', 'floating_fieldx_componentx_constantXmaxYminZmin', 'floating_fieldx_componentx_constantXmaxZmax', 'floating_fieldx_componentx_constantXmaxZmin', 'floating_fieldx_componentx_constantXmin', 'floating_fieldx_componentx_constantXminYmax', 'floating_fieldx_componentx_constantXminYmaxZmax', 'floating_fieldx_componentx_constantXminYmaxZmin', 'floating_fieldx_componentx_constantXminYmin', 'floating_fieldx_componentx_constantXminYminZmax', 'floating_fieldx_componentx_constantXminYminZmin', 'floating_fieldx_componentx_constantXminZmax', 'floating_fieldx_componentx_constantXminZmin', 'floating_fieldx_componentx_constantYmax', 'floating_fieldx_componentx_constantYmaxZmax', 'floating_fieldx_componentx_constantYmaxZmin', 'floating_fieldx_componentx_constantYmin', 'floating_fieldx_componentx_constantYminZmax', 'floating_fieldx_componentx_constantYminZmin', 'floating_fieldx_componentx_constantZmax', 'floating_fieldx_componentx_constantZmin', 'floating_fieldx_componentx_continuumXmaxYmax', 'floating_fieldx_componentx_continuumXmaxYmaxZmax', 'floating_fieldx_componentx_continuumXmaxYmaxZmin', 'floating_fieldx_componentx_continuumXmaxYmin', 'floating_fieldx_componentx_continuumXmaxYminZmax', 'floating_fieldx_componentx_continuumXmaxYminZmin', 'floating_fieldx_componentx_continuumXmaxZmax', 'floating_fieldx_componentx_continuumXmaxZmin', 'floating_fieldx_componentx_continuumXmin', 'floating_fieldx_componentx_continuumXminYmax', 'floating_fieldx_componentx_continuumXminYmaxZmax', 'floating_fieldx_componentx_continuumXminYmaxZmin', 'floating_fieldx_componentx_continuumXminYmin', 'floating_fieldx_componentx_continuumXminYminZmax', 
'floating_fieldx_componentx_continuumXminYminZmin', 'floating_fieldx_componentx_continuumXminZmax', 'floating_fieldx_componentx_continuumXminZmin', 'floating_fieldx_componentx_continuumYmax', 'floating_fieldx_componentx_continuumYmaxZmax', 'floating_fieldx_componentx_continuumYmaxZmin', 'floating_fieldx_componentx_continuumYmin', 'floating_fieldx_componentx_continuumYminZmax', 'floating_fieldx_componentx_continuumYminZmin', 'floating_fieldx_componentx_continuumZmax', 'floating_fieldx_componentx_continuumZmin', 'floating_fieldx_componenty_constantXmax', 'floating_fieldx_componenty_constantXmaxYmax', 'floating_fieldx_componenty_constantXmaxYmaxZmax', 'floating_fieldx_componenty_constantXmaxYmaxZmin', 'floating_fieldx_componenty_constantXmaxYmin', 'floating_fieldx_componenty_constantXmaxYminZmax', 'floating_fieldx_componenty_constantXmaxYminZmin', 'floating_fieldx_componenty_constantXmaxZmax', 'floating_fieldx_componenty_constantXmaxZmin', 'floating_fieldx_componenty_constantXmin', 'floating_fieldx_componenty_constantXminYmax', 'floating_fieldx_componenty_constantXminYmaxZmax', 'floating_fieldx_componenty_constantXminYmaxZmin', 'floating_fieldx_componenty_constantXminYmin', 'floating_fieldx_componenty_constantXminYminZmax', 'floating_fieldx_componenty_constantXminYminZmin', 'floating_fieldx_componenty_constantXminZmax', 'floating_fieldx_componenty_constantXminZmin', 'floating_fieldx_componenty_constantYmax', 'floating_fieldx_componenty_constantYmaxZmax', 'floating_fieldx_componenty_constantYmaxZmin', 'floating_fieldx_componenty_constantYmin', 'floating_fieldx_componenty_constantYminZmax', 'floating_fieldx_componenty_constantYminZmin', 'floating_fieldx_componenty_constantZmax', 'floating_fieldx_componenty_constantZmin', 'floating_fieldx_componenty_continuumXmaxYmax', 'floating_fieldx_componenty_continuumXmaxYmaxZmax', 'floating_fieldx_componenty_continuumXmaxYmaxZmin', 'floating_fieldx_componenty_continuumXmaxYmin', 'floating_fieldx_componenty_continuumXmaxYminZmax', 
'floating_fieldx_componenty_continuumXmaxYminZmin', 'floating_fieldx_componenty_continuumXmaxZmax', 'floating_fieldx_componenty_continuumXmaxZmin', 'floating_fieldx_componenty_continuumXmin', 'floating_fieldx_componenty_continuumXminYmax', 'floating_fieldx_componenty_continuumXminYmaxZmax', 'floating_fieldx_componenty_continuumXminYmaxZmin', 'floating_fieldx_componenty_continuumXminYmin', 'floating_fieldx_componenty_continuumXminYminZmax', 'floating_fieldx_componenty_continuumXminYminZmin', 'floating_fieldx_componenty_continuumXminZmax', 'floating_fieldx_componenty_continuumXminZmin', 'floating_fieldx_componenty_continuumYmax', 'floating_fieldx_componenty_continuumYmaxZmax', 'floating_fieldx_componenty_continuumYmaxZmin', 'floating_fieldx_componenty_continuumYmin', 'floating_fieldx_componenty_continuumYminZmax', 'floating_fieldx_componenty_continuumYminZmin', 'floating_fieldx_componenty_continuumZmax', 'floating_fieldx_componenty_continuumZmin', 'floating_fieldx_componentz_constantXmax', 'floating_fieldx_componentz_constantXmaxYmax', 'floating_fieldx_componentz_constantXmaxYmaxZmax', 'floating_fieldx_componentz_constantXmaxYmaxZmin', 'floating_fieldx_componentz_constantXmaxYmin', 'floating_fieldx_componentz_constantXmaxYminZmax', 'floating_fieldx_componentz_constantXmaxYminZmin', 'floating_fieldx_componentz_constantXmaxZmax', 'floating_fieldx_componentz_constantXmaxZmin', 'floating_fieldx_componentz_constantXmin', 'floating_fieldx_componentz_constantXminYmax', 'floating_fieldx_componentz_constantXminYmaxZmax', 'floating_fieldx_componentz_constantXminYmaxZmin', 'floating_fieldx_componentz_constantXminYmin', 'floating_fieldx_componentz_constantXminYminZmax', 'floating_fieldx_componentz_constantXminYminZmin', 'floating_fieldx_componentz_constantXminZmax', 'floating_fieldx_componentz_constantXminZmin', 'floating_fieldx_componentz_constantYmax', 'floating_fieldx_componentz_constantYmaxZmax', 'floating_fieldx_componentz_constantYmaxZmin', 
'floating_fieldx_componentz_constantYmin', 'floating_fieldx_componentz_constantYminZmax', 'floating_fieldx_componentz_constantYminZmin', 'floating_fieldx_componentz_constantZmax', 'floating_fieldx_componentz_constantZmin', 'floating_fieldx_componentz_continuumXmaxYmax', 'floating_fieldx_componentz_continuumXmaxYmaxZmax', 'floating_fieldx_componentz_continuumXmaxYmaxZmin', 'floating_fieldx_componentz_continuumXmaxYmin', 'floating_fieldx_componentz_continuumXmaxYminZmax', 'floating_fieldx_componentz_continuumXmaxYminZmin', 'floating_fieldx_componentz_continuumXmaxZmax', 'floating_fieldx_componentz_continuumXmaxZmin', 'floating_fieldx_componentz_continuumXmin', 'floating_fieldx_componentz_continuumXminYmax', 'floating_fieldx_componentz_continuumXminYmaxZmax', 'floating_fieldx_componentz_continuumXminYmaxZmin', 'floating_fieldx_componentz_continuumXminYmin', 'floating_fieldx_componentz_continuumXminYminZmax', 'floating_fieldx_componentz_continuumXminYminZmin', 'floating_fieldx_componentz_continuumXminZmax', 'floating_fieldx_componentz_continuumXminZmin', 'floating_fieldx_componentz_continuumYmax', 'floating_fieldx_componentz_continuumYmaxZmax', 'floating_fieldx_componentz_continuumYmaxZmin', 'floating_fieldx_componentz_continuumYmin', 'floating_fieldx_componentz_continuumYminZmax', 'floating_fieldx_componentz_continuumYminZmin', 'floating_fieldx_componentz_continuumZmax', 'floating_fieldx_componentz_continuumZmin'])
findMenu(findWidget('OOF3D:MenuBar'), 'File:Save:Python_Log').activate()
checkpoint toplevel widget mapped Dialog-Python_Log
findWidget('Dialog-Python_Log').resize(190, 92)
findWidget('Dialog-Python_Log:filename').set_text('bcpage.log')
findWidget('Dialog-Python_Log:gtk-ok').clicked()
checkpoint OOF.File.Save.Python_Log
assert tests.filediff('bcpage.log')
widget_2=findWidget('OOF3D')
handled_2=widget_2.event(event(gtk.gdk.DELETE,window=widget_2.window))
postpone if not handled_2: widget_2.destroy()
checkpoint OOF.Graphics_1.File.Close
| 55.745987
| 7,363
| 0.846127
| 19,185
| 159,768
| 6.980714
| 0.013604
| 0.152921
| 0.177592
| 0.178802
| 0.943341
| 0.939981
| 0.937368
| 0.936151
| 0.9353
| 0.933224
| 0
| 0.01916
| 0.07387
| 159,768
| 2,865
| 7,364
| 55.765445
| 0.885947
| 0.003468
| 0
| 0.927018
| 0
| 0
| 0.370793
| 0.146839
| 0
| 0
| 0
| 0
| 0.001404
| 0
| null | null | 0
| 0.000351
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f1b494d677a6d8e3cc10d47ecb7279b43f797fb7
| 37,307
|
py
|
Python
|
abeja/datasets/api/client.py
|
abeja-inc/abeja-platform-sdk
|
97cfc99b11ffc1fccb3f527435277bc89e18b8c3
|
[
"Apache-2.0"
] | 2
|
2020-10-20T18:38:16.000Z
|
2020-10-20T20:12:35.000Z
|
abeja/datasets/api/client.py
|
abeja-inc/abeja-platform-sdk
|
97cfc99b11ffc1fccb3f527435277bc89e18b8c3
|
[
"Apache-2.0"
] | 30
|
2020-04-07T01:15:47.000Z
|
2020-11-18T03:25:19.000Z
|
abeja/datasets/api/client.py
|
abeja-inc/abeja-platform-sdk
|
97cfc99b11ffc1fccb3f527435277bc89e18b8c3
|
[
"Apache-2.0"
] | null | null | null |
from typing import List, Optional
from abeja.common.api_client import BaseAPIClient
class APIClient(BaseAPIClient):
"""A Low-Level client for Dataset API
.. code-block:: python
from abeja.datasets import APIClient
api_client = APIClient()
"""
def create_dataset(
self,
organization_id: str,
name: str,
type: str,
props: dict) -> dict:
"""create a dataset
API reference: POST /organizations/<organization_id>/datasets/
Request Syntax:
.. code-block:: python
organization_id = "1102940376065"
dataset_name = "test-dataset"
dataset_type = "classification"
props = {
"categories": [
{
"labels": [
{
"label_id": 1,
"label": "dog"
},
{
"label_id": 2,
"label": "cat"
},
{
"label_id": 3,
"label": "others"
}
],
"category_id": 1,
"name": "cats_dogs"
}
]
}
response = api_client.create_dataset(organization_id, dataset_name, dataset_type, props)
Params:
- **organization_id** (str): ORGANIZATION_ID
- **name** (str): dataset name
- **type** (str): dataset types eg: classification, detection
- **props** (dict): properties of dataset
- **categories** (list): list of categories which are used as validation rules for dataset item
- **category_id** (int): identifier of category
- **name** (str): name of category
- **labels** (list):
- **label_id** (int): identifier of label
- **label** (str): name of label **[optional]**
Return type:
dict
Returns:
Response Syntax:
.. code-block:: json
{
"created_at": "2018-04-10T07:49:30.514794",
"dataset_id": "1410805969256",
"name": "test-dataset",
"organization_id": "1102940376065",
"props": {
"categories": [
{
"labels": [
{
"label_id": 1,
"label": "dog"
},
{
"label_id": 2,
"label": "cat"
},
{
"label_id": 3,
"label": "others"
}
],
"category_id": 1,
"name": "cats_dogs"
}
]
},
"type": "classification",
"updated_at": "2018-04-10T07:49:30.514794"
}
Raises:
- BadRequest: the resource already exists or parameters is insufficient or invalid.
- Unauthorized: Authentication failed
- InternalServerError
"""
params = {
'name': name,
'type': type,
'props': props
}
path = '/organizations/{}/datasets'.format(organization_id)
return self._connection.api_request(
method='POST', path=path, json=params)
def get_dataset(self, organization_id: str, dataset_id: str) -> dict:
"""get a dataset
API reference: GET /organizations/<organization_id>/datasets/<dataset_id>
Request Syntax:
.. code-block:: python
response = api_client.get_dataset(organization_id='1102940376065', dataset_id='1410805969256')
Params:
- **organization_id** (str): ORGANIZATION_ID
- **dataset_id** (str): dataset_id of the requested dataset
Return type:
dict
Returns:
Response Syntax:
.. code-block:: json
{
"created_at": "2018-04-10T07:49:30.514794",
"dataset_id": "1410805969256",
"total_count": 3670,
"name": "test-dataset",
"organization_id": "1102940376065",
"props": {
"categories": [
{
"labels": [
{
"label_id": 1,
"label": "dog"
},
{
"label_id": 2,
"label": "cat"
},
{
"label_id": 3,
"label": "others"
}
],
"category_id": 1,
"name": "cats_dogs"
}
]
},
"type": "classification",
"updated_at": "2018-04-10T07:49:30.514794"
}
Raises:
- NotFound: dataset not found
- Unauthorized: Authentication failed
- InternalServerError
"""
path = '/organizations/{}/datasets/{}'.format(
organization_id, dataset_id)
return self._connection.api_request(method='GET', path=path)
def list_datasets(
self, organization_id: str, max_results: Optional[int]=None,
next_token: Optional[str]=None) -> List[dict]:
"""Get datasets list
API reference: GET /organizations/<organization_id>/datasets/
Request syntax:
.. code-block:: python
response = api_client.get_dataset(organization_id='1102940376065')
Params:
- **organization_id** (str): ORGANIZATION_ID
- **max_results** (int): maximum number of datasets in case of passing **[optional]**
- **next_token** (str): To get the next list of datasets **[optional]**
Return type:
list
Returns:
Return syntax:
.. code-block:: python
[
{
"created_at": "2018-03-03T09:04:58.274324",
"dataset_id": "1377232365920",
"name": "cats-dogs",
"organization_id": "1102940376065",
"props": {
"categories": [
{
"labels": [
{
"label_id": 1,
"label": "dog"
},
{
"label_id": 2,
"label": "cat"
},
{
"label_id": 3,
"label": "others"
}
],
"category_id": 1,
"name": "cats_dogs"
}
]
},
"type": "classification",
"updated_at": "2018-03-03T09:04:58.274324"
},
{
.....
.....
},
.....
]
Raises:
- Unauthorized: Authentication failed
- InternalServerError
"""
params = {}
if max_results:
params['max_results'] = max_results
if next_token:
params['next_token'] = next_token
path = '/organizations/{}/datasets'.format(organization_id)
return self._connection.api_request(
method='GET', path=path, params=params)
def delete_dataset(self, organization_id: str, dataset_id: str) -> dict:
"""delete a dataset
API reference: DELETE /organizations/<organization_id>/datasets/<dataset_id>
Request syntax:
.. code-block:: python
response = api_client.delete_dataset(organization_id='1102940376065', dataset_id='1410805969256')
Params:
- **organization_id** (str): ORGANIZATION_ID
- **dataset_id** (str): dataset id
Return type:
dict
Responses:
Response syntax:
.. code-block:: python
{
"created_at": "2018-04-10T07:49:30.514794",
"dataset_id": "1410805969256",
"name": "test-dataset",
"organization_id": "1102940376065",
"props": {
"categories": [
{
"labels": [
{
"label_id": 1,
"label": "dog"
},
{
"label_id": 2,
"label": "cat"
},
{
"label_id": 3,
"label": "others"
}
],
"category_id": 1,
"name": "cats_dogs"
}
]
},
"type": "classification",
"updated_at": "2018-04-10T07:49:30.514794"
}
Raises:
- NotFound: dataset not found
- Unauthorized: Authentication failed
- InternalServerError
"""
path = '/organizations/{}/datasets/{}'.format(
organization_id, dataset_id)
return self._connection.api_request(method='DELETE', path=path)
# dataset_item
def create_dataset_item(
self, organization_id: str, dataset_id: str,
source_data: dict, attributes: Optional[dict]=None) -> dict:
"""create a item in a dataset
API reference: POST /organizations/<organization_id>/datasets/<dataset_id>/items/
Request syntax:
.. code-block:: python
source_data = [
{
"data_type": "image/jpeg",
"data_uri": "datalake://1200123803688/20170815T044617-f20dde80-1e3b-4496-bc06-1b63b026b872",
"height": 500,
"width": 200
}
]
attributes = {
"classification": [
{
"category_id": 1,
"label_id": 1,
}
],
"custom": [
{
"anything": "something"
}
],
"detection": [
{
"category_id": 1,
"label_id": 2,
"rect": {
"xmin": 22,
"ymin": 145,
"xmax": 140,
"ymax": 220
}
},
]
}
response = api_client.create_dataset_item(
organization_id="1102940376065",
dataset_id="1410805969256",
source_data=source_data,
attributes=attributes
)
Params:
- **organization_id** (str): ORGANIZATION_ID
- **dataset_id** (str): dataset id
- **source_data** (list): list of source data
- **data_type** (str): MIME type of file
- **data_uri** (str): reference identifier of the source file. ex) datalake://1200000000000/20170815T044617-f20dde80-1e3b-4496-bc06-1b63b026b872
- **height** (int): height of image, if the source file is an image **[optional]**
- **width** (int): width of image, if the source file is an image **[optional]**
- **attributes** (dict): attribute of dataset item **[optional]**
- **classification** (list): list of label, if dataset type is `classification`
- **category_id** (int): identifier of category
- **label_id** (int): identifier of label, registered in dataset.props
- **label** (str): name of label, registered in dataset.props **[optional]**
- **detection** (list): list of label, if dataset type is `detection`
- **category_id** (int): identifier of category
- **label_id** (int): identifier of label, registered in dataset.props
- **label** (str): name of label, registered in dataset.props **[optional]**
- **rect** (dict): coordinates of bounding box
- **xmin** (int):
- **ymin** (int):
- **xmax** (int):
- **ymax** (int):
- **segmentation** (list): list of label, if dataset type is `segmentation`
- **custom** (any): any primitive type of objects, if dataset type is `custom`
Return type:
list
Returns:
Return syntax:
.. code-block:: python
{
"attributes": {
"classification": [
{
"category_id": 1,
"label_id": 1
}
],
"custom": [
{
"anything": "something"
}
],
"detection": [
{
"category_id": 1,
"label_id": 2,
"rect": {
"xmin": 22,
"ymin": 145,
"xmax": 140,
"ymax": 220
}
},
]
},
"created_at": "2017-12-27T06:25:00.394026",
"dataset_id": "1410805969256",
"dataset_item_id": 0,
"source_data": [
{
"data_type": "image/jpeg",
"data_uri": "datalake://1200123803688/20170815T044617-f20dde80-1e3b-4496-bc06-1b63b026b872",
"height": 500,
"width": 200
}
],
"organization_id": "1102940376065",
"updated_at": "2017-12-27T06:25:00.394026"
}
Raises:
- BadRequest: specified dataset id does not exist or dataset item id already exist,
parameters is insufficient or invalid,
input data exceeded the max limit
- Unauthorized: Authentication failed
- InternalServerError
"""
params = {
'source_data': source_data,
'attributes': attributes
}
path = '/organizations/{}/datasets/{}/items'.format(organization_id,
dataset_id)
return self._connection.api_request(method='POST',
path=path,
json=params)
def get_dataset_item(
self,
organization_id: str,
dataset_id: str,
dataset_item_id: str) -> dict:
"""get a item in a dataset
API reference: GET /organizations/<organization_id>/datasets/<dataset_id>/items/<dataset_item_id>
Request syntax:
.. code-block:: python
response = api_client.get_dataset_item(organization_id='1102940376065',
dataset_id='1410805969256',
dataset_item_id=0
)
Params:
- **organization_id** (str): ORGANIZATION_ID
- **dataset_id** (str): dataset id
- **dataset_item_id** (int): dataset item id
Return type:
dict
Returns:
Return syntax:
.. code-block:: python
{
"attributes": {
"classification": [
{
"category_id": 1,
"label_id": 1
}
],
"custom": [
{
"anything": "something"
}
],
"detection": [
{
"category_id": 1,
"label_id": 2,
"rect": {
"xmin": 22,
"ymin": 145,
"xmax": 140,
"ymax": 220
}
},
]
},
"created_at": "2017-12-27T06:25:00.394026",
"dataset_id": "1410805969256",
"dataset_item_id": 0,
"source_data": [
{
"data_type": "image/jpeg",
"data_uri": "datalake://1200123803688/20170815T044617-f20dde80-1e3b-4496-bc06-1b63b026b872",
"height": 500,
"width": 200
}
],
"organization_id": "1102940376065",
"updated_at": "2017-12-27T06:25:00.394026"
}
Raises:
- NotFound: dataset not found
- Unauthorized: Authentication failed
- InternalServerError
"""
path = '/organizations/{}/datasets/{}/items/{}'.format(organization_id,
dataset_id,
dataset_item_id)
return self._connection.api_request(method='GET', path=path)
def list_dataset_items(
self, organization_id: str, dataset_id: str,
params: Optional[dict]=None) -> List[dict]:
"""Get item list in a dataset
API reference: GET /organizations/<organization_id>/datasets/<dataset_id>/items/
Params:
- **organization_id** (str): ORGANIZATION_ID
- **dataset_id** (str): dataset id
- **params** (dict): **[optional]**
- **next_page_token** (str): token to get the next page
- **q** (str): search query, ex) `label_id:1 AND label:dog OR tag:A`
Return type:
dict
Returns:
dataset item list
Return syntax:
.. code-block:: python
{
"items": [
{
"attributes": {
"classification": [
{
"category_id": 1,
"label_id": 1
}
],
"custom": [
{
"anything": "something"
}
],
"detection": [
{
"category_id": 1,
"label_id": 2,
"rect": {
"xmin": 22,
"ymin": 145,
"xmax": 140,
"ymax": 220
}
},
]
},
"created_at": "2017-12-27T06:25:00.394026",
"dataset_id": "1410805969256",
"dataset_item_id": 0,
"source_data": [
{
"data_type": "image/jpeg",
"data_uri": "datalake://1200123803688/20170815T044617-f20dde80-1e3b-4496-bc06-1b63b026b872",
"height": 500,
"width": 200
}
],
"organization_id": "1102940376065",
"updated_at": "2017-12-27T06:25:00.394026"
},
...
],
"total_count": 1000,
"next_page_token": "xxx"
}
- **items** (list): list of dataset item dict
- **total_count** (int): total number of filtered dataset items
- **next_page_token** (str): token to get the next page
Raises:
- NotFound: dataset not found
- Unauthorized: Authentication failed
- InternalServerError
"""
path = '/organizations/{}/datasets/{}/items'.format(organization_id,
dataset_id)
if not params:
params = {}
return self._connection.api_request(
method='GET', path=path, params=params)
def update_dataset_item(
self, organization_id: str, dataset_id: str, dataset_item_id: str,
attributes: Optional[dict] = None) -> dict:
"""update a item in a dataset
API reference: PUT /organizations/<organization_id>/datasets/<dataset_id>/items/<dataset_item_id>
Request syntax:
.. code-block:: python
attributes = {
"classification": [
{
"category_id": 1,
"label_id": 1,
}
],
"custom": [
{
"anything": "something"
}
],
"detection": [
{
"category_id": 1,
"label_id": 2,
"rect": {
"xmin": 22,
"ymin": 145,
"xmax": 140,
"ymax": 220
}
},
]
}
api_client.update_dataset_item(
organization_id='1102940376065',
dataset_id='1410805969256',
dataset_item_id=0,
attributes=attributes)
Params:
- **organization_id** (str): ORGANIZATION_ID
- **dataset_id** (str): dataset id
- **dataset_item_id** (str): dataset item id
- **attributes** (dict): attribute of dataset item **[optional]**
- **classification** (list): list of label, if dataset type is `classification`
- **category_id** (int): identifier of category
- **label_id** (int): identifier of label, registered in dataset.props
- **label** (str): name of label, registered in dataset.props **[optional]**
- **detection** (list): list of label, if dataset type is `detection`
- **category_id** (int): identifier of category
- **label_id** (int): identifier of label, registered in dataset.props
- **label** (str): name of label, registered in dataset.props **[optional]**
- **rect** (dict): coordinates of bounding box
- **xmin** (int):
- **ymin** (int):
- **xmax** (int):
- **ymax** (int):
- **segmentation** (list): list of label, if dataset type is `segmentation`
- **custom** (any): any primitive type of objects, if dataset type is `custom`
Return type:
list
Returns:
Return syntax:
.. code-block:: python
{
"attributes": {
"classification": [
{
"category_id": 1,
"label_id": 1
}
],
"custom": [
{
"anything": "something"
}
],
"detection": [
{
"category_id": 1,
"label_id": 2,
"rect": {
"xmin": 22,
"ymin": 145,
"xmax": 140,
"ymax": 220
}
},
]
},
"created_at": "2017-12-27T06:25:00.394026",
"dataset_id": "1410805969256",
"dataset_item_id": 0,
"source_data": [
{
"data_type": "image/jpeg",
"data_uri": "datalake://1200123803688/20170815T044617-f20dde80-1e3b-4496-bc06-1b63b026b872",
"height": 500,
"width": 200
}
],
"organization_id": "1102940376065",
"updated_at": "2017-12-27T06:25:00.394026"
}
Raises:
- BadRequest: specified dataset id does not exist
- NotFound: dataset not found
- Unauthorized: Authentication failed
- InternalServerError
"""
params = {
'attributes': attributes
}
path = '/organizations/{}/datasets/{}/items/{}'.format(organization_id,
dataset_id,
dataset_item_id)
return self._connection.api_request(
method='PUT', path=path, json=params)
def bulk_update_dataset_item(
self, organization_id: str, dataset_id: str,
bulk_attributes: Optional[dict] = None) -> dict:
"""update a item in a dataset
API reference: PUT /organizations/<organization_id>/datasets/<dataset_id>/items
Request syntax:
.. code-block:: python
bulk_attributes = [
{
"dataset_item_id": 1111111111111,
"attributes": {
"classification": [
{
"category_id": 1,
"label_id": 1
}
],
"custom_format": {
"anything": "something"
},
"detection": [
{
"category_id": 1,
"label_id": 2,
"rect": {
"xmin": 22,
"ymin": 145,
"xmax": 140,
"ymax": 220
}
}
]
}
}
]
api_client.bulk_update_dataset_item(organization_id='1102940376065',
dataset_id='1410805969256',
bulk_attributes=bulk_attributes
)
Params:
- **organization_id** (str): ORGANIZATION_ID
- **dataset_id** (str): dataset id
- **bulk_attributes** (dict): bulk_attribute of dataset item **[optional]**
- **dataset_item_id** (list): list of dataset_item_id
- **classification** (list): list of label, if dataset type is `classification`
- **category_id** (int): identifier of category
- **label_id** (int): identifier of label, registered in dataset.props
- **label** (str): name of label, registered in dataset.props **[optional]**
- **detection** (list): list of label, if dataset type is `detection`
- **category_id** (int): identifier of category
- **label_id** (int): identifier of label, registered in dataset.props
- **label** (str): name of label, registered in dataset.props **[optional]**
- **rect** (dict): coordinates of bounding box
- **xmin** (int):
- **ymin** (int):
- **xmax** (int):
- **ymax** (int):
- **segmentation** (list): list of label, if dataset type is `segmentation`
- **custom** (any): any primitive type of objects, if dataset type is `custom`
Return type:
list
Returns:
Return syntax:
.. code-block:: python
[
{
"organization_id": "1200000000000",
"dataset_id": "1440000000000",
"dataset_item_id": 101554,
"source_data": [
{
"data_type": "image/jpeg",
"data_uri": "datalake://1230000000000/20180520T133855-10051aa4-d7aa-43a1-8d5e-4d59dae5bb83"
}
],
"attributes": {
"classification": {
"category_id": 1,
"label_id": 1
},
"detection": [
{
"category_id": 1,
"label_id": 2,
"rect": {
"xmin": 22,
"ymin": 145,
"xmax": 140,
"ymax": 220
}
}
]
},
"created_at": "2018-05-20T13:51:16.010344",
"updated_at": "2018-05-20T13:51:16.010344"
}
]
Raises:
- BadRequest: specified dataset id does not exist
- Unauthorized: Authentication failed
- InternalServerError
"""
params = bulk_attributes
path = '/organizations/{}/datasets/{}/items'.format(organization_id,
dataset_id)
return self._connection.api_request(
method='PUT', path=path, json=params)
def delete_dataset_item(
self,
organization_id: str,
dataset_id: str,
dataset_item_id: str) -> dict:
"""delete a item in a dataset
API reference: DELETE /organizations/<organization_id>/datasets/<dataset_id>/items/<dataset_item_id>
Request syntax:
.. code-block:: python
api_client.delete_dataset_item(organization_id='1102940376065',
dataset_id='1410805969256',
dataset_item_id=0
)
Params:
- **organization_id** (str): ORGANIZATION_ID
- **dataset_id** (str): dataset id
- **dataset_item_id** (str): dataset item id
Return type:
dict
Returns:
deleted dataset item
Raises:
- NotFound: dataset not found
- Unauthorized: Authentication failed
- InternalServerError
"""
path = '/organizations/{}/datasets/{}/items/{}'.format(organization_id,
dataset_id,
dataset_item_id)
return self._connection.api_request(method='DELETE', path=path)
| 41.177704
| 160
| 0.345458
| 2,347
| 37,307
| 5.331487
| 0.090328
| 0.0772
| 0.028051
| 0.026852
| 0.858547
| 0.814833
| 0.798689
| 0.776313
| 0.758491
| 0.719971
| 0
| 0.082492
| 0.564586
| 37,307
| 905
| 161
| 41.223204
| 0.687823
| 0.726271
| 0
| 0.574468
| 0
| 0
| 0.09505
| 0.072387
| 0
| 0
| 0
| 0
| 0
| 1
| 0.106383
| false
| 0
| 0.021277
| 0
| 0.244681
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f1e281d1e1698b80849ee72c71b91d81a9ad39c1
| 9,004
|
py
|
Python
|
gr-ieee802-15-4/python/css_constants.py
|
xueyuecanfeng/C-LQI
|
f489c6447428d6affb2159e9d8f895caab2868c7
|
[
"BSD-2-Clause"
] | 2
|
2021-11-30T02:35:48.000Z
|
2021-11-30T02:53:02.000Z
|
gr-ieee802-15-4/python/css_constants.py
|
xueyuecanfeng/C-LQI
|
f489c6447428d6affb2159e9d8f895caab2868c7
|
[
"BSD-2-Clause"
] | null | null | null |
gr-ieee802-15-4/python/css_constants.py
|
xueyuecanfeng/C-LQI
|
f489c6447428d6affb2159e9d8f895caab2868c7
|
[
"BSD-2-Clause"
] | null | null | null |
import numpy as np
# Maximum PHY-layer payload in bytes (127 -- presumably the IEEE 802.15.4
# aMaxPHYPacketSize constant; confirm against the standard).
max_phy_packetsize_bytes = 127

# Spreading codewords for the 1 Mb/s mode: 8 bi-orthogonal codewords of
# 4 chips each.  The table is the 4x4 Walsh-Hadamard matrix in natural
# (Hadamard) order, H[k, j] = (-1)**popcount(k & j), stacked on top of its
# element-wise negation.
_walsh4 = np.array(
    [[(-1) ** bin(k & j).count('1') for j in range(4)] for k in range(4)],
    dtype=int)
codewords_1mbps = np.concatenate((_walsh4, -_walsh4), axis=0)
del _walsh4
# Spreading codewords for the 250 kb/s mode: 64 bi-orthogonal codewords of
# 32 chips each.  The upper half is the 32x32 Walsh-Hadamard matrix in
# natural (Hadamard) order, H[k, j] = (-1)**popcount(k & j); the lower half
# is its element-wise negation, giving the [H; -H] bi-orthogonal code set.
_walsh32 = np.array(
    [[(-1) ** bin(k & j).count('1') for j in range(32)] for k in range(32)],
    dtype=int)
codewords_250kbps = np.concatenate((_walsh32, -_walsh32), axis=0)
del _walsh32
# Interleaver permutation over a 64-element block: output position i reads
# input position intlv_seq[i].  Indices >= 32 are written as "32 + k" to make
# visible how the two 32-element half-blocks are woven together in groups of
# four.  NOTE(review): inferred from structure -- confirm against the
# matching interleaver/deinterleaver block that consumes this table.
intlv_seq = [0, 1, 2, 3, 32 + 20, 32 + 21, 32 + 22, 32 + 23, 8, 9, 10, 11, 32 + 28, 32 + 29, 32 + 30, 32 + 31, 16, 17,
             18, 19, 32 + 4, 32 + 5, 32 + 6, 32 + 7, 24, 25, 26, 27, 32 + 12, 32 + 13, 32 + 14, 32 + 15, 32 + 0, 32 + 1,
             32 + 2, 32 + 3, 20, 21, 22, 23, 32 + 8, 32 + 9, 32 + 10, 32 + 11, 28, 29, 30, 31, 32 + 16, 32 + 17,
             32 + 18, 32 + 19, 4, 5, 6, 7, 32 + 24, 32 + 25, 32 + 26, 32 + 27, 12, 13, 14, 15]
# Preamble chip sequences (all ones) for the two data rates.
preamble_250kbps = np.ones((80,), dtype=int)
preamble_1mbps = np.ones((32,), dtype=int)
# Start-of-frame delimiter chip sequences per data rate.
SFD_250kbps = np.array([-1, 1, 1, 1, 1, -1, 1, -1, -1, -1, 1, -1, -1, -1, 1, 1], dtype=int)
SFD_1mbps = np.array([-1, 1, 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, 1, 1, -1, -1], dtype=int)
# Baseband sample rate -- presumably in Hz (32 MS/s); the n_* counts below
# are all consistent with this rate.
bb_samp_rate = 32e6
# Chirp center frequency offset -- presumably Hz; confirm against the PHY spec.
fc = 3.15e6
# Chirp and subchirp durations in seconds, and the candidate gap durations
# t_tau between subchirps.
t_chirp = 6e-6
t_sub = 1.1875e-6
t_tau = [468.75e-9, 312.5e-9, 156.25e-9, 0]
# Sample counts at bb_samp_rate: n_chirp = t_chirp * 32e6 = 192,
# n_sub = t_sub * 32e6 = 38, n_tau = [t * 32e6 for t in t_tau] = [15, 10, 5, 0].
n_chirp = 192
n_sub = 38
n_tau = [15, 10, 5, 0]
# Chirp sweep rate -- presumably rad/s^2 (2*pi * 7.3158 THz/s); confirm
# against the chirp generator that consumes it.
mu = 2 * np.pi * 7.3158 * (10 ** 12)
| 85.752381
| 120
| 0.291426
| 2,305
| 9,004
| 1.130586
| 0.033839
| 1.617805
| 2.422103
| 3.223331
| 0.842671
| 0.842671
| 0.842671
| 0.842671
| 0.842671
| 0.842671
| 0
| 0.385194
| 0.320413
| 9,004
| 104
| 121
| 86.576923
| 0.040693
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010638
| 0
| 0.010638
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
f1e751068a621112ea9462b2c4e23a7081072afd
| 261,500
|
py
|
Python
|
UFO_models/SMEFTsim_top_alphaScheme_UFO/decays.py
|
matthewfeickert/SMEFTsim
|
db7d4a80bdcff424eee27dde71f1eb09ac894039
|
[
"MIT"
] | 4
|
2020-12-29T03:42:43.000Z
|
2021-09-22T09:57:37.000Z
|
UFO_models/SMEFTsim_top_alphaScheme_UFO/decays.py
|
matthewfeickert/SMEFTsim
|
db7d4a80bdcff424eee27dde71f1eb09ac894039
|
[
"MIT"
] | 3
|
2021-05-19T11:06:59.000Z
|
2021-12-11T00:12:02.000Z
|
UFO_models/SMEFTsim_top_alphaScheme_UFO/decays.py
|
matthewfeickert/SMEFTsim
|
db7d4a80bdcff424eee27dde71f1eb09ac894039
|
[
"MIT"
] | 4
|
2021-09-22T09:57:39.000Z
|
2022-03-29T16:09:36.000Z
|
# This file was automatically created by FeynRules 2.3.35
# Mathematica version: 12.1.0 for Linux x86 (64-bit) (March 18, 2020)
# Date: Fri 8 Jan 2021 11:25:14
from object_library import all_decays, Decay
import particles as P
Decay_b = Decay(name = 'Decay_b',
particle = P.b,
partial_widths = {(P.W__minus__,P.t):'((16*ee**2*LambdaSMEFT**4*MB**4 - 32*ee**2*LambdaSMEFT**4*MB**2*MT**2 + 16*ee**2*LambdaSMEFT**4*MT**4 + 16*ee**2*LambdaSMEFT**4*MB**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MT**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**4*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth**2 + 64*ee**2*LambdaSMEFT**4*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth**4 - 8*ee**2*LambdaSMEFT**2*(2*cHl322*MB**4 - 4*cHQ3*MB**4 - cll1221*MB**4 - 4*cHl322*MB**2*MT**2 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 + 2*cHl322*MT**4 - 4*cHQ3*MT**4 - cll1221*MT**4 + 2*cHl322*MB**2*MW**2 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 + 2*cHl322*MT**2*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 - 4*cHl322*MW**4 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + cHDD*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*vevhat**2 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth*vevhat**2 + 8*(6*cHl322*ee**2*LambdaSMEFT**2*MB**4 - 16*cHQ3*ee**2*LambdaSMEFT**2*MB**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MB**4 - 12*cHl322*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 32*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 6*cll1221*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MT**4 - 16*cHQ3*ee**2*LambdaSMEFT**2*MT**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MT**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MB**2*MW**2 - 16*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MB**2*MW**2 + 32*cbWIm**2*MB**4*MW**2 + 32*cbWRe**2*MB**4*MW**2 + 32*ctWIm**2*MB**4*MW**2 + 32*ctWRe**2*MB**4*MW**2 - 48*cHtbRe*ee**2*LambdaSMEFT**2*MB*MT*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 16*cHQ3*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 64*cbWIm**2*MB**2*MT**2*MW**2 - 64*cbWRe**2*MB**2*MT**2*MW**2 - 64*ctWIm**2*MB**2*MT**2*MW**2 - 
64*ctWRe**2*MB**2*MT**2*MW**2 + 32*cbWIm**2*MT**4*MW**2 + 32*cbWRe**2*MT**4*MW**2 + 32*ctWIm**2*MT**4*MW**2 + 32*ctWRe**2*MT**4*MW**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 32*cHQ3*ee**2*LambdaSMEFT**2*MW**4 + 6*cll1221*ee**2*LambdaSMEFT**2*MW**4 - 16*cbWIm**2*MB**2*MW**4 - 16*cbWRe**2*MB**2*MW**4 - 16*ctWIm**2*MB**2*MW**4 - 16*ctWRe**2*MB**2*MW**4 + 192*cbWIm*ctWIm*MB*MT*MW**4 - 192*cbWRe*ctWRe*MB*MT*MW**4 - 16*cbWIm**2*MT**2*MW**4 - 16*cbWRe**2*MT**2*MW**4 - 16*ctWIm**2*MT**2*MW**4 - 16*ctWRe**2*MT**2*MW**4 - 16*cbWIm**2*MW**6 - 16*cbWRe**2*MW**6 - 16*ctWIm**2*MW**6 - 16*ctWRe**2*MW**6 + 3*cHDD*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 6*cHl311*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth**2*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth**3*vevhat**2 - 16*(2*cHl322*ee**2*LambdaSMEFT**2*MB**4 - 8*cHQ3*ee**2*LambdaSMEFT**2*MB**4 - cll1221*ee**2*LambdaSMEFT**2*MB**4 - 4*cHl322*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 16*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MT**4 - 8*cHQ3*ee**2*LambdaSMEFT**2*MT**4 - cll1221*ee**2*LambdaSMEFT**2*MT**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MB**2*MW**2 - 8*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MB**2*MW**2 + 64*cbWIm**2*MB**4*MW**2 + 64*cbWRe**2*MB**4*MW**2 + 64*ctWIm**2*MB**4*MW**2 + 64*ctWRe**2*MB**4*MW**2 - 24*cHtbRe*ee**2*LambdaSMEFT**2*MB*MT*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 8*cHQ3*ee**2*LambdaSMEFT**2*MT**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 128*cbWIm**2*MB**2*MT**2*MW**2 - 128*cbWRe**2*MB**2*MT**2*MW**2 - 128*ctWIm**2*MB**2*MT**2*MW**2 - 128*ctWRe**2*MB**2*MT**2*MW**2 + 64*cbWIm**2*MT**4*MW**2 + 64*cbWRe**2*MT**4*MW**2 + 64*ctWIm**2*MT**4*MW**2 + 64*ctWRe**2*MT**4*MW**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 
16*cHQ3*ee**2*LambdaSMEFT**2*MW**4 + 2*cll1221*ee**2*LambdaSMEFT**2*MW**4 - 32*cbWIm**2*MB**2*MW**4 - 32*cbWRe**2*MB**2*MW**4 - 32*ctWIm**2*MB**2*MW**4 - 32*ctWRe**2*MB**2*MW**4 + 384*cbWIm*ctWIm*MB*MT*MW**4 - 384*cbWRe*ctWRe*MB*MT*MW**4 - 32*cbWIm**2*MT**2*MW**4 - 32*cbWRe**2*MT**2*MW**4 - 32*ctWIm**2*MT**2*MW**4 - 32*ctWRe**2*MT**2*MW**4 - 32*cbWIm**2*MW**6 - 32*cbWRe**2*MW**6 - 32*ctWIm**2*MW**6 - 32*ctWRe**2*MW**6 + cHDD*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth**4*vevhat**2 - 512*MW**2*(-12*cbWIm*ctWIm*MB*MT*MW**2 + 12*cbWRe*ctWRe*MB*MT*MW**2 - (ctWIm**2 + ctWRe**2)*(2*MB**4 + 2*MT**4 - MT**2*MW**2 - MW**4 - MB**2*(4*MT**2 + MW**2)) + cbWIm**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)) + cbWRe**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)))*sth**6*vevhat**2 + ee**2*(4*cHl322**2*MB**4 - 16*cHl322*cHQ3*MB**4 + 16*cHQ3**2*MB**4 + 4*cHtbIm**2*MB**4 + 4*cHtbRe**2*MB**4 - 4*cHl322*cll1221*MB**4 + 8*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 8*cHl322**2*MB**2*MT**2 + 32*cHl322*cHQ3*MB**2*MT**2 - 32*cHQ3**2*MB**2*MT**2 - 8*cHtbIm**2*MB**2*MT**2 - 8*cHtbRe**2*MB**2*MT**2 + 8*cHl322*cll1221*MB**2*MT**2 - 16*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 4*cHl322**2*MT**4 - 16*cHl322*cHQ3*MT**4 + 16*cHQ3**2*MT**4 + 4*cHtbIm**2*MT**4 + 4*cHtbRe**2*MT**4 - 4*cHl322*cll1221*MT**4 + 8*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 4*cHl322**2*MB**2*MW**2 - 16*cHl322*cHQ3*MB**2*MW**2 + 16*cHQ3**2*MB**2*MW**2 + 4*cHtbIm**2*MB**2*MW**2 + 4*cHtbRe**2*MB**2*MW**2 - 4*cHl322*cll1221*MB**2*MW**2 + 8*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 - 48*cHl322*cHtbRe*MB*MT*MW**2 + 96*cHQ3*cHtbRe*MB*MT*MW**2 + 24*cHtbRe*cll1221*MB*MT*MW**2 + 4*cHl322**2*MT**2*MW**2 - 16*cHl322*cHQ3*MT**2*MW**2 + 16*cHQ3**2*MT**2*MW**2 + 4*cHtbIm**2*MT**2*MW**2 + 4*cHtbRe**2*MT**2*MW**2 - 
4*cHl322*cll1221*MT**2*MW**2 + 8*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 8*cHl322**2*MW**4 + 32*cHl322*cHQ3*MW**4 - 32*cHQ3**2*MW**4 - 8*cHtbIm**2*MW**4 - 8*cHtbRe**2*MW**4 + 8*cHl322*cll1221*MW**4 - 16*cHQ3*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHDD*(-4*cHQ3*MB**4 - cll1221*MB**4 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 4*cHQ3*MT**4 - cll1221*MT**4 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 4*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))))*vevhat**4 + 8*cHWB*cth*ee**2*(2*cHl322*MB**4 - 4*cHQ3*MB**4 - cll1221*MB**4 - 4*cHl322*MB**2*MT**2 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 + 2*cHl322*MT**4 - 4*cHQ3*MT**4 - cll1221*MT**4 + 2*cHl322*MB**2*MW**2 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 + 2*cHl322*MT**2*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 - 4*cHl322*MW**4 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + cHDD*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth*vevhat**4 - 2*ee**2*(4*cHl322**2*MB**4 - 24*cHl322*cHQ3*MB**4 + 32*cHQ3**2*MB**4 + 8*cHtbIm**2*MB**4 + 8*cHtbRe**2*MB**4 - 8*cHWB**2*MB**4 - 4*cHl322*cll1221*MB**4 + 12*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 8*cHl322**2*MB**2*MT**2 + 48*cHl322*cHQ3*MB**2*MT**2 - 64*cHQ3**2*MB**2*MT**2 - 
16*cHtbIm**2*MB**2*MT**2 - 16*cHtbRe**2*MB**2*MT**2 + 16*cHWB**2*MB**2*MT**2 + 8*cHl322*cll1221*MB**2*MT**2 - 24*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 4*cHl322**2*MT**4 - 24*cHl322*cHQ3*MT**4 + 32*cHQ3**2*MT**4 + 8*cHtbIm**2*MT**4 + 8*cHtbRe**2*MT**4 - 8*cHWB**2*MT**4 - 4*cHl322*cll1221*MT**4 + 12*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 4*cHl322**2*MB**2*MW**2 - 24*cHl322*cHQ3*MB**2*MW**2 + 32*cHQ3**2*MB**2*MW**2 + 8*cHtbIm**2*MB**2*MW**2 + 8*cHtbRe**2*MB**2*MW**2 - 8*cHWB**2*MB**2*MW**2 - 4*cHl322*cll1221*MB**2*MW**2 + 12*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 - 72*cHl322*cHtbRe*MB*MT*MW**2 + 192*cHQ3*cHtbRe*MB*MT*MW**2 + 36*cHtbRe*cll1221*MB*MT*MW**2 + 4*cHl322**2*MT**2*MW**2 - 24*cHl322*cHQ3*MT**2*MW**2 + 32*cHQ3**2*MT**2*MW**2 + 8*cHtbIm**2*MT**2*MW**2 + 8*cHtbRe**2*MT**2*MW**2 - 8*cHWB**2*MT**2*MW**2 - 4*cHl322*cll1221*MT**2*MW**2 + 12*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 8*cHl322**2*MW**4 + 48*cHl322*cHQ3*MW**4 - 64*cHQ3**2*MW**4 - 16*cHtbIm**2*MW**4 - 16*cHtbRe**2*MW**4 + 16*cHWB**2*MW**4 + 8*cHl322*cll1221*MW**4 - 24*cHQ3*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHDD*(-6*cHQ3*MB**4 - cll1221*MB**4 + 12*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 6*cHQ3*MT**4 - cll1221*MT**4 - 6*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 18*cHtbRe*MB*MT*MW**2 - 6*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 12*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 18*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 6*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + 
MW**2))))*sth**2*vevhat**4 - 8*cHWB*cth*ee**2*(2*cHl322*MB**4 - 8*cHQ3*MB**4 - cll1221*MB**4 - 4*cHl322*MB**2*MT**2 + 16*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 + 2*cHl322*MT**4 - 8*cHQ3*MT**4 - cll1221*MT**4 + 2*cHl322*MB**2*MW**2 - 8*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 24*cHtbRe*MB*MT*MW**2 + 2*cHl322*MT**2*MW**2 - 8*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 - 4*cHl322*MW**4 + 16*cHQ3*MW**4 + 2*cll1221*MW**4 + cHDD*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth**3*vevhat**4 + ee**2*(4*cHl322**2*MB**4 - 32*cHl322*cHQ3*MB**4 + 64*cHQ3**2*MB**4 + 16*cHtbIm**2*MB**4 + 16*cHtbRe**2*MB**4 - 16*cHWB**2*MB**4 - 4*cHl322*cll1221*MB**4 + 16*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 8*cHl322**2*MB**2*MT**2 + 64*cHl322*cHQ3*MB**2*MT**2 - 128*cHQ3**2*MB**2*MT**2 - 32*cHtbIm**2*MB**2*MT**2 - 32*cHtbRe**2*MB**2*MT**2 + 32*cHWB**2*MB**2*MT**2 + 8*cHl322*cll1221*MB**2*MT**2 - 32*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 4*cHl322**2*MT**4 - 32*cHl322*cHQ3*MT**4 + 64*cHQ3**2*MT**4 + 16*cHtbIm**2*MT**4 + 16*cHtbRe**2*MT**4 - 16*cHWB**2*MT**4 - 4*cHl322*cll1221*MT**4 + 16*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 4*cHl322**2*MB**2*MW**2 - 32*cHl322*cHQ3*MB**2*MW**2 + 64*cHQ3**2*MB**2*MW**2 + 16*cHtbIm**2*MB**2*MW**2 + 16*cHtbRe**2*MB**2*MW**2 - 16*cHWB**2*MB**2*MW**2 - 4*cHl322*cll1221*MB**2*MW**2 + 16*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 - 96*cHl322*cHtbRe*MB*MT*MW**2 + 384*cHQ3*cHtbRe*MB*MT*MW**2 + 48*cHtbRe*cll1221*MB*MT*MW**2 + 4*cHl322**2*MT**2*MW**2 - 32*cHl322*cHQ3*MT**2*MW**2 + 64*cHQ3**2*MT**2*MW**2 + 16*cHtbIm**2*MT**2*MW**2 + 16*cHtbRe**2*MT**2*MW**2 - 16*cHWB**2*MT**2*MW**2 - 4*cHl322*cll1221*MT**2*MW**2 + 16*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 8*cHl322**2*MW**4 + 64*cHl322*cHQ3*MW**4 - 128*cHQ3**2*MW**4 - 32*cHtbIm**2*MW**4 - 32*cHtbRe**2*MW**4 + 32*cHWB**2*MW**4 + 8*cHl322*cll1221*MW**4 - 32*cHQ3*cll1221*MW**4 - 
2*cll1221**2*MW**4 + cHDD**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHDD*(-8*cHQ3*MB**4 - cll1221*MB**4 + 16*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 8*cHQ3*MT**4 - cll1221*MT**4 - 8*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 24*cHtbRe*MB*MT*MW**2 - 8*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 16*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 24*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 8*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))))*sth**4*vevhat**4 + 192*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth*vevhat*cmath.sqrt(2) - 768*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**3*vevhat*cmath.sqrt(2) + 768*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**5*vevhat*cmath.sqrt(2) - 48*ee*MW**2*(2*cHtbIm*ctWIm*MB*(MB**2 - MT**2 - MW**2) + 2*cbWIm*cHtbIm*MT*(MB**2 - MT**2 + MW**2) + (cHDD + 2*cHl311 + 2*cHl322 - 4*cHQ3 - cll1221)*ctWRe*MT*(MB**2 - MT**2 + MW**2) + 2*cHtbRe*ctWRe*MB*(-MB**2 + MT**2 + MW**2) + cbWRe*(-2*cHl322*MB**3 + 4*cHQ3*MB**3 + cll1221*MB**3 + 2*cHtbRe*MB**2*MT + 2*cHl322*MB*MT**2 - 4*cHQ3*MB*MT**2 - cll1221*MB*MT**2 - 2*cHtbRe*MT**3 + 2*cHl322*MB*MW**2 - 4*cHQ3*MB*MW**2 - cll1221*MB*MW**2 + 2*cHtbRe*MT*MW**2 + cHDD*MB*(-MB**2 + MT**2 + MW**2) + 2*cHl311*MB*(-MB**2 + MT**2 + MW**2)))*sth*vevhat**3*cmath.sqrt(2) - 192*cHWB*cth*ee*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**2*vevhat**3*cmath.sqrt(2) + 
48*ee*MW**2*(8*cHtbIm*ctWIm*MB*(MB**2 - MT**2 - MW**2) + 8*cbWIm*cHtbIm*MT*(MB**2 - MT**2 + MW**2) + (3*cHDD + 6*cHl311 + 6*cHl322 - 16*cHQ3 - 3*cll1221)*ctWRe*MT*(MB**2 - MT**2 + MW**2) + 8*cHtbRe*ctWRe*MB*(-MB**2 + MT**2 + MW**2) + cbWRe*(-6*cHl322*MB**3 + 16*cHQ3*MB**3 + 3*cll1221*MB**3 + 8*cHtbRe*MB**2*MT + 6*cHl322*MB*MT**2 - 16*cHQ3*MB*MT**2 - 3*cll1221*MB*MT**2 - 8*cHtbRe*MT**3 + 6*cHl322*MB*MW**2 - 16*cHQ3*MB*MW**2 - 3*cll1221*MB*MW**2 + 8*cHtbRe*MT*MW**2 + 3*cHDD*MB*(-MB**2 + MT**2 + MW**2) + 6*cHl311*MB*(-MB**2 + MT**2 + MW**2)))*sth**3*vevhat**3*cmath.sqrt(2) + 384*cHWB*cth*ee*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**4*vevhat**3*cmath.sqrt(2) - 96*ee*MW**2*(4*cHtbIm*ctWIm*MB*(MB**2 - MT**2 - MW**2) + 4*cbWIm*cHtbIm*MT*(MB**2 - MT**2 + MW**2) + (cHDD + 2*cHl311 + 2*cHl322 - 8*cHQ3 - cll1221)*ctWRe*MT*(MB**2 - MT**2 + MW**2) + 4*cHtbRe*ctWRe*MB*(-MB**2 + MT**2 + MW**2) + cbWRe*(-2*cHl322*MB**3 + 8*cHQ3*MB**3 + cll1221*MB**3 + 4*cHtbRe*MB**2*MT + 2*cHl322*MB*MT**2 - 8*cHQ3*MB*MT**2 - cll1221*MB*MT**2 - 4*cHtbRe*MT**3 + 2*cHl322*MB*MW**2 - 8*cHQ3*MB*MW**2 - cll1221*MB*MW**2 + 4*cHtbRe*MT*MW**2 + cHDD*MB*(-MB**2 + MT**2 + MW**2) + 2*cHl311*MB*(-MB**2 + MT**2 + MW**2)))*sth**5*vevhat**3*cmath.sqrt(2))*cmath.sqrt(MB**4 + (MT**2 - MW**2)**2 - 2*MB**2*(MT**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MB**3*MW**2*sth**2*(1 - 2*sth**2)**2)'})
Decay_c = Decay(name = 'Decay_c',
particle = P.c,
partial_widths = {(P.W__plus__,P.s):'((16*ee**2*LambdaSMEFT**4*MC**4 - 32*ee**2*LambdaSMEFT**4*MC**2*MS**2 + 16*ee**2*LambdaSMEFT**4*MS**4 + 16*ee**2*LambdaSMEFT**4*MC**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MS**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**4*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth**2 + 64*ee**2*LambdaSMEFT**4*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth**4 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth**3*vevhat**2 - 8*cHWB*cth*ee**2*sth**3*vevhat**4*(2*cHl311*MC**4 + 2*cHl322*MC**4 - cll1221*MC**4 - 4*cHl311*MC**2*MS**2 - 4*cHl322*MC**2*MS**2 + 2*cll1221*MC**2*MS**2 + 2*cHl311*MS**4 + 2*cHl322*MS**4 - cll1221*MS**4 + 2*cHl311*MC**2*MW**2 + 2*cHl322*MC**2*MW**2 - cll1221*MC**2*MW**2 + 2*cHl311*MS**2*MW**2 + 2*cHl322*MS**2*MW**2 - cll1221*MS**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 8*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 24*cHudRe*MC*MS*MW**2*yc*ys) - 8*ee**2*LambdaSMEFT**2*vevhat**2*(2*cHl311*MC**4 + 2*cHl322*MC**4 - cll1221*MC**4 - 4*cHl311*MC**2*MS**2 - 4*cHl322*MC**2*MS**2 + 2*cll1221*MC**2*MS**2 + 2*cHl311*MS**4 + 2*cHl322*MS**4 - cll1221*MS**4 + 2*cHl311*MC**2*MW**2 + 2*cHl322*MC**2*MW**2 - cll1221*MC**2*MW**2 + 2*cHl311*MS**2*MW**2 + 2*cHl322*MS**2*MW**2 - cll1221*MS**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) + 8*cHWB*cth*ee**2*sth*vevhat**4*(2*cHl311*MC**4 + 2*cHl322*MC**4 - cll1221*MC**4 - 4*cHl311*MC**2*MS**2 - 4*cHl322*MC**2*MS**2 + 2*cll1221*MC**2*MS**2 + 2*cHl311*MS**4 + 
2*cHl322*MS**4 - cll1221*MS**4 + 2*cHl311*MC**2*MW**2 + 2*cHl322*MC**2*MW**2 - cll1221*MC**2*MW**2 + 2*cHl311*MS**2*MW**2 + 2*cHl322*MS**2*MW**2 - cll1221*MS**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) - 16*sth**4*vevhat**2*(2*cHl311*ee**2*LambdaSMEFT**2*MC**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MC**4 - cll1221*ee**2*LambdaSMEFT**2*MC**4 - 4*cHl311*ee**2*LambdaSMEFT**2*MC**2*MS**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MS**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MS**4 - cll1221*ee**2*LambdaSMEFT**2*MS**4 + 2*cHl311*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MC**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MS**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MS**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MS**2*MW**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 4*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 2*cll1221*ee**2*LambdaSMEFT**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 8*cHj3*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 64*cuWIm**2*MC**4*MW**2*yc**2 + 64*cuWRe**2*MC**4*MW**2*yc**2 - 128*cuWIm**2*MC**2*MS**2*MW**2*yc**2 - 128*cuWRe**2*MC**2*MS**2*MW**2*yc**2 + 64*cuWIm**2*MS**4*MW**2*yc**2 + 64*cuWRe**2*MS**4*MW**2*yc**2 - 32*cuWIm**2*MC**2*MW**4*yc**2 - 32*cuWRe**2*MC**2*MW**4*yc**2 - 32*cuWIm**2*MS**2*MW**4*yc**2 - 32*cuWRe**2*MS**2*MW**4*yc**2 - 32*cuWIm**2*MW**6*yc**2 - 32*cuWRe**2*MW**6*yc**2 - 24*cHudRe*ee**2*LambdaSMEFT**2*MC*MS*MW**2*yc*ys + 384*cdWIm*cuWIm*MC*MS*MW**4*yc*ys - 384*cdWRe*cuWRe*MC*MS*MW**4*yc*ys + 64*cdWIm**2*MC**4*MW**2*ys**2 + 64*cdWRe**2*MC**4*MW**2*ys**2 - 128*cdWIm**2*MC**2*MS**2*MW**2*ys**2 - 128*cdWRe**2*MC**2*MS**2*MW**2*ys**2 
+ 64*cdWIm**2*MS**4*MW**2*ys**2 + 64*cdWRe**2*MS**4*MW**2*ys**2 - 32*cdWIm**2*MC**2*MW**4*ys**2 - 32*cdWRe**2*MC**2*MW**4*ys**2 - 32*cdWIm**2*MS**2*MW**4*ys**2 - 32*cdWRe**2*MS**2*MW**4*ys**2 - 32*cdWIm**2*MW**6*ys**2 - 32*cdWRe**2*MW**6*ys**2) + 8*sth**2*vevhat**2*(6*cHl311*ee**2*LambdaSMEFT**2*MC**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MC**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MC**4 - 12*cHl311*ee**2*LambdaSMEFT**2*MC**2*MS**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 6*cll1221*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MS**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MS**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MS**4 + 6*cHl311*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MC**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MS**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MS**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MS**2*MW**2 - 12*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 6*cll1221*ee**2*LambdaSMEFT**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 16*cHj3*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 32*cuWIm**2*MC**4*MW**2*yc**2 + 32*cuWRe**2*MC**4*MW**2*yc**2 - 64*cuWIm**2*MC**2*MS**2*MW**2*yc**2 - 64*cuWRe**2*MC**2*MS**2*MW**2*yc**2 + 32*cuWIm**2*MS**4*MW**2*yc**2 + 32*cuWRe**2*MS**4*MW**2*yc**2 - 16*cuWIm**2*MC**2*MW**4*yc**2 - 16*cuWRe**2*MC**2*MW**4*yc**2 - 16*cuWIm**2*MS**2*MW**4*yc**2 - 16*cuWRe**2*MS**2*MW**4*yc**2 - 16*cuWIm**2*MW**6*yc**2 - 16*cuWRe**2*MW**6*yc**2 - 48*cHudRe*ee**2*LambdaSMEFT**2*MC*MS*MW**2*yc*ys + 192*cdWIm*cuWIm*MC*MS*MW**4*yc*ys - 192*cdWRe*cuWRe*MC*MS*MW**4*yc*ys + 32*cdWIm**2*MC**4*MW**2*ys**2 + 32*cdWRe**2*MC**4*MW**2*ys**2 - 64*cdWIm**2*MC**2*MS**2*MW**2*ys**2 - 64*cdWRe**2*MC**2*MS**2*MW**2*ys**2 + 32*cdWIm**2*MS**4*MW**2*ys**2 + 32*cdWRe**2*MS**4*MW**2*ys**2 - 16*cdWIm**2*MC**2*MW**4*ys**2 - 16*cdWRe**2*MC**2*MW**4*ys**2 - 
16*cdWIm**2*MS**2*MW**4*ys**2 - 16*cdWRe**2*MS**2*MW**4*ys**2 - 16*cdWIm**2*MW**6*ys**2 - 16*cdWRe**2*MW**6*ys**2) - 512*MW**2*sth**6*vevhat**2*(cuWIm**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 + cuWRe**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 - 12*cdWIm*cuWIm*MC*MS*MW**2*yc*ys + 12*cdWRe*cuWRe*MC*MS*MW**2*yc*ys - (cdWIm**2 + cdWRe**2)*(2*MC**4 + 2*MS**4 - MS**2*MW**2 - MW**4 - MC**2*(4*MS**2 + MW**2))*ys**2) + ee**2*vevhat**4*(4*cHl311**2*MC**4 + 8*cHl311*cHl322*MC**4 + 4*cHl322**2*MC**4 - 4*cHl311*cll1221*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl311**2*MC**2*MS**2 - 16*cHl311*cHl322*MC**2*MS**2 - 8*cHl322**2*MC**2*MS**2 + 8*cHl311*cll1221*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl311**2*MS**4 + 8*cHl311*cHl322*MS**4 + 4*cHl322**2*MS**4 - 4*cHl311*cll1221*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl311**2*MC**2*MW**2 + 8*cHl311*cHl322*MC**2*MW**2 + 4*cHl322**2*MC**2*MW**2 - 4*cHl311*cll1221*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl311**2*MS**2*MW**2 + 8*cHl311*cHl322*MS**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 4*cHl311*cll1221*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 16*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 48*cHl311*cHudRe*MC*MS*MW**2*yc*ys - 48*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 24*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 4*cHudIm**2*MC**4*yc**2*ys**2 + 4*cHudRe**2*MC**4*yc**2*ys**2 - 8*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 8*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 4*cHudIm**2*MS**4*yc**2*ys**2 + 4*cHudRe**2*MS**4*yc**2*ys**2 + 4*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudIm**2*MS**2*MW**2*yc**2*ys**2 
+ 4*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 8*cHudIm**2*MW**4*yc**2*ys**2 - 8*cHudRe**2*MW**4*yc**2*ys**2 - 8*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) - 2*cHDD*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 12*cHudRe*MC*MS*MW**2*yc*ys)) - 2*ee**2*sth**2*vevhat**4*(4*cHl311**2*MC**4 + 8*cHl311*cHl322*MC**4 + 4*cHl322**2*MC**4 - 8*cHWB**2*MC**4 - 4*cHl311*cll1221*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl311**2*MC**2*MS**2 - 16*cHl311*cHl322*MC**2*MS**2 - 8*cHl322**2*MC**2*MS**2 + 16*cHWB**2*MC**2*MS**2 + 8*cHl311*cll1221*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl311**2*MS**4 + 8*cHl311*cHl322*MS**4 + 4*cHl322**2*MS**4 - 8*cHWB**2*MS**4 - 4*cHl311*cll1221*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl311**2*MC**2*MW**2 + 8*cHl311*cHl322*MC**2*MW**2 + 4*cHl322**2*MC**2*MW**2 - 8*cHWB**2*MC**2*MW**2 - 4*cHl311*cll1221*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl311**2*MS**2*MW**2 + 8*cHl311*cHl322*MS**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 8*cHWB**2*MS**2*MW**2 - 4*cHl311*cll1221*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 16*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 
32*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 72*cHl311*cHudRe*MC*MS*MW**2*yc*ys - 72*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 36*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 8*cHudIm**2*MC**4*yc**2*ys**2 + 8*cHudRe**2*MC**4*yc**2*ys**2 - 16*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 16*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 8*cHudIm**2*MS**4*yc**2*ys**2 + 8*cHudRe**2*MS**4*yc**2*ys**2 + 8*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 8*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 8*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 8*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 16*cHudIm**2*MW**4*yc**2*ys**2 - 16*cHudRe**2*MW**4*yc**2*ys**2 - 12*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 16*cHudRe*MC*MS*MW**2*yc*ys) - 2*cHDD*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 6*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 18*cHudRe*MC*MS*MW**2*yc*ys)) + ee**2*sth**4*vevhat**4*(4*cHl311**2*MC**4 + 8*cHl311*cHl322*MC**4 + 4*cHl322**2*MC**4 - 16*cHWB**2*MC**4 - 4*cHl311*cll1221*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl311**2*MC**2*MS**2 - 16*cHl311*cHl322*MC**2*MS**2 - 8*cHl322**2*MC**2*MS**2 + 32*cHWB**2*MC**2*MS**2 + 8*cHl311*cll1221*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl311**2*MS**4 + 8*cHl311*cHl322*MS**4 + 4*cHl322**2*MS**4 - 16*cHWB**2*MS**4 - 4*cHl311*cll1221*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl311**2*MC**2*MW**2 + 8*cHl311*cHl322*MC**2*MW**2 + 4*cHl322**2*MC**2*MW**2 - 16*cHWB**2*MC**2*MW**2 - 
4*cHl311*cll1221*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl311**2*MS**2*MW**2 + 8*cHl311*cHl322*MS**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 16*cHWB**2*MS**2*MW**2 - 4*cHl311*cll1221*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 32*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 64*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 96*cHl311*cHudRe*MC*MS*MW**2*yc*ys - 96*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 48*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 16*cHudIm**2*MC**4*yc**2*ys**2 + 16*cHudRe**2*MC**4*yc**2*ys**2 - 32*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 32*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 16*cHudIm**2*MS**4*yc**2*ys**2 + 16*cHudRe**2*MS**4*yc**2*ys**2 + 16*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 16*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 16*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 16*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 32*cHudIm**2*MW**4*yc**2*ys**2 - 32*cHudRe**2*MW**4*yc**2*ys**2 - 16*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 24*cHudRe*MC*MS*MW**2*yc*ys) - 2*cHDD*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 8*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 24*cHudRe*MC*MS*MW**2*yc*ys)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + 
MW**2)*ys)*cmath.sqrt(2) - 768*ee*LambdaSMEFT**2*MW**2*sth**3*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) + 768*ee*LambdaSMEFT**2*MW**2*sth**5*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) - 192*cHWB*cth*ee*MW**2*sth**2*vevhat**3*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) + 384*cHWB*cth*ee*MW**2*sth**4*vevhat**3*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) - 48*ee*MW**2*sth*vevhat**3*(-2*cHl311*cuWRe*MC**3*yc - 2*cHl322*cuWRe*MC**3*yc + cll1221*cuWRe*MC**3*yc + 2*cHl311*cuWRe*MC*MS**2*yc + 2*cHl322*cuWRe*MC*MS**2*yc - cll1221*cuWRe*MC*MS**2*yc + 2*cHl311*cuWRe*MC*MW**2*yc + 2*cHl322*cuWRe*MC*MW**2*yc - cll1221*cuWRe*MC*MW**2*yc + 4*cHj3*cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cHDD*cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + 2*cdWRe*cHl311*MC**2*MS*ys + 2*cdWRe*cHl322*MC**2*MS*ys - cdWRe*cll1221*MC**2*MS*ys - 2*cdWRe*cHl311*MS**3*ys - 2*cdWRe*cHl322*MS**3*ys + cdWRe*cll1221*MS**3*ys + 2*cdWRe*cHl311*MS*MW**2*ys + 2*cdWRe*cHl322*MS*MW**2*ys - cdWRe*cll1221*MS*MW**2*ys + 4*cdWRe*cHj3*MS*(-MC**2 + MS**2 - MW**2)*ys + cdWRe*cHDD*MS*(MC**2 - MS**2 + MW**2)*ys - 2*cHudIm*cuWIm*MC**2*MS*yc**2*ys + 2*cHudRe*cuWRe*MC**2*MS*yc**2*ys + 2*cHudIm*cuWIm*MS**3*yc**2*ys - 2*cHudRe*cuWRe*MS**3*yc**2*ys - 2*cHudIm*cuWIm*MS*MW**2*yc**2*ys + 2*cHudRe*cuWRe*MS*MW**2*yc**2*ys - 2*cdWIm*cHudIm*MC**3*yc*ys**2 - 2*cdWRe*cHudRe*MC**3*yc*ys**2 + 2*cdWIm*cHudIm*MC*MS**2*yc*ys**2 + 2*cdWRe*cHudRe*MC*MS**2*yc*ys**2 + 2*cdWIm*cHudIm*MC*MW**2*yc*ys**2 + 2*cdWRe*cHudRe*MC*MW**2*yc*ys**2)*cmath.sqrt(2) - 96*ee*MW**2*sth**5*vevhat**3*(-2*cHl311*cuWRe*MC**3*yc - 2*cHl322*cuWRe*MC**3*yc + cll1221*cuWRe*MC**3*yc + 2*cHl311*cuWRe*MC*MS**2*yc + 2*cHl322*cuWRe*MC*MS**2*yc - cll1221*cuWRe*MC*MS**2*yc + 2*cHl311*cuWRe*MC*MW**2*yc + 2*cHl322*cuWRe*MC*MW**2*yc - cll1221*cuWRe*MC*MW**2*yc + 8*cHj3*cuWRe*MC*(MC**2 - MS**2 
- MW**2)*yc + cHDD*cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + 2*cdWRe*cHl311*MC**2*MS*ys + 2*cdWRe*cHl322*MC**2*MS*ys - cdWRe*cll1221*MC**2*MS*ys - 2*cdWRe*cHl311*MS**3*ys - 2*cdWRe*cHl322*MS**3*ys + cdWRe*cll1221*MS**3*ys + 2*cdWRe*cHl311*MS*MW**2*ys + 2*cdWRe*cHl322*MS*MW**2*ys - cdWRe*cll1221*MS*MW**2*ys + 8*cdWRe*cHj3*MS*(-MC**2 + MS**2 - MW**2)*ys + cdWRe*cHDD*MS*(MC**2 - MS**2 + MW**2)*ys - 4*cHudIm*cuWIm*MC**2*MS*yc**2*ys + 4*cHudRe*cuWRe*MC**2*MS*yc**2*ys + 4*cHudIm*cuWIm*MS**3*yc**2*ys - 4*cHudRe*cuWRe*MS**3*yc**2*ys - 4*cHudIm*cuWIm*MS*MW**2*yc**2*ys + 4*cHudRe*cuWRe*MS*MW**2*yc**2*ys - 4*cdWIm*cHudIm*MC**3*yc*ys**2 - 4*cdWRe*cHudRe*MC**3*yc*ys**2 + 4*cdWIm*cHudIm*MC*MS**2*yc*ys**2 + 4*cdWRe*cHudRe*MC*MS**2*yc*ys**2 + 4*cdWIm*cHudIm*MC*MW**2*yc*ys**2 + 4*cdWRe*cHudRe*MC*MW**2*yc*ys**2)*cmath.sqrt(2) + 48*ee*MW**2*sth**3*vevhat**3*(-6*cHl311*cuWRe*MC**3*yc - 6*cHl322*cuWRe*MC**3*yc + 3*cll1221*cuWRe*MC**3*yc + 6*cHl311*cuWRe*MC*MS**2*yc + 6*cHl322*cuWRe*MC*MS**2*yc - 3*cll1221*cuWRe*MC*MS**2*yc + 6*cHl311*cuWRe*MC*MW**2*yc + 6*cHl322*cuWRe*MC*MW**2*yc - 3*cll1221*cuWRe*MC*MW**2*yc + 6*cdWRe*cHl311*MC**2*MS*ys + 6*cdWRe*cHl322*MC**2*MS*ys - 3*cdWRe*cll1221*MC**2*MS*ys - 6*cdWRe*cHl311*MS**3*ys - 6*cdWRe*cHl322*MS**3*ys + 3*cdWRe*cll1221*MS**3*ys + 6*cdWRe*cHl311*MS*MW**2*ys + 6*cdWRe*cHl322*MS*MW**2*ys - 3*cdWRe*cll1221*MS*MW**2*ys - 8*cHudIm*cuWIm*MC**2*MS*yc**2*ys + 8*cHudRe*cuWRe*MC**2*MS*yc**2*ys + 8*cHudIm*cuWIm*MS**3*yc**2*ys - 8*cHudRe*cuWRe*MS**3*yc**2*ys - 8*cHudIm*cuWIm*MS*MW**2*yc**2*ys + 8*cHudRe*cuWRe*MS*MW**2*yc**2*ys - 8*cdWIm*cHudIm*MC**3*yc*ys**2 - 8*cdWRe*cHudRe*MC**3*yc*ys**2 + 8*cdWIm*cHudIm*MC*MS**2*yc*ys**2 + 8*cdWRe*cHudRe*MC*MS**2*yc*ys**2 + 8*cdWIm*cHudIm*MC*MW**2*yc*ys**2 + 8*cdWRe*cHudRe*MC*MW**2*yc*ys**2 + 16*cHj3*(cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cdWRe*MS*(-MC**2 + MS**2 - MW**2)*ys) + 3*cHDD*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys))*cmath.sqrt(2))*cmath.sqrt(MC**4 + (MS**2 - MW**2)**2 - 
2*MC**2*(MS**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MC**3*MW**2*sth**2*(1 - 2*sth**2)**2)'})
# Decay object for the down quark (P.d): maps each kinematically allowed final
# state (a tuple of particle objects) to its partial width, stored as a string
# expression over the model's external parameters (SMEFT Wilson coefficients
# such as cHDD/cHj3/cdWRe/..., masses MD/MU/MW, couplings ee/sth/vevhat, and
# the cutoff LambdaSMEFT). The string is evaluated later by the UFO consumer
# (e.g. the event generator), not by this module itself.
# NOTE(review): this file appears machine-generated (FeynRules/UFO style) —
# do not hand-edit the width formula; regenerate from the model instead.
Decay_d = Decay(name = 'Decay_d',
# Only the two-body mode d -> W- u is listed here.
particle = P.d,
partial_widths = {(P.W__minus__,P.u):'((16*ee**2*LambdaSMEFT**4*MD**4 - 32*ee**2*LambdaSMEFT**4*MD**2*MU**2 + 16*ee**2*LambdaSMEFT**4*MU**4 + 16*ee**2*LambdaSMEFT**4*MD**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MU**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**4*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth**2 + 64*ee**2*LambdaSMEFT**4*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth**4 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth**3*vevhat**2 - 8*cHWB*cth*ee**2*sth**3*vevhat**4*(2*cHl311*MD**4 + 2*cHl322*MD**4 - cll1221*MD**4 - 4*cHl311*MD**2*MU**2 - 4*cHl322*MD**2*MU**2 + 2*cll1221*MD**2*MU**2 + 2*cHl311*MU**4 + 2*cHl322*MU**4 - cll1221*MU**4 + 2*cHl311*MD**2*MW**2 + 2*cHl322*MD**2*MW**2 - cll1221*MD**2*MW**2 + 2*cHl311*MU**2*MW**2 + 2*cHl322*MU**2*MW**2 - cll1221*MU**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 8*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 24*cHudRe*MD*MU*MW**2*ydo*yup) - 8*ee**2*LambdaSMEFT**2*vevhat**2*(2*cHl311*MD**4 + 2*cHl322*MD**4 - cll1221*MD**4 - 4*cHl311*MD**2*MU**2 - 4*cHl322*MD**2*MU**2 + 2*cll1221*MD**2*MU**2 + 2*cHl311*MU**4 + 2*cHl322*MU**4 - cll1221*MU**4 + 2*cHl311*MD**2*MW**2 + 2*cHl322*MD**2*MW**2 - cll1221*MD**2*MW**2 + 2*cHl311*MU**2*MW**2 + 2*cHl322*MU**2*MW**2 - cll1221*MU**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) + 8*cHWB*cth*ee**2*sth*vevhat**4*(2*cHl311*MD**4 + 2*cHl322*MD**4 - cll1221*MD**4 - 4*cHl311*MD**2*MU**2 - 4*cHl322*MD**2*MU**2 + 2*cll1221*MD**2*MU**2 + 
2*cHl311*MU**4 + 2*cHl322*MU**4 - cll1221*MU**4 + 2*cHl311*MD**2*MW**2 + 2*cHl322*MD**2*MW**2 - cll1221*MD**2*MW**2 + 2*cHl311*MU**2*MW**2 + 2*cHl322*MU**2*MW**2 - cll1221*MU**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) - 16*sth**4*vevhat**2*(2*cHl311*ee**2*LambdaSMEFT**2*MD**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MD**4 - cll1221*ee**2*LambdaSMEFT**2*MD**4 - 4*cHl311*ee**2*LambdaSMEFT**2*MD**2*MU**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MU**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MU**4 - cll1221*ee**2*LambdaSMEFT**2*MU**4 + 2*cHl311*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MD**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MU**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MU**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MU**2*MW**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 4*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 2*cll1221*ee**2*LambdaSMEFT**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 8*cHj3*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 64*cdWIm**2*MD**4*MW**2*ydo**2 + 64*cdWRe**2*MD**4*MW**2*ydo**2 - 128*cdWIm**2*MD**2*MU**2*MW**2*ydo**2 - 128*cdWRe**2*MD**2*MU**2*MW**2*ydo**2 + 64*cdWIm**2*MU**4*MW**2*ydo**2 + 64*cdWRe**2*MU**4*MW**2*ydo**2 - 32*cdWIm**2*MD**2*MW**4*ydo**2 - 32*cdWRe**2*MD**2*MW**4*ydo**2 - 32*cdWIm**2*MU**2*MW**4*ydo**2 - 32*cdWRe**2*MU**2*MW**4*ydo**2 - 32*cdWIm**2*MW**6*ydo**2 - 32*cdWRe**2*MW**6*ydo**2 - 24*cHudRe*ee**2*LambdaSMEFT**2*MD*MU*MW**2*ydo*yup + 384*cdWIm*cuWIm*MD*MU*MW**4*ydo*yup - 384*cdWRe*cuWRe*MD*MU*MW**4*ydo*yup + 64*cuWIm**2*MD**4*MW**2*yup**2 + 64*cuWRe**2*MD**4*MW**2*yup**2 - 
128*cuWIm**2*MD**2*MU**2*MW**2*yup**2 - 128*cuWRe**2*MD**2*MU**2*MW**2*yup**2 + 64*cuWIm**2*MU**4*MW**2*yup**2 + 64*cuWRe**2*MU**4*MW**2*yup**2 - 32*cuWIm**2*MD**2*MW**4*yup**2 - 32*cuWRe**2*MD**2*MW**4*yup**2 - 32*cuWIm**2*MU**2*MW**4*yup**2 - 32*cuWRe**2*MU**2*MW**4*yup**2 - 32*cuWIm**2*MW**6*yup**2 - 32*cuWRe**2*MW**6*yup**2) + 8*sth**2*vevhat**2*(6*cHl311*ee**2*LambdaSMEFT**2*MD**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MD**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MD**4 - 12*cHl311*ee**2*LambdaSMEFT**2*MD**2*MU**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 6*cll1221*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MU**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MU**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MU**4 + 6*cHl311*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MD**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MU**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MU**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MU**2*MW**2 - 12*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 6*cll1221*ee**2*LambdaSMEFT**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 16*cHj3*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 32*cdWIm**2*MD**4*MW**2*ydo**2 + 32*cdWRe**2*MD**4*MW**2*ydo**2 - 64*cdWIm**2*MD**2*MU**2*MW**2*ydo**2 - 64*cdWRe**2*MD**2*MU**2*MW**2*ydo**2 + 32*cdWIm**2*MU**4*MW**2*ydo**2 + 32*cdWRe**2*MU**4*MW**2*ydo**2 - 16*cdWIm**2*MD**2*MW**4*ydo**2 - 16*cdWRe**2*MD**2*MW**4*ydo**2 - 16*cdWIm**2*MU**2*MW**4*ydo**2 - 16*cdWRe**2*MU**2*MW**4*ydo**2 - 16*cdWIm**2*MW**6*ydo**2 - 16*cdWRe**2*MW**6*ydo**2 - 48*cHudRe*ee**2*LambdaSMEFT**2*MD*MU*MW**2*ydo*yup + 192*cdWIm*cuWIm*MD*MU*MW**4*ydo*yup - 192*cdWRe*cuWRe*MD*MU*MW**4*ydo*yup + 32*cuWIm**2*MD**4*MW**2*yup**2 + 32*cuWRe**2*MD**4*MW**2*yup**2 - 64*cuWIm**2*MD**2*MU**2*MW**2*yup**2 - 64*cuWRe**2*MD**2*MU**2*MW**2*yup**2 + 32*cuWIm**2*MU**4*MW**2*yup**2 + 
32*cuWRe**2*MU**4*MW**2*yup**2 - 16*cuWIm**2*MD**2*MW**4*yup**2 - 16*cuWRe**2*MD**2*MW**4*yup**2 - 16*cuWIm**2*MU**2*MW**4*yup**2 - 16*cuWRe**2*MU**2*MW**4*yup**2 - 16*cuWIm**2*MW**6*yup**2 - 16*cuWRe**2*MW**6*yup**2) - 512*MW**2*sth**6*vevhat**2*(cdWIm**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 + cdWRe**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 - 12*cdWIm*cuWIm*MD*MU*MW**2*ydo*yup + 12*cdWRe*cuWRe*MD*MU*MW**2*ydo*yup - (cuWIm**2 + cuWRe**2)*(2*MD**4 + 2*MU**4 - MU**2*MW**2 - MW**4 - MD**2*(4*MU**2 + MW**2))*yup**2) + ee**2*vevhat**4*(4*cHl311**2*MD**4 + 8*cHl311*cHl322*MD**4 + 4*cHl322**2*MD**4 - 4*cHl311*cll1221*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl311**2*MD**2*MU**2 - 16*cHl311*cHl322*MD**2*MU**2 - 8*cHl322**2*MD**2*MU**2 + 8*cHl311*cll1221*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl311**2*MU**4 + 8*cHl311*cHl322*MU**4 + 4*cHl322**2*MU**4 - 4*cHl311*cll1221*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl311**2*MD**2*MW**2 + 8*cHl311*cHl322*MD**2*MW**2 + 4*cHl322**2*MD**2*MW**2 - 4*cHl311*cll1221*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl311**2*MU**2*MW**2 + 8*cHl311*cHl322*MU**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 4*cHl311*cll1221*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 16*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 48*cHl311*cHudRe*MD*MU*MW**2*ydo*yup - 48*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 24*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 4*cHudIm**2*MD**4*ydo**2*yup**2 + 4*cHudRe**2*MD**4*ydo**2*yup**2 - 8*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 8*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 4*cHudIm**2*MU**4*ydo**2*yup**2 + 
4*cHudRe**2*MU**4*ydo**2*yup**2 + 4*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 8*cHudIm**2*MW**4*ydo**2*yup**2 - 8*cHudRe**2*MW**4*ydo**2*yup**2 - 8*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) - 2*cHDD*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 12*cHudRe*MD*MU*MW**2*ydo*yup)) - 2*ee**2*sth**2*vevhat**4*(4*cHl311**2*MD**4 + 8*cHl311*cHl322*MD**4 + 4*cHl322**2*MD**4 - 8*cHWB**2*MD**4 - 4*cHl311*cll1221*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl311**2*MD**2*MU**2 - 16*cHl311*cHl322*MD**2*MU**2 - 8*cHl322**2*MD**2*MU**2 + 16*cHWB**2*MD**2*MU**2 + 8*cHl311*cll1221*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl311**2*MU**4 + 8*cHl311*cHl322*MU**4 + 4*cHl322**2*MU**4 - 8*cHWB**2*MU**4 - 4*cHl311*cll1221*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl311**2*MD**2*MW**2 + 8*cHl311*cHl322*MD**2*MW**2 + 4*cHl322**2*MD**2*MW**2 - 8*cHWB**2*MD**2*MW**2 - 4*cHl311*cll1221*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl311**2*MU**2*MW**2 + 8*cHl311*cHl322*MU**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 8*cHWB**2*MU**2*MW**2 - 4*cHl311*cll1221*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 
16*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 32*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 72*cHl311*cHudRe*MD*MU*MW**2*ydo*yup - 72*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 36*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 8*cHudIm**2*MD**4*ydo**2*yup**2 + 8*cHudRe**2*MD**4*ydo**2*yup**2 - 16*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 16*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 8*cHudIm**2*MU**4*ydo**2*yup**2 + 8*cHudRe**2*MU**4*ydo**2*yup**2 + 8*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 8*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 8*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 8*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 16*cHudIm**2*MW**4*ydo**2*yup**2 - 16*cHudRe**2*MW**4*ydo**2*yup**2 - 12*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 16*cHudRe*MD*MU*MW**2*ydo*yup) - 2*cHDD*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 6*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 18*cHudRe*MD*MU*MW**2*ydo*yup)) + ee**2*sth**4*vevhat**4*(4*cHl311**2*MD**4 + 8*cHl311*cHl322*MD**4 + 4*cHl322**2*MD**4 - 16*cHWB**2*MD**4 - 4*cHl311*cll1221*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl311**2*MD**2*MU**2 - 16*cHl311*cHl322*MD**2*MU**2 - 8*cHl322**2*MD**2*MU**2 + 32*cHWB**2*MD**2*MU**2 + 8*cHl311*cll1221*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl311**2*MU**4 + 8*cHl311*cHl322*MU**4 + 
4*cHl322**2*MU**4 - 16*cHWB**2*MU**4 - 4*cHl311*cll1221*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl311**2*MD**2*MW**2 + 8*cHl311*cHl322*MD**2*MW**2 + 4*cHl322**2*MD**2*MW**2 - 16*cHWB**2*MD**2*MW**2 - 4*cHl311*cll1221*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl311**2*MU**2*MW**2 + 8*cHl311*cHl322*MU**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 16*cHWB**2*MU**2*MW**2 - 4*cHl311*cll1221*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 32*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 64*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 96*cHl311*cHudRe*MD*MU*MW**2*ydo*yup - 96*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 48*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 16*cHudIm**2*MD**4*ydo**2*yup**2 + 16*cHudRe**2*MD**4*ydo**2*yup**2 - 32*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 32*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 16*cHudIm**2*MU**4*ydo**2*yup**2 + 16*cHudRe**2*MU**4*ydo**2*yup**2 + 16*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 16*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 16*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 16*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 32*cHudIm**2*MW**4*ydo**2*yup**2 - 32*cHudRe**2*MW**4*ydo**2*yup**2 - 16*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 24*cHudRe*MD*MU*MW**2*ydo*yup) - 2*cHDD*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 8*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + 
MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 24*cHudRe*MD*MU*MW**2*ydo*yup)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) - 768*ee*LambdaSMEFT**2*MW**2*sth**3*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) + 768*ee*LambdaSMEFT**2*MW**2*sth**5*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) - 192*cHWB*cth*ee*MW**2*sth**2*vevhat**3*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) + 384*cHWB*cth*ee*MW**2*sth**4*vevhat**3*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) - 48*ee*MW**2*sth*vevhat**3*(yup*(2*cHl311*cuWRe*MD**2*MU + 2*cHl322*cuWRe*MD**2*MU - cll1221*cuWRe*MD**2*MU - 2*cHl311*cuWRe*MU**3 - 2*cHl322*cuWRe*MU**3 + cll1221*cuWRe*MU**3 + 2*cHl311*cuWRe*MU*MW**2 + 2*cHl322*cuWRe*MU*MW**2 - cll1221*cuWRe*MU*MW**2 + cHDD*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 4*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) + 2*cdWIm*cHudIm*MD**2*MU*ydo**2 - 2*cdWIm*cHudIm*MU**3*ydo**2 + 2*cdWIm*cHudIm*MU*MW**2*ydo**2 + 2*cHudIm*cuWIm*MD**3*ydo*yup - 2*cHudRe*cuWRe*MD**3*ydo*yup - 2*cHudIm*cuWIm*MD*MU**2*ydo*yup + 2*cHudRe*cuWRe*MD*MU**2*ydo*yup - 2*cHudIm*cuWIm*MD*MW**2*ydo*yup + 2*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(-2*cHl311*MD**3 - 2*cHl322*MD**3 + cll1221*MD**3 + 2*cHl311*MD*MU**2 + 2*cHl322*MD*MU**2 - cll1221*MD*MU**2 + 2*cHl311*MD*MW**2 + 2*cHl322*MD*MW**2 - cll1221*MD*MW**2 + 4*cHj3*MD*(MD**2 - MU**2 - MW**2) + cHDD*MD*(-MD**2 + MU**2 + MW**2) + 2*cHudRe*MD**2*MU*ydo*yup - 2*cHudRe*MU**3*ydo*yup + 2*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2) - 96*ee*MW**2*sth**5*vevhat**3*(yup*(2*cHl311*cuWRe*MD**2*MU + 2*cHl322*cuWRe*MD**2*MU - cll1221*cuWRe*MD**2*MU - 2*cHl311*cuWRe*MU**3 - 2*cHl322*cuWRe*MU**3 + cll1221*cuWRe*MU**3 + 2*cHl311*cuWRe*MU*MW**2 
+ 2*cHl322*cuWRe*MU*MW**2 - cll1221*cuWRe*MU*MW**2 + cHDD*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 8*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) + 4*cdWIm*cHudIm*MD**2*MU*ydo**2 - 4*cdWIm*cHudIm*MU**3*ydo**2 + 4*cdWIm*cHudIm*MU*MW**2*ydo**2 + 4*cHudIm*cuWIm*MD**3*ydo*yup - 4*cHudRe*cuWRe*MD**3*ydo*yup - 4*cHudIm*cuWIm*MD*MU**2*ydo*yup + 4*cHudRe*cuWRe*MD*MU**2*ydo*yup - 4*cHudIm*cuWIm*MD*MW**2*ydo*yup + 4*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(-2*cHl311*MD**3 - 2*cHl322*MD**3 + cll1221*MD**3 + 2*cHl311*MD*MU**2 + 2*cHl322*MD*MU**2 - cll1221*MD*MU**2 + 2*cHl311*MD*MW**2 + 2*cHl322*MD*MW**2 - cll1221*MD*MW**2 + 8*cHj3*MD*(MD**2 - MU**2 - MW**2) + cHDD*MD*(-MD**2 + MU**2 + MW**2) + 4*cHudRe*MD**2*MU*ydo*yup - 4*cHudRe*MU**3*ydo*yup + 4*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2) + 48*ee*MW**2*sth**3*vevhat**3*(yup*(6*cHl311*cuWRe*MD**2*MU + 6*cHl322*cuWRe*MD**2*MU - 3*cll1221*cuWRe*MD**2*MU - 6*cHl311*cuWRe*MU**3 - 6*cHl322*cuWRe*MU**3 + 3*cll1221*cuWRe*MU**3 + 6*cHl311*cuWRe*MU*MW**2 + 6*cHl322*cuWRe*MU*MW**2 - 3*cll1221*cuWRe*MU*MW**2 + 3*cHDD*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 16*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) + 8*cdWIm*cHudIm*MD**2*MU*ydo**2 - 8*cdWIm*cHudIm*MU**3*ydo**2 + 8*cdWIm*cHudIm*MU*MW**2*ydo**2 + 8*cHudIm*cuWIm*MD**3*ydo*yup - 8*cHudRe*cuWRe*MD**3*ydo*yup - 8*cHudIm*cuWIm*MD*MU**2*ydo*yup + 8*cHudRe*cuWRe*MD*MU**2*ydo*yup - 8*cHudIm*cuWIm*MD*MW**2*ydo*yup + 8*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(-6*cHl311*MD**3 - 6*cHl322*MD**3 + 3*cll1221*MD**3 + 6*cHl311*MD*MU**2 + 6*cHl322*MD*MU**2 - 3*cll1221*MD*MU**2 + 6*cHl311*MD*MW**2 + 6*cHl322*MD*MW**2 - 3*cll1221*MD*MW**2 + 16*cHj3*MD*(MD**2 - MU**2 - MW**2) + 3*cHDD*MD*(-MD**2 + MU**2 + MW**2) + 8*cHudRe*MD**2*MU*ydo*yup - 8*cHudRe*MU**3*ydo*yup + 8*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2))*cmath.sqrt(MD**4 + (MU**2 - MW**2)**2 - 2*MD**2*(MU**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MD**3*MW**2*sth**2*(1 - 2*sth**2)**2)'})
# Decay object for the electron (P.e__minus__): the single entry maps the
# final state (W-, ve) to its partial-width formula, written as a string over
# SMEFT Wilson coefficients (cHDD, cHl311, cHl322, cll1221, ceWRe11, ...),
# masses (Me, MW), couplings (ee, sth, cth, vevhat) and LambdaSMEFT. The
# overall (Me**2 - MW**2)**2 prefactor and the 1/Me**3 denominator follow the
# same structural pattern as the other fermion Decay objects in this file.
# NOTE(review): machine-generated (FeynRules/UFO style) — regenerate rather
# than hand-edit.
Decay_e__minus__ = Decay(name = 'Decay_e__minus__',
particle = P.e__minus__,
partial_widths = {(P.W__minus__,P.ve):'((Me**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*Me**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 64*ee**2*LambdaSMEFT**4*(Me**2 + 2*MW**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(Me**2 + 2*MW**2)*sth**4 - 8*(cHDD - 2*cHl311 + 2*cHl322 - cll1221)*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2)*vevhat**2 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2)*sth*vevhat**2 + 8*(6*cHl322*ee**2*LambdaSMEFT**2*Me**2 - 3*cll1221*ee**2*LambdaSMEFT**2*Me**2 + 12*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 6*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 32*ceWIm11**2*Me**2*MW**2 + 32*ceWRe11**2*Me**2*MW**2 + 16*ceWIm11**2*MW**4 + 16*ceWRe11**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2) - 10*cHl311*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2))*sth**2*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2)*sth**3*vevhat**2 - 16*(2*cHl322*ee**2*LambdaSMEFT**2*Me**2 - cll1221*ee**2*LambdaSMEFT**2*Me**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 64*ceWIm11**2*Me**2*MW**2 + 64*ceWRe11**2*Me**2*MW**2 + 32*ceWIm11**2*MW**4 + 32*ceWRe11**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2) - 6*cHl311*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2))*sth**4*vevhat**2 + 512*(ceWIm11**2 + ceWRe11**2)*MW**2*(2*Me**2 + MW**2)*sth**6*vevhat**2 + (cHDD - 2*cHl311 + 2*cHl322 - cll1221)**2*ee**2*(Me**2 + 2*MW**2)*vevhat**4 + 8*cHWB*(cHDD - 2*cHl311 + 2*cHl322 - cll1221)*cth*ee**2*(Me**2 + 2*MW**2)*sth*vevhat**4 - 2*(cHDD**2 + 12*cHl311**2 + 4*cHl322**2 - 8*cHWB**2 - 4*cHl322*cll1221 + cll1221**2 + 8*cHl311*(-2*cHl322 + cll1221) - 2*cHDD*(4*cHl311 - 2*cHl322 + cll1221))*ee**2*(Me**2 + 2*MW**2)*sth**2*vevhat**4 - 8*cHWB*(cHDD - 6*cHl311 + 2*cHl322 - cll1221)*cth*ee**2*(Me**2 + 2*MW**2)*sth**3*vevhat**4 + (cHDD**2 + 36*cHl311**2 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl322*cll1221 + cll1221**2 + 12*cHl311*(-2*cHl322 + cll1221) - 2*cHDD*(6*cHl311 - 2*cHl322 + cll1221))*ee**2*(Me**2 + 2*MW**2)*sth**4*vevhat**4 - 
192*ceWRe11*ee*LambdaSMEFT**2*Me*MW**2*sth*vevhat*cmath.sqrt(2) + 768*ceWRe11*ee*LambdaSMEFT**2*Me*MW**2*sth**3*vevhat*cmath.sqrt(2) - 768*ceWRe11*ee*LambdaSMEFT**2*Me*MW**2*sth**5*vevhat*cmath.sqrt(2) + 48*ceWRe11*(cHDD - 2*cHl311 + 2*cHl322 - cll1221)*ee*Me*MW**2*sth*vevhat**3*cmath.sqrt(2) + 192*ceWRe11*cHWB*cth*ee*Me*MW**2*sth**2*vevhat**3*cmath.sqrt(2) - 48*ceWRe11*(3*cHDD - 10*cHl311 + 6*cHl322 - 3*cll1221)*ee*Me*MW**2*sth**3*vevhat**3*cmath.sqrt(2) - 384*ceWRe11*cHWB*cth*ee*Me*MW**2*sth**4*vevhat**3*cmath.sqrt(2) + 96*ceWRe11*(cHDD - 6*cHl311 + 2*cHl322 - cll1221)*ee*Me*MW**2*sth**5*vevhat**3*cmath.sqrt(2)))/(1024.*cmath.pi*LambdaSMEFT**4*Me**3*MW**2*sth**2*(1 - 2*sth**2)**2)'})
# Decay object for the Higgs boson (P.H): one dict entry per two-body final
# state, keyed by a tuple of particle objects — (P.a,P.a), (P.a,P.Z),
# fermion pairs (b, c, d, e, mu, s, ta, t, u), (P.g,P.g), (P.W-,P.W+) and
# (P.Z,P.Z). Each value is a width formula, as a string, over SMEFT Wilson
# coefficients (cHB, cHW, cHWB, their -til CP-odd partners, cHbox, cHDD,
# c*HRe/c*HIm Yukawa-type coefficients, ...), masses, the effective Higgs
# couplings gHaa/gHza/gHgg1/gHgg2, Yukawas (yb, yc, ...), and LambdaSMEFT.
# Each expression carries the appropriate kinematic factor, e.g.
# cmath.sqrt(MH**2 - 4*Mf**2) for fermion pairs.
# NOTE(review): machine-generated (FeynRules/UFO style) — the formulas should
# be regenerated from the model, not edited by hand.
Decay_H = Decay(name = 'Decay_H',
particle = P.H,
partial_widths = {(P.a,P.a):'(MH**3*(gHaa**2*LambdaSMEFT**4 - 2*gHaa*LambdaSMEFT**2*(-cHB + cHWB*cth*sth + (cHB - cHW)*sth**2)*vevhat**2 + (2*cHB*sth*(cHWB*cth - cHW*sth)*(-1 + sth**2) + 2*cHBtil*sth*(cHWBtil*cth - cHWtil*sth)*(-1 + sth**2) + cHB**2*(-1 + sth**2)**2 + cHBtil**2*(-1 + sth**2)**2 + sth**2*(cHWB**2 + cHWBtil**2 - 2*(cHW*cHWB + cHWBtil*cHWtil)*cth*sth + (cHW**2 - cHWB**2 - cHWBtil**2 + cHWtil**2)*sth**2))*vevhat**4))/(4.*cmath.pi*LambdaSMEFT**4*vevhat**2)',
(P.a,P.Z):'((MH**2 - MZ**2)**3*(gHza**2*LambdaSMEFT**4 - 2*gHza*LambdaSMEFT**2*(cHWB + 2*(cHB - cHW)*cth*sth - 2*cHWB*sth**2)*vevhat**2 + (cHWB**2*(1 - 2*sth**2)**2 + cHWBtil**2*(1 - 2*sth**2)**2 - 4*(cHB**2 + cHBtil**2 - 2*cHB*cHW + cHW**2 - 2*cHBtil*cHWtil + cHWtil**2)*sth**2*(-1 + sth**2) - 4*(cHB - cHW)*cHWB*cth*sth*(-1 + 2*sth**2) - 4*cHWBtil*(cHBtil - cHWtil)*cth*sth*(-1 + 2*sth**2))*vevhat**4))/(8.*cmath.pi*LambdaSMEFT**4*MH**3*vevhat**2)',
(P.b,P.b__tilde__):'(3*(-64*LambdaSMEFT**4*MB**2*yb**2 + 16*LambdaSMEFT**4*MH**2*yb**2 + 8*LambdaSMEFT**2*(4*MB**2 - MH**2)*vevhat**2*yb*(4*cbHRe + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*yb) + vevhat**4*(16*cbHIm**2*MH**2 + 16*cbHRe**2*(-4*MB**2 + MH**2) + 8*cbHRe*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221)*(4*MB**2 - MH**2)*yb - (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)**2*(4*MB**2 - MH**2)*yb**2))*cmath.sqrt(-4*MB**2 + MH**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.c,P.c__tilde__):'(3*(-64*LambdaSMEFT**4*MC**2 + 16*LambdaSMEFT**4*MH**2 - 8*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221 - 4*cuHRe)*LambdaSMEFT**2*(4*MC**2 - MH**2)*vevhat**2 + (-16*cHl311**2*MC**2 - 32*cHl311*cHl322*MC**2 - 16*cHl322**2*MC**2 + 16*cHl311*cll1221*MC**2 + 16*cHl322*cll1221*MC**2 - 4*cll1221**2*MC**2 - 64*cHl311*cuHRe*MC**2 - 64*cHl322*cuHRe*MC**2 + 32*cll1221*cuHRe*MC**2 - 64*cuHRe**2*MC**2 + 4*cHl311**2*MH**2 + 8*cHl311*cHl322*MH**2 + 4*cHl322**2*MH**2 - 4*cHl311*cll1221*MH**2 - 4*cHl322*cll1221*MH**2 + cll1221**2*MH**2 + 16*cuHIm**2*MH**2 + 16*cHl311*cuHRe*MH**2 + 16*cHl322*cuHRe*MH**2 - 8*cll1221*cuHRe*MH**2 + 16*cuHRe**2*MH**2 - 2*cHDD*(2*cHl311 + 2*cHl322 - cll1221 + 4*cuHRe)*(4*MC**2 - MH**2) + 8*cHbox*(cHDD + 2*cHl311 + 2*cHl322 - cll1221 + 4*cuHRe)*(4*MC**2 - MH**2) + 16*cHbox**2*(-4*MC**2 + MH**2) + cHDD**2*(-4*MC**2 + MH**2))*vevhat**4)*yc**2*cmath.sqrt(-4*MC**2 + MH**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.d,P.d__tilde__):'(3*(-64*LambdaSMEFT**4*MD**2 + 16*LambdaSMEFT**4*MH**2 + 8*(4*cdHRe - 4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*LambdaSMEFT**2*(4*MD**2 - MH**2)*vevhat**2 + (-4*cHDD**2*MD**2 - 16*cHDD*cHl311*MD**2 - 16*cHl311**2*MD**2 - 16*cHDD*cHl322*MD**2 - 32*cHl311*cHl322*MD**2 - 16*cHl322**2*MD**2 + 8*cHDD*cll1221*MD**2 + 16*cHl311*cll1221*MD**2 + 16*cHl322*cll1221*MD**2 - 4*cll1221**2*MD**2 + 16*cdHIm**2*MH**2 + cHDD**2*MH**2 + 4*cHDD*cHl311*MH**2 + 4*cHl311**2*MH**2 + 4*cHDD*cHl322*MH**2 + 8*cHl311*cHl322*MH**2 + 4*cHl322**2*MH**2 - 2*cHDD*cll1221*MH**2 - 4*cHl311*cll1221*MH**2 - 4*cHl322*cll1221*MH**2 + cll1221**2*MH**2 + 8*cHbox*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*(4*MD**2 - MH**2) + 8*cdHRe*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221)*(4*MD**2 - MH**2) + 16*cdHRe**2*(-4*MD**2 + MH**2) + 16*cHbox**2*(-4*MD**2 + MH**2))*vevhat**4)*ydo**2*cmath.sqrt(-4*MD**2 + MH**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.e__minus__,P.e__plus__):'((-64*LambdaSMEFT**4*Me**2*ye**2 + 16*LambdaSMEFT**4*MH**2*ye**2 + 8*LambdaSMEFT**2*(4*Me**2 - MH**2)*vevhat**2*ye*(4*ceHRe11 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ye) + vevhat**4*(16*ceHIm11**2*MH**2 + 16*ceHRe11**2*(-4*Me**2 + MH**2) + 8*ceHRe11*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221)*(4*Me**2 - MH**2)*ye - (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)**2*(4*Me**2 - MH**2)*ye**2))*cmath.sqrt(-4*Me**2 + MH**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.g,P.g):'(MH**3*(gHgg2**2*LambdaSMEFT**4*MH**4 - 4*gHgg2*LambdaSMEFT**2*MH**2*MT**2*(gHgg1*LambdaSMEFT**2 + cHG*vevhat**2) + 4*MT**4*(gHgg1**2*LambdaSMEFT**4 + 2*cHG*gHgg1*LambdaSMEFT**2*vevhat**2 + (cHG**2 + cHGtil**2)*vevhat**4)))/(2.*cmath.pi*LambdaSMEFT**4*MT**4*vevhat**2)',
(P.mu__minus__,P.mu__plus__):'((16*LambdaSMEFT**4*MH**2*ym**2 - 64*LambdaSMEFT**4*MMU**2*ym**2 - 8*LambdaSMEFT**2*(MH**2 - 4*MMU**2)*vevhat**2*ym*(4*ceHRe22 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ym) + vevhat**4*(16*ceHIm22**2*MH**2 + (MH**2 - 4*MMU**2)*(4*ceHRe22 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ym)**2))*cmath.sqrt(MH**2 - 4*MMU**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.s,P.s__tilde__):'(3*(16*LambdaSMEFT**4*MH**2 - 64*LambdaSMEFT**4*MS**2 - 8*(4*cdHRe - 4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*LambdaSMEFT**2*(MH**2 - 4*MS**2)*vevhat**2 + (16*cdHIm**2*MH**2 + (4*cdHRe - 4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)**2*(MH**2 - 4*MS**2))*vevhat**4)*ys**2*cmath.sqrt(MH**2 - 4*MS**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.ta__minus__,P.ta__plus__):'((16*LambdaSMEFT**4*MH**2*ytau**2 - 64*LambdaSMEFT**4*MTA**2*ytau**2 - 8*LambdaSMEFT**2*(MH**2 - 4*MTA**2)*vevhat**2*ytau*(4*ceHRe33 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ytau) + vevhat**4*(16*ceHIm33**2*MH**2 + (MH**2 - 4*MTA**2)*(4*ceHRe33 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ytau)**2))*cmath.sqrt(MH**2 - 4*MTA**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.t,P.t__tilde__):'(3*(16*LambdaSMEFT**4*MH**2*yt**2 - 64*LambdaSMEFT**4*MT**2*yt**2 - 8*LambdaSMEFT**2*(MH**2 - 4*MT**2)*vevhat**2*yt*(4*ctHRe + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*yt) + vevhat**4*(16*ctHIm**2*MH**2 + (MH**2 - 4*MT**2)*(4*ctHRe + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*yt)**2))*cmath.sqrt(MH**4 - 4*MH**2*MT**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**3)',
(P.u,P.u__tilde__):'(3*(16*LambdaSMEFT**4*MH**2 - 64*LambdaSMEFT**4*MU**2 + 8*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221 - 4*cuHRe)*LambdaSMEFT**2*(MH**2 - 4*MU**2)*vevhat**2 + (4*cHl311**2*MH**2 + 8*cHl311*cHl322*MH**2 + 4*cHl322**2*MH**2 - 4*cHl311*cll1221*MH**2 - 4*cHl322*cll1221*MH**2 + cll1221**2*MH**2 + 16*cuHIm**2*MH**2 + 16*cHl311*cuHRe*MH**2 + 16*cHl322*cuHRe*MH**2 - 8*cll1221*cuHRe*MH**2 + 16*cuHRe**2*MH**2 - 16*cHl311**2*MU**2 - 32*cHl311*cHl322*MU**2 - 16*cHl322**2*MU**2 + 16*cHl311*cll1221*MU**2 + 16*cHl322*cll1221*MU**2 - 4*cll1221**2*MU**2 - 64*cHl311*cuHRe*MU**2 - 64*cHl322*cuHRe*MU**2 + 32*cll1221*cuHRe*MU**2 - 64*cuHRe**2*MU**2 + 16*cHbox**2*(MH**2 - 4*MU**2) + cHDD**2*(MH**2 - 4*MU**2) + 2*cHDD*(2*cHl311 + 2*cHl322 - cll1221 + 4*cuHRe)*(MH**2 - 4*MU**2) - 8*cHbox*(cHDD + 2*cHl311 + 2*cHl322 - cll1221 + 4*cuHRe)*(MH**2 - 4*MU**2))*vevhat**4)*yup**2*cmath.sqrt(MH**2 - 4*MU**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.W__minus__,P.W__plus__):'(vevhat**2*(16*ee**4*LambdaSMEFT**4*MH**4 - 64*ee**4*LambdaSMEFT**4*MH**2*MW**2 + 192*ee**4*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**2*(24*cHW*MW**4*(MH**2 - 2*MW**2) + ee**2*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4))*sth**2 + 64*(96*cHW*ee**2*LambdaSMEFT**2*MW**4*(MH**2 - 2*MW**2) + ee**4*LambdaSMEFT**4*(MH**4 - 4*MH**2*MW**2 + 12*MW**4) + 32*MW**4*(cHWtil**2*MH**2*(MH**2 - 4*MW**2) + cHW**2*(MH**4 - 4*MH**2*MW**2 + 6*MW**4)))*sth**4 - 2048*MW**4*(4*cHWtil**2*MH**2*(MH**2 - 4*MW**2) + 3*cHW*ee**2*LambdaSMEFT**2*(MH**2 - 2*MW**2) + 4*cHW**2*(MH**4 - 4*MH**2*MW**2 + 6*MW**4))*sth**6 + 8192*MW**4*(cHWtil**2*MH**2*(MH**2 - 4*MW**2) + cHW**2*(MH**4 - 4*MH**2*MW**2 + 6*MW**4))*sth**8 + 8*(4*cHbox - 3*cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**4*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)*vevhat**2 - 64*cHWB*cth*ee**4*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)*sth*vevhat**2 - 16*ee**2*(cHDD*(72*cHW*MW**4*(-MH**2 + 2*MW**2) - 5*ee**2*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)) + 8*cHbox*(12*cHW*MW**4*(MH**2 - 2*MW**2) + ee**2*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)) - (2*cHl311 + 2*cHl322 - cll1221)*(24*cHW*MW**4*(MH**2 - 2*MW**2) + ee**2*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)))*sth**2*vevhat**2 + 128*cHWB*cth*ee**2*(24*cHW*MW**4*(MH**2 - 2*MW**2) + ee**2*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4))*sth**3*vevhat**2 + 64*ee**2*(96*cHbox*cHW*MW**4*(MH**2 - 2*MW**2) - 60*cHDD*cHW*MW**4*(MH**2 - 2*MW**2) - 12*cHW*(2*cHl311 + 2*cHl322 - cll1221)*MW**4*(MH**2 - 2*MW**2) + 2*cHbox*ee**2*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4) - cHDD*ee**2*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4))*sth**4*vevhat**2 - 6144*cHW*cHWB*cth*ee**2*MW**4*(MH**2 - 2*MW**2)*sth**5*vevhat**2 - 3072*(2*cHbox - cHDD)*cHW*ee**2*MW**4*(MH**2 - 2*MW**2)*sth**6*vevhat**2 + (4*cHbox - 3*cHDD - 2*cHl311 - 2*cHl322 + cll1221)**2*ee**4*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)*vevhat**4 - 16*cHWB*(4*cHbox - 3*cHDD - 
2*cHl311 - 2*cHl322 + cll1221)*cth*ee**4*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)*sth*vevhat**4 - 8*(8*cHbox**2 + 3*cHDD**2 - 8*cHWB**2 + cHDD*(2*cHl311 + 2*cHl322 - cll1221) - 2*cHbox*(5*cHDD + 2*cHl311 + 2*cHl322 - cll1221))*ee**4*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)*sth**2*vevhat**4 + 64*(2*cHbox - cHDD)*cHWB*cth*ee**4*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)*sth**3*vevhat**4 + 16*(4*cHbox**2 - 4*cHbox*cHDD + cHDD**2 - 4*cHWB**2)*ee**4*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)*sth**4*vevhat**4)*cmath.sqrt(MH**2 - 4*MW**2))/(4096.*cmath.pi*LambdaSMEFT**4*MW**4*sth**4*(MH - 2*MH*sth**2)**2)',
(P.Z,P.Z):'(vevhat**2*(16*ee**4*LambdaSMEFT**4*MH**4 - 64*ee**4*LambdaSMEFT**4*MH**2*MZ**2 + 192*ee**4*LambdaSMEFT**4*MZ**4 - 1536*cHW*ee**2*LambdaSMEFT**2*MZ**4*(MH**2 - 2*MZ**2)*sth**2 - 1536*cHWB*cth*ee**2*LambdaSMEFT**2*MZ**4*(MH**2 - 2*MZ**2)*sth**3 + 512*MZ**4*(4*cHWtil**2*MH**2*(MH**2 - 4*MZ**2) - 3*cHB*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) + 6*cHW*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) + 4*cHW**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**4 + 512*cth*MZ**4*(8*cHWBtil*cHWtil*MH**2*(MH**2 - 4*MZ**2) + 3*cHWB*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) + 8*cHW*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**5 + 512*MZ**4*(4*(cHWBtil**2 + 2*(cHBtil - 2*cHWtil)*cHWtil)*MH**2*(MH**2 - 4*MZ**2) - 3*cHW*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) - 16*cHW**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) + 4*cHWB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) + cHB*(3*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) + 8*cHW*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4)))*sth**6 + 4096*cth*MZ**4*(cHWBtil*(cHBtil - 3*cHWtil)*MH**2*(MH**2 - 4*MZ**2) + cHB*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 3*cHW*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**7 + 2048*MZ**4*(-6*cHBtil*cHWtil*MH**2*(MH**2 - 4*MZ**2) + cHBtil**2*(MH**4 - 4*MH**2*MZ**2) + cHB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 6*cHB*cHW*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) + 6*cHW**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 3*((cHWBtil**2 - 2*cHWtil**2)*MH**2*(MH**2 - 4*MZ**2) + cHWB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4)))*sth**8 - 4096*cth*MZ**4*(cHWBtil*(2*cHBtil - 3*cHWtil)*MH**2*(MH**2 - 4*MZ**2) + 2*cHB*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 3*cHW*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**9 - 2048*MZ**4*(4*cHW**2*MH**4 - 3*cHWB**2*MH**4 - 3*cHWBtil**2*MH**4 + 4*cHWtil**2*MH**4 - 16*cHW**2*MH**2*MZ**2 + 12*cHWB**2*MH**2*MZ**2 + 12*cHWBtil**2*MH**2*MZ**2 - 16*cHWtil**2*MH**2*MZ**2 + 24*cHW**2*MZ**4 - 18*cHWB**2*MZ**4 - 6*cHBtil*cHWtil*MH**2*(MH**2 - 4*MZ**2) + 2*cHBtil**2*(MH**4 - 4*MH**2*MZ**2) + 2*cHB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 6*cHB*cHW*(MH**4 - 4*MH**2*MZ**2 
+ 6*MZ**4))*sth**10 + 4096*cth*MZ**4*(cHWBtil*(cHBtil - cHWtil)*MH**2*(MH**2 - 4*MZ**2) + cHB*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - cHW*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**11 + 2048*MZ**4*(cHW**2*MH**4 - cHWB**2*MH**4 - cHWBtil**2*MH**4 + cHWtil**2*MH**4 - 4*cHW**2*MH**2*MZ**2 + 4*cHWB**2*MH**2*MZ**2 + 4*cHWBtil**2*MH**2*MZ**2 - 4*cHWtil**2*MH**2*MZ**2 + 6*cHW**2*MZ**4 - 6*cHWB**2*MZ**4 - 2*cHBtil*cHWtil*MH**2*(MH**2 - 4*MZ**2) + cHBtil**2*(MH**4 - 4*MH**2*MZ**2) + cHB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 2*cHB*cHW*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**12 + 8*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**4*LambdaSMEFT**2*(MH**4 - 4*MH**2*MZ**2 + 12*MZ**4)*vevhat**2 - 384*cHW*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**2*vevhat**2 - 384*cHWB*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**3*vevhat**2 - 384*(cHB - 2*cHW)*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**4*vevhat**2 + 384*cHWB*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**5*vevhat**2 + 384*(cHB - cHW)*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**6*vevhat**2 + (4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)**2*ee**4*(MH**4 - 4*MH**2*MZ**2 + 12*MZ**4)*vevhat**4)*cmath.sqrt(MH**2 - 4*MZ**2))/(8192.*cth**4*cmath.pi*LambdaSMEFT**4*MH**2*MZ**4*sth**4)'})
# Auto-generated decay table for the muon (mu-).  `partial_widths` maps a
# tuple of final-state particles -- here (W-, muon neutrino) -- to a string
# holding the analytic partial-width formula.  The string references model
# parameters (ee, sth, cth, vevhat, LambdaSMEFT, MMU, MW) and SMEFT Wilson
# coefficients (cHDD, cHWB, cHl311, cHl322, cll1221, ceWRe22, ceWIm22) plus
# `cmath`, so it is presumably eval'd later with those names in scope --
# TODO confirm against the framework's Decay class.
# NOTE(review): machine-generated expression; do not edit by hand.
Decay_mu__minus__ = Decay(name = 'Decay_mu__minus__',
particle = P.mu__minus__,
partial_widths = {(P.W__minus__,P.vm):'((MMU**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*MMU**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 64*ee**2*LambdaSMEFT**4*(MMU**2 + 2*MW**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(MMU**2 + 2*MW**2)*sth**4 - 8*(cHDD + 2*cHl311 - 2*cHl322 - cll1221)*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2)*vevhat**2 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2)*sth*vevhat**2 + 8*(-10*cHl322*ee**2*LambdaSMEFT**2*MMU**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MMU**2 - 20*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 6*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 32*ceWIm22**2*MMU**2*MW**2 + 32*ceWRe22**2*MMU**2*MW**2 + 16*ceWIm22**2*MW**4 + 16*ceWRe22**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2) + 6*cHl311*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2))*sth**2*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2)*sth**3*vevhat**2 - 16*(-6*cHl322*ee**2*LambdaSMEFT**2*MMU**2 - cll1221*ee**2*LambdaSMEFT**2*MMU**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 64*ceWIm22**2*MMU**2*MW**2 + 64*ceWRe22**2*MMU**2*MW**2 + 32*ceWIm22**2*MW**4 + 32*ceWRe22**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2) + 2*cHl311*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2))*sth**4*vevhat**2 + 512*(ceWIm22**2 + ceWRe22**2)*MW**2*(2*MMU**2 + MW**2)*sth**6*vevhat**2 + (cHDD + 2*cHl311 - 2*cHl322 - cll1221)**2*ee**2*(MMU**2 + 2*MW**2)*vevhat**4 + 8*cHWB*(cHDD + 2*cHl311 - 2*cHl322 - cll1221)*cth*ee**2*(MMU**2 + 2*MW**2)*sth*vevhat**4 - 2*(cHDD**2 + 4*cHDD*cHl311 + 4*cHl311**2 + 12*cHl322**2 - 8*cHWB**2 + 8*cHl322*cll1221 + cll1221**2 - 2*cHDD*(4*cHl322 + cll1221) - 4*cHl311*(4*cHl322 + cll1221))*ee**2*(MMU**2 + 2*MW**2)*sth**2*vevhat**4 - 8*cHWB*(cHDD + 2*cHl311 - 6*cHl322 - cll1221)*cth*ee**2*(MMU**2 + 2*MW**2)*sth**3*vevhat**4 + (cHDD**2 + 4*cHl311**2 + 36*cHl322**2 - 16*cHWB**2 + 2*cHDD*(2*cHl311 - 6*cHl322 - cll1221) + 12*cHl322*cll1221 + cll1221**2 - 4*cHl311*(6*cHl322 + cll1221))*ee**2*(MMU**2 + 2*MW**2)*sth**4*vevhat**4 - 
192*ceWRe22*ee*LambdaSMEFT**2*MMU*MW**2*sth*vevhat*cmath.sqrt(2) + 768*ceWRe22*ee*LambdaSMEFT**2*MMU*MW**2*sth**3*vevhat*cmath.sqrt(2) - 768*ceWRe22*ee*LambdaSMEFT**2*MMU*MW**2*sth**5*vevhat*cmath.sqrt(2) + 48*ceWRe22*(cHDD + 2*cHl311 - 2*cHl322 - cll1221)*ee*MMU*MW**2*sth*vevhat**3*cmath.sqrt(2) + 192*ceWRe22*cHWB*cth*ee*MMU*MW**2*sth**2*vevhat**3*cmath.sqrt(2) - 48*ceWRe22*(3*cHDD + 6*cHl311 - 10*cHl322 - 3*cll1221)*ee*MMU*MW**2*sth**3*vevhat**3*cmath.sqrt(2) - 384*ceWRe22*cHWB*cth*ee*MMU*MW**2*sth**4*vevhat**3*cmath.sqrt(2) + 96*ceWRe22*(cHDD + 2*cHl311 - 6*cHl322 - cll1221)*ee*MMU*MW**2*sth**5*vevhat**3*cmath.sqrt(2)))/(1024.*cmath.pi*LambdaSMEFT**4*MMU**3*MW**2*sth**2*(1 - 2*sth**2)**2)'})
# Auto-generated decay table for the strange quark.  `partial_widths` maps a
# tuple of final-state particles -- here (W-, charm quark) -- to a string
# holding the analytic partial-width formula.  The string references model
# parameters (ee, sth, cth, vevhat, LambdaSMEFT, MC, MS, MW, yc, ys) and SMEFT
# Wilson coefficients (cHDD, cHWB, cHj3, cHl311, cHl322, cll1221, cHudRe/Im,
# cuWRe/Im, cdWRe/Im) plus `cmath`, so it is presumably eval'd later with
# those names in scope -- TODO confirm against the framework's Decay class.
# NOTE(review): machine-generated expression; do not edit by hand.
Decay_s = Decay(name = 'Decay_s',
particle = P.s,
partial_widths = {(P.W__minus__,P.c):'((16*ee**2*LambdaSMEFT**4*MC**4 - 32*ee**2*LambdaSMEFT**4*MC**2*MS**2 + 16*ee**2*LambdaSMEFT**4*MS**4 + 16*ee**2*LambdaSMEFT**4*MC**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MS**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**4*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth**2 + 64*ee**2*LambdaSMEFT**4*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth**4 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth**3*vevhat**2 - 8*cHWB*cth*ee**2*sth**3*vevhat**4*(2*cHl311*MC**4 + 2*cHl322*MC**4 - cll1221*MC**4 - 4*cHl311*MC**2*MS**2 - 4*cHl322*MC**2*MS**2 + 2*cll1221*MC**2*MS**2 + 2*cHl311*MS**4 + 2*cHl322*MS**4 - cll1221*MS**4 + 2*cHl311*MC**2*MW**2 + 2*cHl322*MC**2*MW**2 - cll1221*MC**2*MW**2 + 2*cHl311*MS**2*MW**2 + 2*cHl322*MS**2*MW**2 - cll1221*MS**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 8*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 24*cHudRe*MC*MS*MW**2*yc*ys) - 8*ee**2*LambdaSMEFT**2*vevhat**2*(2*cHl311*MC**4 + 2*cHl322*MC**4 - cll1221*MC**4 - 4*cHl311*MC**2*MS**2 - 4*cHl322*MC**2*MS**2 + 2*cll1221*MC**2*MS**2 + 2*cHl311*MS**4 + 2*cHl322*MS**4 - cll1221*MS**4 + 2*cHl311*MC**2*MW**2 + 2*cHl322*MC**2*MW**2 - cll1221*MC**2*MW**2 + 2*cHl311*MS**2*MW**2 + 2*cHl322*MS**2*MW**2 - cll1221*MS**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) + 8*cHWB*cth*ee**2*sth*vevhat**4*(2*cHl311*MC**4 + 2*cHl322*MC**4 - cll1221*MC**4 - 4*cHl311*MC**2*MS**2 - 4*cHl322*MC**2*MS**2 + 2*cll1221*MC**2*MS**2 + 2*cHl311*MS**4 
+ 2*cHl322*MS**4 - cll1221*MS**4 + 2*cHl311*MC**2*MW**2 + 2*cHl322*MC**2*MW**2 - cll1221*MC**2*MW**2 + 2*cHl311*MS**2*MW**2 + 2*cHl322*MS**2*MW**2 - cll1221*MS**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) - 16*sth**4*vevhat**2*(2*cHl311*ee**2*LambdaSMEFT**2*MC**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MC**4 - cll1221*ee**2*LambdaSMEFT**2*MC**4 - 4*cHl311*ee**2*LambdaSMEFT**2*MC**2*MS**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MS**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MS**4 - cll1221*ee**2*LambdaSMEFT**2*MS**4 + 2*cHl311*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MC**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MS**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MS**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MS**2*MW**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 4*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 2*cll1221*ee**2*LambdaSMEFT**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 8*cHj3*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 64*cuWIm**2*MC**4*MW**2*yc**2 + 64*cuWRe**2*MC**4*MW**2*yc**2 - 128*cuWIm**2*MC**2*MS**2*MW**2*yc**2 - 128*cuWRe**2*MC**2*MS**2*MW**2*yc**2 + 64*cuWIm**2*MS**4*MW**2*yc**2 + 64*cuWRe**2*MS**4*MW**2*yc**2 - 32*cuWIm**2*MC**2*MW**4*yc**2 - 32*cuWRe**2*MC**2*MW**4*yc**2 - 32*cuWIm**2*MS**2*MW**4*yc**2 - 32*cuWRe**2*MS**2*MW**4*yc**2 - 32*cuWIm**2*MW**6*yc**2 - 32*cuWRe**2*MW**6*yc**2 - 24*cHudRe*ee**2*LambdaSMEFT**2*MC*MS*MW**2*yc*ys + 384*cdWIm*cuWIm*MC*MS*MW**4*yc*ys - 384*cdWRe*cuWRe*MC*MS*MW**4*yc*ys + 64*cdWIm**2*MC**4*MW**2*ys**2 + 64*cdWRe**2*MC**4*MW**2*ys**2 - 128*cdWIm**2*MC**2*MS**2*MW**2*ys**2 - 
128*cdWRe**2*MC**2*MS**2*MW**2*ys**2 + 64*cdWIm**2*MS**4*MW**2*ys**2 + 64*cdWRe**2*MS**4*MW**2*ys**2 - 32*cdWIm**2*MC**2*MW**4*ys**2 - 32*cdWRe**2*MC**2*MW**4*ys**2 - 32*cdWIm**2*MS**2*MW**4*ys**2 - 32*cdWRe**2*MS**2*MW**4*ys**2 - 32*cdWIm**2*MW**6*ys**2 - 32*cdWRe**2*MW**6*ys**2) + 8*sth**2*vevhat**2*(6*cHl311*ee**2*LambdaSMEFT**2*MC**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MC**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MC**4 - 12*cHl311*ee**2*LambdaSMEFT**2*MC**2*MS**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 6*cll1221*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MS**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MS**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MS**4 + 6*cHl311*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MC**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MS**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MS**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MS**2*MW**2 - 12*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 6*cll1221*ee**2*LambdaSMEFT**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 16*cHj3*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 32*cuWIm**2*MC**4*MW**2*yc**2 + 32*cuWRe**2*MC**4*MW**2*yc**2 - 64*cuWIm**2*MC**2*MS**2*MW**2*yc**2 - 64*cuWRe**2*MC**2*MS**2*MW**2*yc**2 + 32*cuWIm**2*MS**4*MW**2*yc**2 + 32*cuWRe**2*MS**4*MW**2*yc**2 - 16*cuWIm**2*MC**2*MW**4*yc**2 - 16*cuWRe**2*MC**2*MW**4*yc**2 - 16*cuWIm**2*MS**2*MW**4*yc**2 - 16*cuWRe**2*MS**2*MW**4*yc**2 - 16*cuWIm**2*MW**6*yc**2 - 16*cuWRe**2*MW**6*yc**2 - 48*cHudRe*ee**2*LambdaSMEFT**2*MC*MS*MW**2*yc*ys + 192*cdWIm*cuWIm*MC*MS*MW**4*yc*ys - 192*cdWRe*cuWRe*MC*MS*MW**4*yc*ys + 32*cdWIm**2*MC**4*MW**2*ys**2 + 32*cdWRe**2*MC**4*MW**2*ys**2 - 64*cdWIm**2*MC**2*MS**2*MW**2*ys**2 - 64*cdWRe**2*MC**2*MS**2*MW**2*ys**2 + 32*cdWIm**2*MS**4*MW**2*ys**2 + 32*cdWRe**2*MS**4*MW**2*ys**2 - 16*cdWIm**2*MC**2*MW**4*ys**2 - 
16*cdWRe**2*MC**2*MW**4*ys**2 - 16*cdWIm**2*MS**2*MW**4*ys**2 - 16*cdWRe**2*MS**2*MW**4*ys**2 - 16*cdWIm**2*MW**6*ys**2 - 16*cdWRe**2*MW**6*ys**2) - 512*MW**2*sth**6*vevhat**2*(cuWIm**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 + cuWRe**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 - 12*cdWIm*cuWIm*MC*MS*MW**2*yc*ys + 12*cdWRe*cuWRe*MC*MS*MW**2*yc*ys - (cdWIm**2 + cdWRe**2)*(2*MC**4 + 2*MS**4 - MS**2*MW**2 - MW**4 - MC**2*(4*MS**2 + MW**2))*ys**2) + ee**2*vevhat**4*(4*cHl311**2*MC**4 + 8*cHl311*cHl322*MC**4 + 4*cHl322**2*MC**4 - 4*cHl311*cll1221*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl311**2*MC**2*MS**2 - 16*cHl311*cHl322*MC**2*MS**2 - 8*cHl322**2*MC**2*MS**2 + 8*cHl311*cll1221*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl311**2*MS**4 + 8*cHl311*cHl322*MS**4 + 4*cHl322**2*MS**4 - 4*cHl311*cll1221*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl311**2*MC**2*MW**2 + 8*cHl311*cHl322*MC**2*MW**2 + 4*cHl322**2*MC**2*MW**2 - 4*cHl311*cll1221*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl311**2*MS**2*MW**2 + 8*cHl311*cHl322*MS**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 4*cHl311*cll1221*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 16*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 48*cHl311*cHudRe*MC*MS*MW**2*yc*ys - 48*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 24*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 4*cHudIm**2*MC**4*yc**2*ys**2 + 4*cHudRe**2*MC**4*yc**2*ys**2 - 8*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 8*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 4*cHudIm**2*MS**4*yc**2*ys**2 + 4*cHudRe**2*MS**4*yc**2*ys**2 + 4*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 
4*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 8*cHudIm**2*MW**4*yc**2*ys**2 - 8*cHudRe**2*MW**4*yc**2*ys**2 - 8*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) - 2*cHDD*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 12*cHudRe*MC*MS*MW**2*yc*ys)) - 2*ee**2*sth**2*vevhat**4*(4*cHl311**2*MC**4 + 8*cHl311*cHl322*MC**4 + 4*cHl322**2*MC**4 - 8*cHWB**2*MC**4 - 4*cHl311*cll1221*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl311**2*MC**2*MS**2 - 16*cHl311*cHl322*MC**2*MS**2 - 8*cHl322**2*MC**2*MS**2 + 16*cHWB**2*MC**2*MS**2 + 8*cHl311*cll1221*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl311**2*MS**4 + 8*cHl311*cHl322*MS**4 + 4*cHl322**2*MS**4 - 8*cHWB**2*MS**4 - 4*cHl311*cll1221*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl311**2*MC**2*MW**2 + 8*cHl311*cHl322*MC**2*MW**2 + 4*cHl322**2*MC**2*MW**2 - 8*cHWB**2*MC**2*MW**2 - 4*cHl311*cll1221*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl311**2*MS**2*MW**2 + 8*cHl311*cHl322*MS**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 8*cHWB**2*MS**2*MW**2 - 4*cHl311*cll1221*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 16*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MC**4 + MS**4 + MS**2*MW**2 - 
2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 32*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 72*cHl311*cHudRe*MC*MS*MW**2*yc*ys - 72*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 36*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 8*cHudIm**2*MC**4*yc**2*ys**2 + 8*cHudRe**2*MC**4*yc**2*ys**2 - 16*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 16*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 8*cHudIm**2*MS**4*yc**2*ys**2 + 8*cHudRe**2*MS**4*yc**2*ys**2 + 8*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 8*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 8*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 8*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 16*cHudIm**2*MW**4*yc**2*ys**2 - 16*cHudRe**2*MW**4*yc**2*ys**2 - 12*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 16*cHudRe*MC*MS*MW**2*yc*ys) - 2*cHDD*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 6*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 18*cHudRe*MC*MS*MW**2*yc*ys)) + ee**2*sth**4*vevhat**4*(4*cHl311**2*MC**4 + 8*cHl311*cHl322*MC**4 + 4*cHl322**2*MC**4 - 16*cHWB**2*MC**4 - 4*cHl311*cll1221*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl311**2*MC**2*MS**2 - 16*cHl311*cHl322*MC**2*MS**2 - 8*cHl322**2*MC**2*MS**2 + 32*cHWB**2*MC**2*MS**2 + 8*cHl311*cll1221*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl311**2*MS**4 + 8*cHl311*cHl322*MS**4 + 4*cHl322**2*MS**4 - 16*cHWB**2*MS**4 - 4*cHl311*cll1221*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl311**2*MC**2*MW**2 + 8*cHl311*cHl322*MC**2*MW**2 + 
4*cHl322**2*MC**2*MW**2 - 16*cHWB**2*MC**2*MW**2 - 4*cHl311*cll1221*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl311**2*MS**2*MW**2 + 8*cHl311*cHl322*MS**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 16*cHWB**2*MS**2*MW**2 - 4*cHl311*cll1221*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 32*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 64*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 96*cHl311*cHudRe*MC*MS*MW**2*yc*ys - 96*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 48*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 16*cHudIm**2*MC**4*yc**2*ys**2 + 16*cHudRe**2*MC**4*yc**2*ys**2 - 32*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 32*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 16*cHudIm**2*MS**4*yc**2*ys**2 + 16*cHudRe**2*MS**4*yc**2*ys**2 + 16*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 16*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 16*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 16*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 32*cHudIm**2*MW**4*yc**2*ys**2 - 32*cHudRe**2*MW**4*yc**2*ys**2 - 16*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 24*cHudRe*MC*MS*MW**2*yc*ys) - 2*cHDD*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 8*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 24*cHudRe*MC*MS*MW**2*yc*ys)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cuWRe*MC*(-MC**2 + 
MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) - 768*ee*LambdaSMEFT**2*MW**2*sth**3*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) + 768*ee*LambdaSMEFT**2*MW**2*sth**5*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) - 192*cHWB*cth*ee*MW**2*sth**2*vevhat**3*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) + 384*cHWB*cth*ee*MW**2*sth**4*vevhat**3*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) - 48*ee*MW**2*sth*vevhat**3*(-2*cHl311*cuWRe*MC**3*yc - 2*cHl322*cuWRe*MC**3*yc + cll1221*cuWRe*MC**3*yc + 2*cHl311*cuWRe*MC*MS**2*yc + 2*cHl322*cuWRe*MC*MS**2*yc - cll1221*cuWRe*MC*MS**2*yc + 2*cHl311*cuWRe*MC*MW**2*yc + 2*cHl322*cuWRe*MC*MW**2*yc - cll1221*cuWRe*MC*MW**2*yc + 4*cHj3*cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cHDD*cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + 2*cdWRe*cHl311*MC**2*MS*ys + 2*cdWRe*cHl322*MC**2*MS*ys - cdWRe*cll1221*MC**2*MS*ys - 2*cdWRe*cHl311*MS**3*ys - 2*cdWRe*cHl322*MS**3*ys + cdWRe*cll1221*MS**3*ys + 2*cdWRe*cHl311*MS*MW**2*ys + 2*cdWRe*cHl322*MS*MW**2*ys - cdWRe*cll1221*MS*MW**2*ys + 4*cdWRe*cHj3*MS*(-MC**2 + MS**2 - MW**2)*ys + cdWRe*cHDD*MS*(MC**2 - MS**2 + MW**2)*ys - 2*cHudIm*cuWIm*MC**2*MS*yc**2*ys + 2*cHudRe*cuWRe*MC**2*MS*yc**2*ys + 2*cHudIm*cuWIm*MS**3*yc**2*ys - 2*cHudRe*cuWRe*MS**3*yc**2*ys - 2*cHudIm*cuWIm*MS*MW**2*yc**2*ys + 2*cHudRe*cuWRe*MS*MW**2*yc**2*ys - 2*cdWIm*cHudIm*MC**3*yc*ys**2 - 2*cdWRe*cHudRe*MC**3*yc*ys**2 + 2*cdWIm*cHudIm*MC*MS**2*yc*ys**2 + 2*cdWRe*cHudRe*MC*MS**2*yc*ys**2 + 2*cdWIm*cHudIm*MC*MW**2*yc*ys**2 + 2*cdWRe*cHudRe*MC*MW**2*yc*ys**2)*cmath.sqrt(2) - 96*ee*MW**2*sth**5*vevhat**3*(-2*cHl311*cuWRe*MC**3*yc - 2*cHl322*cuWRe*MC**3*yc + cll1221*cuWRe*MC**3*yc + 2*cHl311*cuWRe*MC*MS**2*yc + 2*cHl322*cuWRe*MC*MS**2*yc - cll1221*cuWRe*MC*MS**2*yc + 2*cHl311*cuWRe*MC*MW**2*yc + 2*cHl322*cuWRe*MC*MW**2*yc - 
cll1221*cuWRe*MC*MW**2*yc + 8*cHj3*cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cHDD*cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + 2*cdWRe*cHl311*MC**2*MS*ys + 2*cdWRe*cHl322*MC**2*MS*ys - cdWRe*cll1221*MC**2*MS*ys - 2*cdWRe*cHl311*MS**3*ys - 2*cdWRe*cHl322*MS**3*ys + cdWRe*cll1221*MS**3*ys + 2*cdWRe*cHl311*MS*MW**2*ys + 2*cdWRe*cHl322*MS*MW**2*ys - cdWRe*cll1221*MS*MW**2*ys + 8*cdWRe*cHj3*MS*(-MC**2 + MS**2 - MW**2)*ys + cdWRe*cHDD*MS*(MC**2 - MS**2 + MW**2)*ys - 4*cHudIm*cuWIm*MC**2*MS*yc**2*ys + 4*cHudRe*cuWRe*MC**2*MS*yc**2*ys + 4*cHudIm*cuWIm*MS**3*yc**2*ys - 4*cHudRe*cuWRe*MS**3*yc**2*ys - 4*cHudIm*cuWIm*MS*MW**2*yc**2*ys + 4*cHudRe*cuWRe*MS*MW**2*yc**2*ys - 4*cdWIm*cHudIm*MC**3*yc*ys**2 - 4*cdWRe*cHudRe*MC**3*yc*ys**2 + 4*cdWIm*cHudIm*MC*MS**2*yc*ys**2 + 4*cdWRe*cHudRe*MC*MS**2*yc*ys**2 + 4*cdWIm*cHudIm*MC*MW**2*yc*ys**2 + 4*cdWRe*cHudRe*MC*MW**2*yc*ys**2)*cmath.sqrt(2) + 48*ee*MW**2*sth**3*vevhat**3*(-6*cHl311*cuWRe*MC**3*yc - 6*cHl322*cuWRe*MC**3*yc + 3*cll1221*cuWRe*MC**3*yc + 6*cHl311*cuWRe*MC*MS**2*yc + 6*cHl322*cuWRe*MC*MS**2*yc - 3*cll1221*cuWRe*MC*MS**2*yc + 6*cHl311*cuWRe*MC*MW**2*yc + 6*cHl322*cuWRe*MC*MW**2*yc - 3*cll1221*cuWRe*MC*MW**2*yc + 6*cdWRe*cHl311*MC**2*MS*ys + 6*cdWRe*cHl322*MC**2*MS*ys - 3*cdWRe*cll1221*MC**2*MS*ys - 6*cdWRe*cHl311*MS**3*ys - 6*cdWRe*cHl322*MS**3*ys + 3*cdWRe*cll1221*MS**3*ys + 6*cdWRe*cHl311*MS*MW**2*ys + 6*cdWRe*cHl322*MS*MW**2*ys - 3*cdWRe*cll1221*MS*MW**2*ys - 8*cHudIm*cuWIm*MC**2*MS*yc**2*ys + 8*cHudRe*cuWRe*MC**2*MS*yc**2*ys + 8*cHudIm*cuWIm*MS**3*yc**2*ys - 8*cHudRe*cuWRe*MS**3*yc**2*ys - 8*cHudIm*cuWIm*MS*MW**2*yc**2*ys + 8*cHudRe*cuWRe*MS*MW**2*yc**2*ys - 8*cdWIm*cHudIm*MC**3*yc*ys**2 - 8*cdWRe*cHudRe*MC**3*yc*ys**2 + 8*cdWIm*cHudIm*MC*MS**2*yc*ys**2 + 8*cdWRe*cHudRe*MC*MS**2*yc*ys**2 + 8*cdWIm*cHudIm*MC*MW**2*yc*ys**2 + 8*cdWRe*cHudRe*MC*MW**2*yc*ys**2 + 16*cHj3*(cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cdWRe*MS*(-MC**2 + MS**2 - MW**2)*ys) + 3*cHDD*(cuWRe*MC*(-MC**2 + MS**2 + 
MW**2)*ys))*cmath.sqrt(2))*cmath.sqrt(MC**4 + (MS**2 - MW**2)**2 - 2*MC**2*(MS**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MS**3*MW**2*sth**2*(1 - 2*sth**2)**2)'})
# Auto-generated decay table for the tau lepton (ta-).  `partial_widths` maps
# a tuple of final-state particles -- here (W-, tau neutrino) -- to a string
# holding the analytic partial-width formula.  The string references model
# parameters (ee, sth, cth, vevhat, LambdaSMEFT, MTA, MW) and SMEFT Wilson
# coefficients (cHDD, cHWB, cHl311, cHl322, cHl333, cll1221, ceWRe33,
# ceWIm33) plus `cmath`, so it is presumably eval'd later with those names in
# scope -- TODO confirm against the framework's Decay class.
# NOTE(review): machine-generated expression; do not edit by hand.
Decay_ta__minus__ = Decay(name = 'Decay_ta__minus__',
particle = P.ta__minus__,
partial_widths = {(P.W__minus__,P.vt):'((MTA**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*MTA**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 64*ee**2*LambdaSMEFT**4*(MTA**2 + 2*MW**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(MTA**2 + 2*MW**2)*sth**4 - 8*(cHDD + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2)*vevhat**2 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2)*sth*vevhat**2 + 8*(6*cHl322*ee**2*LambdaSMEFT**2*MTA**2 - 16*cHl333*ee**2*LambdaSMEFT**2*MTA**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MTA**2 + 12*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 32*cHl333*ee**2*LambdaSMEFT**2*MW**2 - 6*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 32*ceWIm33**2*MTA**2*MW**2 + 32*ceWRe33**2*MTA**2*MW**2 + 16*ceWIm33**2*MW**4 + 16*ceWRe33**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2) + 6*cHl311*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2))*sth**2*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2)*sth**3*vevhat**2 - 16*(2*cHl322*ee**2*LambdaSMEFT**2*MTA**2 - 8*cHl333*ee**2*LambdaSMEFT**2*MTA**2 - cll1221*ee**2*LambdaSMEFT**2*MTA**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 16*cHl333*ee**2*LambdaSMEFT**2*MW**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 64*ceWIm33**2*MTA**2*MW**2 + 64*ceWRe33**2*MTA**2*MW**2 + 32*ceWIm33**2*MW**4 + 32*ceWRe33**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2) + 2*cHl311*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2))*sth**4*vevhat**2 + 512*(ceWIm33**2 + ceWRe33**2)*MW**2*(2*MTA**2 + MW**2)*sth**6*vevhat**2 + (cHDD + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)**2*ee**2*(MTA**2 + 2*MW**2)*vevhat**4 + 8*cHWB*(cHDD + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*cth*ee**2*(MTA**2 + 2*MW**2)*sth*vevhat**4 - 2*(cHDD**2 + 4*cHl311**2 + 4*cHl322**2 - 24*cHl322*cHl333 + 32*cHl333**2 - 8*cHWB**2 + 4*cHl311*(2*cHl322 - 6*cHl333 - cll1221) + 2*cHDD*(2*cHl311 + 2*cHl322 - 6*cHl333 - cll1221) - 4*cHl322*cll1221 + 12*cHl333*cll1221 + cll1221**2)*ee**2*(MTA**2 + 2*MW**2)*sth**2*vevhat**4 - 8*cHWB*(cHDD + 2*cHl311 + 2*cHl322 - 8*cHl333 - 
cll1221)*cth*ee**2*(MTA**2 + 2*MW**2)*sth**3*vevhat**4 + (cHDD**2 + 4*cHl311**2 + 4*cHl322**2 - 32*cHl322*cHl333 + 64*cHl333**2 - 16*cHWB**2 + 4*cHl311*(2*cHl322 - 8*cHl333 - cll1221) + 2*cHDD*(2*cHl311 + 2*cHl322 - 8*cHl333 - cll1221) - 4*cHl322*cll1221 + 16*cHl333*cll1221 + cll1221**2)*ee**2*(MTA**2 + 2*MW**2)*sth**4*vevhat**4 - 192*ceWRe33*ee*LambdaSMEFT**2*MTA*MW**2*sth*vevhat*cmath.sqrt(2) + 768*ceWRe33*ee*LambdaSMEFT**2*MTA*MW**2*sth**3*vevhat*cmath.sqrt(2) - 768*ceWRe33*ee*LambdaSMEFT**2*MTA*MW**2*sth**5*vevhat*cmath.sqrt(2) + 48*ceWRe33*(cHDD + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee*MTA*MW**2*sth*vevhat**3*cmath.sqrt(2) + 192*ceWRe33*cHWB*cth*ee*MTA*MW**2*sth**2*vevhat**3*cmath.sqrt(2) - 48*ceWRe33*(3*cHDD + 6*cHl311 + 6*cHl322 - 16*cHl333 - 3*cll1221)*ee*MTA*MW**2*sth**3*vevhat**3*cmath.sqrt(2) - 384*ceWRe33*cHWB*cth*ee*MTA*MW**2*sth**4*vevhat**3*cmath.sqrt(2) + 96*ceWRe33*(cHDD + 2*cHl311 + 2*cHl322 - 8*cHl333 - cll1221)*ee*MTA*MW**2*sth**5*vevhat**3*cmath.sqrt(2)))/(1024.*cmath.pi*LambdaSMEFT**4*MTA**3*MW**2*sth**2*(1 - 2*sth**2)**2)'})
Decay_t = Decay(name = 'Decay_t',
particle = P.t,
partial_widths = {(P.W__plus__,P.b):'((16*ee**2*LambdaSMEFT**4*MB**4 - 32*ee**2*LambdaSMEFT**4*MB**2*MT**2 + 16*ee**2*LambdaSMEFT**4*MT**4 + 16*ee**2*LambdaSMEFT**4*MB**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MT**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**4*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth**2 + 64*ee**2*LambdaSMEFT**4*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth**4 - 8*ee**2*LambdaSMEFT**2*(2*cHl322*MB**4 - 4*cHQ3*MB**4 - cll1221*MB**4 - 4*cHl322*MB**2*MT**2 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 + 2*cHl322*MT**4 - 4*cHQ3*MT**4 - cll1221*MT**4 + 2*cHl322*MB**2*MW**2 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 + 2*cHl322*MT**2*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 - 4*cHl322*MW**4 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + cHDD*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*vevhat**2 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth*vevhat**2 + 8*(6*cHl322*ee**2*LambdaSMEFT**2*MB**4 - 16*cHQ3*ee**2*LambdaSMEFT**2*MB**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MB**4 - 12*cHl322*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 32*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 6*cll1221*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MT**4 - 16*cHQ3*ee**2*LambdaSMEFT**2*MT**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MT**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MB**2*MW**2 - 16*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MB**2*MW**2 + 32*cbWIm**2*MB**4*MW**2 + 32*cbWRe**2*MB**4*MW**2 + 32*ctWIm**2*MB**4*MW**2 + 32*ctWRe**2*MB**4*MW**2 - 48*cHtbRe*ee**2*LambdaSMEFT**2*MB*MT*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 16*cHQ3*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 64*cbWIm**2*MB**2*MT**2*MW**2 - 64*cbWRe**2*MB**2*MT**2*MW**2 - 64*ctWIm**2*MB**2*MT**2*MW**2 - 
64*ctWRe**2*MB**2*MT**2*MW**2 + 32*cbWIm**2*MT**4*MW**2 + 32*cbWRe**2*MT**4*MW**2 + 32*ctWIm**2*MT**4*MW**2 + 32*ctWRe**2*MT**4*MW**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 32*cHQ3*ee**2*LambdaSMEFT**2*MW**4 + 6*cll1221*ee**2*LambdaSMEFT**2*MW**4 - 16*cbWIm**2*MB**2*MW**4 - 16*cbWRe**2*MB**2*MW**4 - 16*ctWIm**2*MB**2*MW**4 - 16*ctWRe**2*MB**2*MW**4 + 192*cbWIm*ctWIm*MB*MT*MW**4 - 192*cbWRe*ctWRe*MB*MT*MW**4 - 16*cbWIm**2*MT**2*MW**4 - 16*cbWRe**2*MT**2*MW**4 - 16*ctWIm**2*MT**2*MW**4 - 16*ctWRe**2*MT**2*MW**4 - 16*cbWIm**2*MW**6 - 16*cbWRe**2*MW**6 - 16*ctWIm**2*MW**6 - 16*ctWRe**2*MW**6 + 3*cHDD*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 6*cHl311*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth**2*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth**3*vevhat**2 - 16*(2*cHl322*ee**2*LambdaSMEFT**2*MB**4 - 8*cHQ3*ee**2*LambdaSMEFT**2*MB**4 - cll1221*ee**2*LambdaSMEFT**2*MB**4 - 4*cHl322*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 16*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MT**4 - 8*cHQ3*ee**2*LambdaSMEFT**2*MT**4 - cll1221*ee**2*LambdaSMEFT**2*MT**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MB**2*MW**2 - 8*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MB**2*MW**2 + 64*cbWIm**2*MB**4*MW**2 + 64*cbWRe**2*MB**4*MW**2 + 64*ctWIm**2*MB**4*MW**2 + 64*ctWRe**2*MB**4*MW**2 - 24*cHtbRe*ee**2*LambdaSMEFT**2*MB*MT*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 8*cHQ3*ee**2*LambdaSMEFT**2*MT**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 128*cbWIm**2*MB**2*MT**2*MW**2 - 128*cbWRe**2*MB**2*MT**2*MW**2 - 128*ctWIm**2*MB**2*MT**2*MW**2 - 128*ctWRe**2*MB**2*MT**2*MW**2 + 64*cbWIm**2*MT**4*MW**2 + 64*cbWRe**2*MT**4*MW**2 + 64*ctWIm**2*MT**4*MW**2 + 64*ctWRe**2*MT**4*MW**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 
16*cHQ3*ee**2*LambdaSMEFT**2*MW**4 + 2*cll1221*ee**2*LambdaSMEFT**2*MW**4 - 32*cbWIm**2*MB**2*MW**4 - 32*cbWRe**2*MB**2*MW**4 - 32*ctWIm**2*MB**2*MW**4 - 32*ctWRe**2*MB**2*MW**4 + 384*cbWIm*ctWIm*MB*MT*MW**4 - 384*cbWRe*ctWRe*MB*MT*MW**4 - 32*cbWIm**2*MT**2*MW**4 - 32*cbWRe**2*MT**2*MW**4 - 32*ctWIm**2*MT**2*MW**4 - 32*ctWRe**2*MT**2*MW**4 - 32*cbWIm**2*MW**6 - 32*cbWRe**2*MW**6 - 32*ctWIm**2*MW**6 - 32*ctWRe**2*MW**6 + cHDD*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth**4*vevhat**2 - 512*MW**2*(-12*cbWIm*ctWIm*MB*MT*MW**2 + 12*cbWRe*ctWRe*MB*MT*MW**2 - (ctWIm**2 + ctWRe**2)*(2*MB**4 + 2*MT**4 - MT**2*MW**2 - MW**4 - MB**2*(4*MT**2 + MW**2)) + cbWIm**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)) + cbWRe**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)))*sth**6*vevhat**2 + ee**2*(4*cHl322**2*MB**4 - 16*cHl322*cHQ3*MB**4 + 16*cHQ3**2*MB**4 + 4*cHtbIm**2*MB**4 + 4*cHtbRe**2*MB**4 - 4*cHl322*cll1221*MB**4 + 8*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 8*cHl322**2*MB**2*MT**2 + 32*cHl322*cHQ3*MB**2*MT**2 - 32*cHQ3**2*MB**2*MT**2 - 8*cHtbIm**2*MB**2*MT**2 - 8*cHtbRe**2*MB**2*MT**2 + 8*cHl322*cll1221*MB**2*MT**2 - 16*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 4*cHl322**2*MT**4 - 16*cHl322*cHQ3*MT**4 + 16*cHQ3**2*MT**4 + 4*cHtbIm**2*MT**4 + 4*cHtbRe**2*MT**4 - 4*cHl322*cll1221*MT**4 + 8*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 4*cHl322**2*MB**2*MW**2 - 16*cHl322*cHQ3*MB**2*MW**2 + 16*cHQ3**2*MB**2*MW**2 + 4*cHtbIm**2*MB**2*MW**2 + 4*cHtbRe**2*MB**2*MW**2 - 4*cHl322*cll1221*MB**2*MW**2 + 8*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 - 48*cHl322*cHtbRe*MB*MT*MW**2 + 96*cHQ3*cHtbRe*MB*MT*MW**2 + 24*cHtbRe*cll1221*MB*MT*MW**2 + 4*cHl322**2*MT**2*MW**2 - 16*cHl322*cHQ3*MT**2*MW**2 + 16*cHQ3**2*MT**2*MW**2 + 4*cHtbIm**2*MT**2*MW**2 + 4*cHtbRe**2*MT**2*MW**2 - 
4*cHl322*cll1221*MT**2*MW**2 + 8*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 8*cHl322**2*MW**4 + 32*cHl322*cHQ3*MW**4 - 32*cHQ3**2*MW**4 - 8*cHtbIm**2*MW**4 - 8*cHtbRe**2*MW**4 + 8*cHl322*cll1221*MW**4 - 16*cHQ3*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHDD*(-4*cHQ3*MB**4 - cll1221*MB**4 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 4*cHQ3*MT**4 - cll1221*MT**4 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 4*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))))*vevhat**4 + 8*cHWB*cth*ee**2*(2*cHl322*MB**4 - 4*cHQ3*MB**4 - cll1221*MB**4 - 4*cHl322*MB**2*MT**2 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 + 2*cHl322*MT**4 - 4*cHQ3*MT**4 - cll1221*MT**4 + 2*cHl322*MB**2*MW**2 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 + 2*cHl322*MT**2*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 - 4*cHl322*MW**4 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + cHDD*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth*vevhat**4 - 2*ee**2*(4*cHl322**2*MB**4 - 24*cHl322*cHQ3*MB**4 + 32*cHQ3**2*MB**4 + 8*cHtbIm**2*MB**4 + 8*cHtbRe**2*MB**4 - 8*cHWB**2*MB**4 - 4*cHl322*cll1221*MB**4 + 12*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 8*cHl322**2*MB**2*MT**2 + 48*cHl322*cHQ3*MB**2*MT**2 - 64*cHQ3**2*MB**2*MT**2 - 
16*cHtbIm**2*MB**2*MT**2 - 16*cHtbRe**2*MB**2*MT**2 + 16*cHWB**2*MB**2*MT**2 + 8*cHl322*cll1221*MB**2*MT**2 - 24*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 4*cHl322**2*MT**4 - 24*cHl322*cHQ3*MT**4 + 32*cHQ3**2*MT**4 + 8*cHtbIm**2*MT**4 + 8*cHtbRe**2*MT**4 - 8*cHWB**2*MT**4 - 4*cHl322*cll1221*MT**4 + 12*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 4*cHl322**2*MB**2*MW**2 - 24*cHl322*cHQ3*MB**2*MW**2 + 32*cHQ3**2*MB**2*MW**2 + 8*cHtbIm**2*MB**2*MW**2 + 8*cHtbRe**2*MB**2*MW**2 - 8*cHWB**2*MB**2*MW**2 - 4*cHl322*cll1221*MB**2*MW**2 + 12*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 - 72*cHl322*cHtbRe*MB*MT*MW**2 + 192*cHQ3*cHtbRe*MB*MT*MW**2 + 36*cHtbRe*cll1221*MB*MT*MW**2 + 4*cHl322**2*MT**2*MW**2 - 24*cHl322*cHQ3*MT**2*MW**2 + 32*cHQ3**2*MT**2*MW**2 + 8*cHtbIm**2*MT**2*MW**2 + 8*cHtbRe**2*MT**2*MW**2 - 8*cHWB**2*MT**2*MW**2 - 4*cHl322*cll1221*MT**2*MW**2 + 12*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 8*cHl322**2*MW**4 + 48*cHl322*cHQ3*MW**4 - 64*cHQ3**2*MW**4 - 16*cHtbIm**2*MW**4 - 16*cHtbRe**2*MW**4 + 16*cHWB**2*MW**4 + 8*cHl322*cll1221*MW**4 - 24*cHQ3*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHDD*(-6*cHQ3*MB**4 - cll1221*MB**4 + 12*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 6*cHQ3*MT**4 - cll1221*MT**4 - 6*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 18*cHtbRe*MB*MT*MW**2 - 6*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 12*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 18*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 6*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + 
MW**2))))*sth**2*vevhat**4 - 8*cHWB*cth*ee**2*(2*cHl322*MB**4 - 8*cHQ3*MB**4 - cll1221*MB**4 - 4*cHl322*MB**2*MT**2 + 16*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 + 2*cHl322*MT**4 - 8*cHQ3*MT**4 - cll1221*MT**4 + 2*cHl322*MB**2*MW**2 - 8*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 24*cHtbRe*MB*MT*MW**2 + 2*cHl322*MT**2*MW**2 - 8*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 - 4*cHl322*MW**4 + 16*cHQ3*MW**4 + 2*cll1221*MW**4 + cHDD*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth**3*vevhat**4 + ee**2*(4*cHl322**2*MB**4 - 32*cHl322*cHQ3*MB**4 + 64*cHQ3**2*MB**4 + 16*cHtbIm**2*MB**4 + 16*cHtbRe**2*MB**4 - 16*cHWB**2*MB**4 - 4*cHl322*cll1221*MB**4 + 16*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 8*cHl322**2*MB**2*MT**2 + 64*cHl322*cHQ3*MB**2*MT**2 - 128*cHQ3**2*MB**2*MT**2 - 32*cHtbIm**2*MB**2*MT**2 - 32*cHtbRe**2*MB**2*MT**2 + 32*cHWB**2*MB**2*MT**2 + 8*cHl322*cll1221*MB**2*MT**2 - 32*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 4*cHl322**2*MT**4 - 32*cHl322*cHQ3*MT**4 + 64*cHQ3**2*MT**4 + 16*cHtbIm**2*MT**4 + 16*cHtbRe**2*MT**4 - 16*cHWB**2*MT**4 - 4*cHl322*cll1221*MT**4 + 16*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 4*cHl322**2*MB**2*MW**2 - 32*cHl322*cHQ3*MB**2*MW**2 + 64*cHQ3**2*MB**2*MW**2 + 16*cHtbIm**2*MB**2*MW**2 + 16*cHtbRe**2*MB**2*MW**2 - 16*cHWB**2*MB**2*MW**2 - 4*cHl322*cll1221*MB**2*MW**2 + 16*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 - 96*cHl322*cHtbRe*MB*MT*MW**2 + 384*cHQ3*cHtbRe*MB*MT*MW**2 + 48*cHtbRe*cll1221*MB*MT*MW**2 + 4*cHl322**2*MT**2*MW**2 - 32*cHl322*cHQ3*MT**2*MW**2 + 64*cHQ3**2*MT**2*MW**2 + 16*cHtbIm**2*MT**2*MW**2 + 16*cHtbRe**2*MT**2*MW**2 - 16*cHWB**2*MT**2*MW**2 - 4*cHl322*cll1221*MT**2*MW**2 + 16*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 8*cHl322**2*MW**4 + 64*cHl322*cHQ3*MW**4 - 128*cHQ3**2*MW**4 - 32*cHtbIm**2*MW**4 - 32*cHtbRe**2*MW**4 + 32*cHWB**2*MW**4 + 8*cHl322*cll1221*MW**4 - 32*cHQ3*cll1221*MW**4 - 
2*cll1221**2*MW**4 + cHDD**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHDD*(-8*cHQ3*MB**4 - cll1221*MB**4 + 16*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 8*cHQ3*MT**4 - cll1221*MT**4 - 8*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 24*cHtbRe*MB*MT*MW**2 - 8*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 16*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 24*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 8*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))))*sth**4*vevhat**4 + 192*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth*vevhat*cmath.sqrt(2) - 768*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**3*vevhat*cmath.sqrt(2) + 768*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**5*vevhat*cmath.sqrt(2) - 48*ee*MW**2*(2*cHtbIm*ctWIm*MB*(MB**2 - MT**2 - MW**2) + 2*cbWIm*cHtbIm*MT*(MB**2 - MT**2 + MW**2) + (cHDD + 2*cHl311 + 2*cHl322 - 4*cHQ3 - cll1221)*ctWRe*MT*(MB**2 - MT**2 + MW**2) + 2*cHtbRe*ctWRe*MB*(-MB**2 + MT**2 + MW**2) + cbWRe*(-2*cHl322*MB**3 + 4*cHQ3*MB**3 + cll1221*MB**3 + 2*cHtbRe*MB**2*MT + 2*cHl322*MB*MT**2 - 4*cHQ3*MB*MT**2 - cll1221*MB*MT**2 - 2*cHtbRe*MT**3 + 2*cHl322*MB*MW**2 - 4*cHQ3*MB*MW**2 - cll1221*MB*MW**2 + 2*cHtbRe*MT*MW**2 + cHDD*MB*(-MB**2 + MT**2 + MW**2) + 2*cHl311*MB*(-MB**2 + MT**2 + MW**2)))*sth*vevhat**3*cmath.sqrt(2) - 192*cHWB*cth*ee*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**2*vevhat**3*cmath.sqrt(2) + 
48*ee*MW**2*(8*cHtbIm*ctWIm*MB*(MB**2 - MT**2 - MW**2) + 8*cbWIm*cHtbIm*MT*(MB**2 - MT**2 + MW**2) + (3*cHDD + 6*cHl311 + 6*cHl322 - 16*cHQ3 - 3*cll1221)*ctWRe*MT*(MB**2 - MT**2 + MW**2) + 8*cHtbRe*ctWRe*MB*(-MB**2 + MT**2 + MW**2) + cbWRe*(-6*cHl322*MB**3 + 16*cHQ3*MB**3 + 3*cll1221*MB**3 + 8*cHtbRe*MB**2*MT + 6*cHl322*MB*MT**2 - 16*cHQ3*MB*MT**2 - 3*cll1221*MB*MT**2 - 8*cHtbRe*MT**3 + 6*cHl322*MB*MW**2 - 16*cHQ3*MB*MW**2 - 3*cll1221*MB*MW**2 + 8*cHtbRe*MT*MW**2 + 3*cHDD*MB*(-MB**2 + MT**2 + MW**2) + 6*cHl311*MB*(-MB**2 + MT**2 + MW**2)))*sth**3*vevhat**3*cmath.sqrt(2) + 384*cHWB*cth*ee*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**4*vevhat**3*cmath.sqrt(2) - 96*ee*MW**2*(4*cHtbIm*ctWIm*MB*(MB**2 - MT**2 - MW**2) + 4*cbWIm*cHtbIm*MT*(MB**2 - MT**2 + MW**2) + (cHDD + 2*cHl311 + 2*cHl322 - 8*cHQ3 - cll1221)*ctWRe*MT*(MB**2 - MT**2 + MW**2) + 4*cHtbRe*ctWRe*MB*(-MB**2 + MT**2 + MW**2) + cbWRe*(-2*cHl322*MB**3 + 8*cHQ3*MB**3 + cll1221*MB**3 + 4*cHtbRe*MB**2*MT + 2*cHl322*MB*MT**2 - 8*cHQ3*MB*MT**2 - cll1221*MB*MT**2 - 4*cHtbRe*MT**3 + 2*cHl322*MB*MW**2 - 8*cHQ3*MB*MW**2 - cll1221*MB*MW**2 + 4*cHtbRe*MT*MW**2 + cHDD*MB*(-MB**2 + MT**2 + MW**2) + 2*cHl311*MB*(-MB**2 + MT**2 + MW**2)))*sth**5*vevhat**3*cmath.sqrt(2))*cmath.sqrt(MB**4 + (MT**2 - MW**2)**2 - 2*MB**2*(MT**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MT**3*MW**2*sth**2*(1 - 2*sth**2)**2)'})
# NOTE(review): this looks like machine-generated UFO model output (FeynRules /
# MadGraph style) — confirm against the file header.  The width expressions
# below should not be edited by hand; regenerate them from the model instead.
#
# Decay object for the up quark (P.u).  `partial_widths` maps each final-state
# particle tuple to its analytic partial width, stored as a string expression
# that is evaluated later with the model's numerical parameter values.
Decay_u = Decay(name = 'Decay_u',
particle = P.u,
# Gamma(u -> W+ d).  The expression carries SMEFT Wilson coefficients (cHWB,
# cHDD, cHj3, cHl311, cHl322, cll1221, cHudRe/Im, cuWRe/Im, cdWRe/Im) with
# powers of 1/LambdaSMEFT, the Yukawa factors ydo/yup, the usual two-body
# Kallen phase-space factor cmath.sqrt(MD**4 + (MU**2 - MW**2)**2 -
# 2*MD**2*(MU**2 + MW**2)), and an overall 1/(1024*pi*LambdaSMEFT**4*MU**3*
# MW**2*sth**2*(1 - 2*sth**2)**2) normalization.
partial_widths = {(P.W__plus__,P.d):'((16*ee**2*LambdaSMEFT**4*MD**4 - 32*ee**2*LambdaSMEFT**4*MD**2*MU**2 + 16*ee**2*LambdaSMEFT**4*MU**4 + 16*ee**2*LambdaSMEFT**4*MD**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MU**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**4*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth**2 + 64*ee**2*LambdaSMEFT**4*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth**4 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth**3*vevhat**2 - 8*cHWB*cth*ee**2*sth**3*vevhat**4*(2*cHl311*MD**4 + 2*cHl322*MD**4 - cll1221*MD**4 - 4*cHl311*MD**2*MU**2 - 4*cHl322*MD**2*MU**2 + 2*cll1221*MD**2*MU**2 + 2*cHl311*MU**4 + 2*cHl322*MU**4 - cll1221*MU**4 + 2*cHl311*MD**2*MW**2 + 2*cHl322*MD**2*MW**2 - cll1221*MD**2*MW**2 + 2*cHl311*MU**2*MW**2 + 2*cHl322*MU**2*MW**2 - cll1221*MU**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 8*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 24*cHudRe*MD*MU*MW**2*ydo*yup) - 8*ee**2*LambdaSMEFT**2*vevhat**2*(2*cHl311*MD**4 + 2*cHl322*MD**4 - cll1221*MD**4 - 4*cHl311*MD**2*MU**2 - 4*cHl322*MD**2*MU**2 + 2*cll1221*MD**2*MU**2 + 2*cHl311*MU**4 + 2*cHl322*MU**4 - cll1221*MU**4 + 2*cHl311*MD**2*MW**2 + 2*cHl322*MD**2*MW**2 - cll1221*MD**2*MW**2 + 2*cHl311*MU**2*MW**2 + 2*cHl322*MU**2*MW**2 - cll1221*MU**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) + 8*cHWB*cth*ee**2*sth*vevhat**4*(2*cHl311*MD**4 + 2*cHl322*MD**4 - cll1221*MD**4 - 4*cHl311*MD**2*MU**2 - 4*cHl322*MD**2*MU**2 + 2*cll1221*MD**2*MU**2 + 
2*cHl311*MU**4 + 2*cHl322*MU**4 - cll1221*MU**4 + 2*cHl311*MD**2*MW**2 + 2*cHl322*MD**2*MW**2 - cll1221*MD**2*MW**2 + 2*cHl311*MU**2*MW**2 + 2*cHl322*MU**2*MW**2 - cll1221*MU**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) - 16*sth**4*vevhat**2*(2*cHl311*ee**2*LambdaSMEFT**2*MD**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MD**4 - cll1221*ee**2*LambdaSMEFT**2*MD**4 - 4*cHl311*ee**2*LambdaSMEFT**2*MD**2*MU**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MU**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MU**4 - cll1221*ee**2*LambdaSMEFT**2*MU**4 + 2*cHl311*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MD**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MU**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MU**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MU**2*MW**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 4*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 2*cll1221*ee**2*LambdaSMEFT**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 8*cHj3*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 64*cdWIm**2*MD**4*MW**2*ydo**2 + 64*cdWRe**2*MD**4*MW**2*ydo**2 - 128*cdWIm**2*MD**2*MU**2*MW**2*ydo**2 - 128*cdWRe**2*MD**2*MU**2*MW**2*ydo**2 + 64*cdWIm**2*MU**4*MW**2*ydo**2 + 64*cdWRe**2*MU**4*MW**2*ydo**2 - 32*cdWIm**2*MD**2*MW**4*ydo**2 - 32*cdWRe**2*MD**2*MW**4*ydo**2 - 32*cdWIm**2*MU**2*MW**4*ydo**2 - 32*cdWRe**2*MU**2*MW**4*ydo**2 - 32*cdWIm**2*MW**6*ydo**2 - 32*cdWRe**2*MW**6*ydo**2 - 24*cHudRe*ee**2*LambdaSMEFT**2*MD*MU*MW**2*ydo*yup + 384*cdWIm*cuWIm*MD*MU*MW**4*ydo*yup - 384*cdWRe*cuWRe*MD*MU*MW**4*ydo*yup + 64*cuWIm**2*MD**4*MW**2*yup**2 + 64*cuWRe**2*MD**4*MW**2*yup**2 - 
128*cuWIm**2*MD**2*MU**2*MW**2*yup**2 - 128*cuWRe**2*MD**2*MU**2*MW**2*yup**2 + 64*cuWIm**2*MU**4*MW**2*yup**2 + 64*cuWRe**2*MU**4*MW**2*yup**2 - 32*cuWIm**2*MD**2*MW**4*yup**2 - 32*cuWRe**2*MD**2*MW**4*yup**2 - 32*cuWIm**2*MU**2*MW**4*yup**2 - 32*cuWRe**2*MU**2*MW**4*yup**2 - 32*cuWIm**2*MW**6*yup**2 - 32*cuWRe**2*MW**6*yup**2) + 8*sth**2*vevhat**2*(6*cHl311*ee**2*LambdaSMEFT**2*MD**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MD**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MD**4 - 12*cHl311*ee**2*LambdaSMEFT**2*MD**2*MU**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 6*cll1221*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MU**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MU**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MU**4 + 6*cHl311*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MD**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MU**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MU**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MU**2*MW**2 - 12*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 6*cll1221*ee**2*LambdaSMEFT**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 16*cHj3*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 32*cdWIm**2*MD**4*MW**2*ydo**2 + 32*cdWRe**2*MD**4*MW**2*ydo**2 - 64*cdWIm**2*MD**2*MU**2*MW**2*ydo**2 - 64*cdWRe**2*MD**2*MU**2*MW**2*ydo**2 + 32*cdWIm**2*MU**4*MW**2*ydo**2 + 32*cdWRe**2*MU**4*MW**2*ydo**2 - 16*cdWIm**2*MD**2*MW**4*ydo**2 - 16*cdWRe**2*MD**2*MW**4*ydo**2 - 16*cdWIm**2*MU**2*MW**4*ydo**2 - 16*cdWRe**2*MU**2*MW**4*ydo**2 - 16*cdWIm**2*MW**6*ydo**2 - 16*cdWRe**2*MW**6*ydo**2 - 48*cHudRe*ee**2*LambdaSMEFT**2*MD*MU*MW**2*ydo*yup + 192*cdWIm*cuWIm*MD*MU*MW**4*ydo*yup - 192*cdWRe*cuWRe*MD*MU*MW**4*ydo*yup + 32*cuWIm**2*MD**4*MW**2*yup**2 + 32*cuWRe**2*MD**4*MW**2*yup**2 - 64*cuWIm**2*MD**2*MU**2*MW**2*yup**2 - 64*cuWRe**2*MD**2*MU**2*MW**2*yup**2 + 32*cuWIm**2*MU**4*MW**2*yup**2 + 
32*cuWRe**2*MU**4*MW**2*yup**2 - 16*cuWIm**2*MD**2*MW**4*yup**2 - 16*cuWRe**2*MD**2*MW**4*yup**2 - 16*cuWIm**2*MU**2*MW**4*yup**2 - 16*cuWRe**2*MU**2*MW**4*yup**2 - 16*cuWIm**2*MW**6*yup**2 - 16*cuWRe**2*MW**6*yup**2) - 512*MW**2*sth**6*vevhat**2*(cdWIm**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 + cdWRe**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 - 12*cdWIm*cuWIm*MD*MU*MW**2*ydo*yup + 12*cdWRe*cuWRe*MD*MU*MW**2*ydo*yup - (cuWIm**2 + cuWRe**2)*(2*MD**4 + 2*MU**4 - MU**2*MW**2 - MW**4 - MD**2*(4*MU**2 + MW**2))*yup**2) + ee**2*vevhat**4*(4*cHl311**2*MD**4 + 8*cHl311*cHl322*MD**4 + 4*cHl322**2*MD**4 - 4*cHl311*cll1221*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl311**2*MD**2*MU**2 - 16*cHl311*cHl322*MD**2*MU**2 - 8*cHl322**2*MD**2*MU**2 + 8*cHl311*cll1221*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl311**2*MU**4 + 8*cHl311*cHl322*MU**4 + 4*cHl322**2*MU**4 - 4*cHl311*cll1221*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl311**2*MD**2*MW**2 + 8*cHl311*cHl322*MD**2*MW**2 + 4*cHl322**2*MD**2*MW**2 - 4*cHl311*cll1221*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl311**2*MU**2*MW**2 + 8*cHl311*cHl322*MU**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 4*cHl311*cll1221*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 16*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 48*cHl311*cHudRe*MD*MU*MW**2*ydo*yup - 48*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 24*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 4*cHudIm**2*MD**4*ydo**2*yup**2 + 4*cHudRe**2*MD**4*ydo**2*yup**2 - 8*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 8*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 4*cHudIm**2*MU**4*ydo**2*yup**2 + 
4*cHudRe**2*MU**4*ydo**2*yup**2 + 4*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 8*cHudIm**2*MW**4*ydo**2*yup**2 - 8*cHudRe**2*MW**4*ydo**2*yup**2 - 8*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) - 2*cHDD*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 12*cHudRe*MD*MU*MW**2*ydo*yup)) - 2*ee**2*sth**2*vevhat**4*(4*cHl311**2*MD**4 + 8*cHl311*cHl322*MD**4 + 4*cHl322**2*MD**4 - 8*cHWB**2*MD**4 - 4*cHl311*cll1221*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl311**2*MD**2*MU**2 - 16*cHl311*cHl322*MD**2*MU**2 - 8*cHl322**2*MD**2*MU**2 + 16*cHWB**2*MD**2*MU**2 + 8*cHl311*cll1221*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl311**2*MU**4 + 8*cHl311*cHl322*MU**4 + 4*cHl322**2*MU**4 - 8*cHWB**2*MU**4 - 4*cHl311*cll1221*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl311**2*MD**2*MW**2 + 8*cHl311*cHl322*MD**2*MW**2 + 4*cHl322**2*MD**2*MW**2 - 8*cHWB**2*MD**2*MW**2 - 4*cHl311*cll1221*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl311**2*MU**2*MW**2 + 8*cHl311*cHl322*MU**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 8*cHWB**2*MU**2*MW**2 - 4*cHl311*cll1221*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 
16*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 32*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 72*cHl311*cHudRe*MD*MU*MW**2*ydo*yup - 72*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 36*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 8*cHudIm**2*MD**4*ydo**2*yup**2 + 8*cHudRe**2*MD**4*ydo**2*yup**2 - 16*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 16*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 8*cHudIm**2*MU**4*ydo**2*yup**2 + 8*cHudRe**2*MU**4*ydo**2*yup**2 + 8*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 8*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 8*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 8*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 16*cHudIm**2*MW**4*ydo**2*yup**2 - 16*cHudRe**2*MW**4*ydo**2*yup**2 - 12*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 16*cHudRe*MD*MU*MW**2*ydo*yup) - 2*cHDD*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 6*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 18*cHudRe*MD*MU*MW**2*ydo*yup)) + ee**2*sth**4*vevhat**4*(4*cHl311**2*MD**4 + 8*cHl311*cHl322*MD**4 + 4*cHl322**2*MD**4 - 16*cHWB**2*MD**4 - 4*cHl311*cll1221*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl311**2*MD**2*MU**2 - 16*cHl311*cHl322*MD**2*MU**2 - 8*cHl322**2*MD**2*MU**2 + 32*cHWB**2*MD**2*MU**2 + 8*cHl311*cll1221*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl311**2*MU**4 + 8*cHl311*cHl322*MU**4 + 
4*cHl322**2*MU**4 - 16*cHWB**2*MU**4 - 4*cHl311*cll1221*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl311**2*MD**2*MW**2 + 8*cHl311*cHl322*MD**2*MW**2 + 4*cHl322**2*MD**2*MW**2 - 16*cHWB**2*MD**2*MW**2 - 4*cHl311*cll1221*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl311**2*MU**2*MW**2 + 8*cHl311*cHl322*MU**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 16*cHWB**2*MU**2*MW**2 - 4*cHl311*cll1221*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 32*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 64*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 96*cHl311*cHudRe*MD*MU*MW**2*ydo*yup - 96*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 48*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 16*cHudIm**2*MD**4*ydo**2*yup**2 + 16*cHudRe**2*MD**4*ydo**2*yup**2 - 32*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 32*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 16*cHudIm**2*MU**4*ydo**2*yup**2 + 16*cHudRe**2*MU**4*ydo**2*yup**2 + 16*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 16*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 16*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 16*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 32*cHudIm**2*MW**4*ydo**2*yup**2 - 32*cHudRe**2*MW**4*ydo**2*yup**2 - 16*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 24*cHudRe*MD*MU*MW**2*ydo*yup) - 2*cHDD*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 8*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + 
MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 24*cHudRe*MD*MU*MW**2*ydo*yup)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) - 768*ee*LambdaSMEFT**2*MW**2*sth**3*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) + 768*ee*LambdaSMEFT**2*MW**2*sth**5*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) - 192*cHWB*cth*ee*MW**2*sth**2*vevhat**3*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) + 384*cHWB*cth*ee*MW**2*sth**4*vevhat**3*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) - 48*ee*MW**2*sth*vevhat**3*(yup*(2*cHl311*cuWRe*MD**2*MU + 2*cHl322*cuWRe*MD**2*MU - cll1221*cuWRe*MD**2*MU - 2*cHl311*cuWRe*MU**3 - 2*cHl322*cuWRe*MU**3 + cll1221*cuWRe*MU**3 + 2*cHl311*cuWRe*MU*MW**2 + 2*cHl322*cuWRe*MU*MW**2 - cll1221*cuWRe*MU*MW**2 + cHDD*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 4*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) + 2*cdWIm*cHudIm*MD**2*MU*ydo**2 - 2*cdWIm*cHudIm*MU**3*ydo**2 + 2*cdWIm*cHudIm*MU*MW**2*ydo**2 + 2*cHudIm*cuWIm*MD**3*ydo*yup - 2*cHudRe*cuWRe*MD**3*ydo*yup - 2*cHudIm*cuWIm*MD*MU**2*ydo*yup + 2*cHudRe*cuWRe*MD*MU**2*ydo*yup - 2*cHudIm*cuWIm*MD*MW**2*ydo*yup + 2*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(-2*cHl311*MD**3 - 2*cHl322*MD**3 + cll1221*MD**3 + 2*cHl311*MD*MU**2 + 2*cHl322*MD*MU**2 - cll1221*MD*MU**2 + 2*cHl311*MD*MW**2 + 2*cHl322*MD*MW**2 - cll1221*MD*MW**2 + 4*cHj3*MD*(MD**2 - MU**2 - MW**2) + cHDD*MD*(-MD**2 + MU**2 + MW**2) + 2*cHudRe*MD**2*MU*ydo*yup - 2*cHudRe*MU**3*ydo*yup + 2*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2) - 96*ee*MW**2*sth**5*vevhat**3*(yup*(2*cHl311*cuWRe*MD**2*MU + 2*cHl322*cuWRe*MD**2*MU - cll1221*cuWRe*MD**2*MU - 2*cHl311*cuWRe*MU**3 - 2*cHl322*cuWRe*MU**3 + cll1221*cuWRe*MU**3 + 2*cHl311*cuWRe*MU*MW**2 
+ 2*cHl322*cuWRe*MU*MW**2 - cll1221*cuWRe*MU*MW**2 + cHDD*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 8*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) + 4*cdWIm*cHudIm*MD**2*MU*ydo**2 - 4*cdWIm*cHudIm*MU**3*ydo**2 + 4*cdWIm*cHudIm*MU*MW**2*ydo**2 + 4*cHudIm*cuWIm*MD**3*ydo*yup - 4*cHudRe*cuWRe*MD**3*ydo*yup - 4*cHudIm*cuWIm*MD*MU**2*ydo*yup + 4*cHudRe*cuWRe*MD*MU**2*ydo*yup - 4*cHudIm*cuWIm*MD*MW**2*ydo*yup + 4*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(-2*cHl311*MD**3 - 2*cHl322*MD**3 + cll1221*MD**3 + 2*cHl311*MD*MU**2 + 2*cHl322*MD*MU**2 - cll1221*MD*MU**2 + 2*cHl311*MD*MW**2 + 2*cHl322*MD*MW**2 - cll1221*MD*MW**2 + 8*cHj3*MD*(MD**2 - MU**2 - MW**2) + cHDD*MD*(-MD**2 + MU**2 + MW**2) + 4*cHudRe*MD**2*MU*ydo*yup - 4*cHudRe*MU**3*ydo*yup + 4*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2) + 48*ee*MW**2*sth**3*vevhat**3*(yup*(6*cHl311*cuWRe*MD**2*MU + 6*cHl322*cuWRe*MD**2*MU - 3*cll1221*cuWRe*MD**2*MU - 6*cHl311*cuWRe*MU**3 - 6*cHl322*cuWRe*MU**3 + 3*cll1221*cuWRe*MU**3 + 6*cHl311*cuWRe*MU*MW**2 + 6*cHl322*cuWRe*MU*MW**2 - 3*cll1221*cuWRe*MU*MW**2 + 3*cHDD*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 16*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) + 8*cdWIm*cHudIm*MD**2*MU*ydo**2 - 8*cdWIm*cHudIm*MU**3*ydo**2 + 8*cdWIm*cHudIm*MU*MW**2*ydo**2 + 8*cHudIm*cuWIm*MD**3*ydo*yup - 8*cHudRe*cuWRe*MD**3*ydo*yup - 8*cHudIm*cuWIm*MD*MU**2*ydo*yup + 8*cHudRe*cuWRe*MD*MU**2*ydo*yup - 8*cHudIm*cuWIm*MD*MW**2*ydo*yup + 8*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(-6*cHl311*MD**3 - 6*cHl322*MD**3 + 3*cll1221*MD**3 + 6*cHl311*MD*MU**2 + 6*cHl322*MD*MU**2 - 3*cll1221*MD*MU**2 + 6*cHl311*MD*MW**2 + 6*cHl322*MD*MW**2 - 3*cll1221*MD*MW**2 + 16*cHj3*MD*(MD**2 - MU**2 - MW**2) + 3*cHDD*MD*(-MD**2 + MU**2 + MW**2) + 8*cHudRe*MD**2*MU*ydo*yup - 8*cHudRe*MU**3*ydo*yup + 8*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2))*cmath.sqrt(MD**4 + (MU**2 - MW**2)**2 - 2*MD**2*(MU**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MU**3*MW**2*sth**2*(1 - 2*sth**2)**2)'})
Decay_W__plus__ = Decay(name = 'Decay_W__plus__',
particle = P.W__plus__,
partial_widths = {(P.c,P.s__tilde__):'-((16*ee**2*LambdaSMEFT**4*MC**4 - 32*ee**2*LambdaSMEFT**4*MC**2*MS**2 + 16*ee**2*LambdaSMEFT**4*MS**4 + 16*ee**2*LambdaSMEFT**4*MC**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MS**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**4*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth**2 + 64*ee**2*LambdaSMEFT**4*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth**4 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2))*sth**3*vevhat**2 - 8*cHWB*cth*ee**2*sth**3*vevhat**4*(2*cHl311*MC**4 + 2*cHl322*MC**4 - cll1221*MC**4 - 4*cHl311*MC**2*MS**2 - 4*cHl322*MC**2*MS**2 + 2*cll1221*MC**2*MS**2 + 2*cHl311*MS**4 + 2*cHl322*MS**4 - cll1221*MS**4 + 2*cHl311*MC**2*MW**2 + 2*cHl322*MC**2*MW**2 - cll1221*MC**2*MW**2 + 2*cHl311*MS**2*MW**2 + 2*cHl322*MS**2*MW**2 - cll1221*MS**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 8*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 24*cHudRe*MC*MS*MW**2*yc*ys) - 8*ee**2*LambdaSMEFT**2*vevhat**2*(2*cHl311*MC**4 + 2*cHl322*MC**4 - cll1221*MC**4 - 4*cHl311*MC**2*MS**2 - 4*cHl322*MC**2*MS**2 + 2*cll1221*MC**2*MS**2 + 2*cHl311*MS**4 + 2*cHl322*MS**4 - cll1221*MS**4 + 2*cHl311*MC**2*MW**2 + 2*cHl322*MC**2*MW**2 - cll1221*MC**2*MW**2 + 2*cHl311*MS**2*MW**2 + 2*cHl322*MS**2*MW**2 - cll1221*MS**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) + 8*cHWB*cth*ee**2*sth*vevhat**4*(2*cHl311*MC**4 + 2*cHl322*MC**4 - cll1221*MC**4 - 4*cHl311*MC**2*MS**2 - 4*cHl322*MC**2*MS**2 + 2*cll1221*MC**2*MS**2 + 2*cHl311*MS**4 
+ 2*cHl322*MS**4 - cll1221*MS**4 + 2*cHl311*MC**2*MW**2 + 2*cHl322*MC**2*MW**2 - cll1221*MC**2*MW**2 + 2*cHl311*MS**2*MW**2 + 2*cHl322*MS**2*MW**2 - cll1221*MS**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) - 16*sth**4*vevhat**2*(2*cHl311*ee**2*LambdaSMEFT**2*MC**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MC**4 - cll1221*ee**2*LambdaSMEFT**2*MC**4 - 4*cHl311*ee**2*LambdaSMEFT**2*MC**2*MS**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MS**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MS**4 - cll1221*ee**2*LambdaSMEFT**2*MS**4 + 2*cHl311*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MC**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MS**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MS**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MS**2*MW**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 4*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 2*cll1221*ee**2*LambdaSMEFT**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 8*cHj3*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 64*cuWIm**2*MC**4*MW**2*yc**2 + 64*cuWRe**2*MC**4*MW**2*yc**2 - 128*cuWIm**2*MC**2*MS**2*MW**2*yc**2 - 128*cuWRe**2*MC**2*MS**2*MW**2*yc**2 + 64*cuWIm**2*MS**4*MW**2*yc**2 + 64*cuWRe**2*MS**4*MW**2*yc**2 - 32*cuWIm**2*MC**2*MW**4*yc**2 - 32*cuWRe**2*MC**2*MW**4*yc**2 - 32*cuWIm**2*MS**2*MW**4*yc**2 - 32*cuWRe**2*MS**2*MW**4*yc**2 - 32*cuWIm**2*MW**6*yc**2 - 32*cuWRe**2*MW**6*yc**2 - 24*cHudRe*ee**2*LambdaSMEFT**2*MC*MS*MW**2*yc*ys + 384*cdWIm*cuWIm*MC*MS*MW**4*yc*ys - 384*cdWRe*cuWRe*MC*MS*MW**4*yc*ys + 64*cdWIm**2*MC**4*MW**2*ys**2 + 64*cdWRe**2*MC**4*MW**2*ys**2 - 128*cdWIm**2*MC**2*MS**2*MW**2*ys**2 - 
128*cdWRe**2*MC**2*MS**2*MW**2*ys**2 + 64*cdWIm**2*MS**4*MW**2*ys**2 + 64*cdWRe**2*MS**4*MW**2*ys**2 - 32*cdWIm**2*MC**2*MW**4*ys**2 - 32*cdWRe**2*MC**2*MW**4*ys**2 - 32*cdWIm**2*MS**2*MW**4*ys**2 - 32*cdWRe**2*MS**2*MW**4*ys**2 - 32*cdWIm**2*MW**6*ys**2 - 32*cdWRe**2*MW**6*ys**2) + 8*sth**2*vevhat**2*(6*cHl311*ee**2*LambdaSMEFT**2*MC**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MC**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MC**4 - 12*cHl311*ee**2*LambdaSMEFT**2*MC**2*MS**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 6*cll1221*ee**2*LambdaSMEFT**2*MC**2*MS**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MS**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MS**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MS**4 + 6*cHl311*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MC**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MC**2*MW**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MS**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MS**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MS**2*MW**2 - 12*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 6*cll1221*ee**2*LambdaSMEFT**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 16*cHj3*ee**2*LambdaSMEFT**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 32*cuWIm**2*MC**4*MW**2*yc**2 + 32*cuWRe**2*MC**4*MW**2*yc**2 - 64*cuWIm**2*MC**2*MS**2*MW**2*yc**2 - 64*cuWRe**2*MC**2*MS**2*MW**2*yc**2 + 32*cuWIm**2*MS**4*MW**2*yc**2 + 32*cuWRe**2*MS**4*MW**2*yc**2 - 16*cuWIm**2*MC**2*MW**4*yc**2 - 16*cuWRe**2*MC**2*MW**4*yc**2 - 16*cuWIm**2*MS**2*MW**4*yc**2 - 16*cuWRe**2*MS**2*MW**4*yc**2 - 16*cuWIm**2*MW**6*yc**2 - 16*cuWRe**2*MW**6*yc**2 - 48*cHudRe*ee**2*LambdaSMEFT**2*MC*MS*MW**2*yc*ys + 192*cdWIm*cuWIm*MC*MS*MW**4*yc*ys - 192*cdWRe*cuWRe*MC*MS*MW**4*yc*ys + 32*cdWIm**2*MC**4*MW**2*ys**2 + 32*cdWRe**2*MC**4*MW**2*ys**2 - 64*cdWIm**2*MC**2*MS**2*MW**2*ys**2 - 64*cdWRe**2*MC**2*MS**2*MW**2*ys**2 + 32*cdWIm**2*MS**4*MW**2*ys**2 + 32*cdWRe**2*MS**4*MW**2*ys**2 - 16*cdWIm**2*MC**2*MW**4*ys**2 - 
16*cdWRe**2*MC**2*MW**4*ys**2 - 16*cdWIm**2*MS**2*MW**4*ys**2 - 16*cdWRe**2*MS**2*MW**4*ys**2 - 16*cdWIm**2*MW**6*ys**2 - 16*cdWRe**2*MW**6*ys**2) - 512*MW**2*sth**6*vevhat**2*(cuWIm**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 + cuWRe**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 - 12*cdWIm*cuWIm*MC*MS*MW**2*yc*ys + 12*cdWRe*cuWRe*MC*MS*MW**2*yc*ys - (cdWIm**2 + cdWRe**2)*(2*MC**4 + 2*MS**4 - MS**2*MW**2 - MW**4 - MC**2*(4*MS**2 + MW**2))*ys**2) + ee**2*vevhat**4*(4*cHl311**2*MC**4 + 8*cHl311*cHl322*MC**4 + 4*cHl322**2*MC**4 - 4*cHl311*cll1221*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl311**2*MC**2*MS**2 - 16*cHl311*cHl322*MC**2*MS**2 - 8*cHl322**2*MC**2*MS**2 + 8*cHl311*cll1221*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl311**2*MS**4 + 8*cHl311*cHl322*MS**4 + 4*cHl322**2*MS**4 - 4*cHl311*cll1221*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl311**2*MC**2*MW**2 + 8*cHl311*cHl322*MC**2*MW**2 + 4*cHl322**2*MC**2*MW**2 - 4*cHl311*cll1221*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl311**2*MS**2*MW**2 + 8*cHl311*cHl322*MS**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 4*cHl311*cll1221*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 16*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 48*cHl311*cHudRe*MC*MS*MW**2*yc*ys - 48*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 24*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 4*cHudIm**2*MC**4*yc**2*ys**2 + 4*cHudRe**2*MC**4*yc**2*ys**2 - 8*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 8*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 4*cHudIm**2*MS**4*yc**2*ys**2 + 4*cHudRe**2*MS**4*yc**2*ys**2 + 4*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 
4*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 8*cHudIm**2*MW**4*yc**2*ys**2 - 8*cHudRe**2*MW**4*yc**2*ys**2 - 8*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) - 2*cHDD*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 12*cHudRe*MC*MS*MW**2*yc*ys)) - 2*ee**2*sth**2*vevhat**4*(4*cHl311**2*MC**4 + 8*cHl311*cHl322*MC**4 + 4*cHl322**2*MC**4 - 8*cHWB**2*MC**4 - 4*cHl311*cll1221*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl311**2*MC**2*MS**2 - 16*cHl311*cHl322*MC**2*MS**2 - 8*cHl322**2*MC**2*MS**2 + 16*cHWB**2*MC**2*MS**2 + 8*cHl311*cll1221*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl311**2*MS**4 + 8*cHl311*cHl322*MS**4 + 4*cHl322**2*MS**4 - 8*cHWB**2*MS**4 - 4*cHl311*cll1221*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl311**2*MC**2*MW**2 + 8*cHl311*cHl322*MC**2*MW**2 + 4*cHl322**2*MC**2*MW**2 - 8*cHWB**2*MC**2*MW**2 - 4*cHl311*cll1221*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl311**2*MS**2*MW**2 + 8*cHl311*cHl322*MS**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 8*cHWB**2*MS**2*MW**2 - 4*cHl311*cll1221*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 16*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MC**4 + MS**4 + MS**2*MW**2 - 
2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 32*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 72*cHl311*cHudRe*MC*MS*MW**2*yc*ys - 72*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 36*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 8*cHudIm**2*MC**4*yc**2*ys**2 + 8*cHudRe**2*MC**4*yc**2*ys**2 - 16*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 16*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 8*cHudIm**2*MS**4*yc**2*ys**2 + 8*cHudRe**2*MS**4*yc**2*ys**2 + 8*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 8*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 8*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 8*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 16*cHudIm**2*MW**4*yc**2*ys**2 - 16*cHudRe**2*MW**4*yc**2*ys**2 - 12*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 16*cHudRe*MC*MS*MW**2*yc*ys) - 2*cHDD*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 6*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 18*cHudRe*MC*MS*MW**2*yc*ys)) + ee**2*sth**4*vevhat**4*(4*cHl311**2*MC**4 + 8*cHl311*cHl322*MC**4 + 4*cHl322**2*MC**4 - 16*cHWB**2*MC**4 - 4*cHl311*cll1221*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl311**2*MC**2*MS**2 - 16*cHl311*cHl322*MC**2*MS**2 - 8*cHl322**2*MC**2*MS**2 + 32*cHWB**2*MC**2*MS**2 + 8*cHl311*cll1221*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl311**2*MS**4 + 8*cHl311*cHl322*MS**4 + 4*cHl322**2*MS**4 - 16*cHWB**2*MS**4 - 4*cHl311*cll1221*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl311**2*MC**2*MW**2 + 8*cHl311*cHl322*MC**2*MW**2 + 
4*cHl322**2*MC**2*MW**2 - 16*cHWB**2*MC**2*MW**2 - 4*cHl311*cll1221*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl311**2*MS**2*MW**2 + 8*cHl311*cHl322*MS**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 16*cHWB**2*MS**2*MW**2 - 4*cHl311*cll1221*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 32*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 64*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 96*cHl311*cHudRe*MC*MS*MW**2*yc*ys - 96*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 48*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 16*cHudIm**2*MC**4*yc**2*ys**2 + 16*cHudRe**2*MC**4*yc**2*ys**2 - 32*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 32*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 16*cHudIm**2*MS**4*yc**2*ys**2 + 16*cHudRe**2*MS**4*yc**2*ys**2 + 16*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 16*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 16*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 16*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 32*cHudIm**2*MW**4*yc**2*ys**2 - 32*cHudRe**2*MW**4*yc**2*ys**2 - 16*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 24*cHudRe*MC*MS*MW**2*yc*ys) - 2*cHDD*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 8*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 24*cHudRe*MC*MS*MW**2*yc*ys)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cuWRe*MC*(-MC**2 + 
MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) - 768*ee*LambdaSMEFT**2*MW**2*sth**3*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) + 768*ee*LambdaSMEFT**2*MW**2*sth**5*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) - 192*cHWB*cth*ee*MW**2*sth**2*vevhat**3*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) + 384*cHWB*cth*ee*MW**2*sth**4*vevhat**3*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) - 48*ee*MW**2*sth*vevhat**3*(-2*cHl311*cuWRe*MC**3*yc - 2*cHl322*cuWRe*MC**3*yc + cll1221*cuWRe*MC**3*yc + 2*cHl311*cuWRe*MC*MS**2*yc + 2*cHl322*cuWRe*MC*MS**2*yc - cll1221*cuWRe*MC*MS**2*yc + 2*cHl311*cuWRe*MC*MW**2*yc + 2*cHl322*cuWRe*MC*MW**2*yc - cll1221*cuWRe*MC*MW**2*yc + 4*cHj3*cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cHDD*cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + 2*cdWRe*cHl311*MC**2*MS*ys + 2*cdWRe*cHl322*MC**2*MS*ys - cdWRe*cll1221*MC**2*MS*ys - 2*cdWRe*cHl311*MS**3*ys - 2*cdWRe*cHl322*MS**3*ys + cdWRe*cll1221*MS**3*ys + 2*cdWRe*cHl311*MS*MW**2*ys + 2*cdWRe*cHl322*MS*MW**2*ys - cdWRe*cll1221*MS*MW**2*ys + 4*cdWRe*cHj3*MS*(-MC**2 + MS**2 - MW**2)*ys + cdWRe*cHDD*MS*(MC**2 - MS**2 + MW**2)*ys - 2*cHudIm*cuWIm*MC**2*MS*yc**2*ys + 2*cHudRe*cuWRe*MC**2*MS*yc**2*ys + 2*cHudIm*cuWIm*MS**3*yc**2*ys - 2*cHudRe*cuWRe*MS**3*yc**2*ys - 2*cHudIm*cuWIm*MS*MW**2*yc**2*ys + 2*cHudRe*cuWRe*MS*MW**2*yc**2*ys - 2*cdWIm*cHudIm*MC**3*yc*ys**2 - 2*cdWRe*cHudRe*MC**3*yc*ys**2 + 2*cdWIm*cHudIm*MC*MS**2*yc*ys**2 + 2*cdWRe*cHudRe*MC*MS**2*yc*ys**2 + 2*cdWIm*cHudIm*MC*MW**2*yc*ys**2 + 2*cdWRe*cHudRe*MC*MW**2*yc*ys**2)*cmath.sqrt(2) - 96*ee*MW**2*sth**5*vevhat**3*(-2*cHl311*cuWRe*MC**3*yc - 2*cHl322*cuWRe*MC**3*yc + cll1221*cuWRe*MC**3*yc + 2*cHl311*cuWRe*MC*MS**2*yc + 2*cHl322*cuWRe*MC*MS**2*yc - cll1221*cuWRe*MC*MS**2*yc + 2*cHl311*cuWRe*MC*MW**2*yc + 2*cHl322*cuWRe*MC*MW**2*yc - 
cll1221*cuWRe*MC*MW**2*yc + 8*cHj3*cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cHDD*cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + 2*cdWRe*cHl311*MC**2*MS*ys + 2*cdWRe*cHl322*MC**2*MS*ys - cdWRe*cll1221*MC**2*MS*ys - 2*cdWRe*cHl311*MS**3*ys - 2*cdWRe*cHl322*MS**3*ys + cdWRe*cll1221*MS**3*ys + 2*cdWRe*cHl311*MS*MW**2*ys + 2*cdWRe*cHl322*MS*MW**2*ys - cdWRe*cll1221*MS*MW**2*ys + 8*cdWRe*cHj3*MS*(-MC**2 + MS**2 - MW**2)*ys + cdWRe*cHDD*MS*(MC**2 - MS**2 + MW**2)*ys - 4*cHudIm*cuWIm*MC**2*MS*yc**2*ys + 4*cHudRe*cuWRe*MC**2*MS*yc**2*ys + 4*cHudIm*cuWIm*MS**3*yc**2*ys - 4*cHudRe*cuWRe*MS**3*yc**2*ys - 4*cHudIm*cuWIm*MS*MW**2*yc**2*ys + 4*cHudRe*cuWRe*MS*MW**2*yc**2*ys - 4*cdWIm*cHudIm*MC**3*yc*ys**2 - 4*cdWRe*cHudRe*MC**3*yc*ys**2 + 4*cdWIm*cHudIm*MC*MS**2*yc*ys**2 + 4*cdWRe*cHudRe*MC*MS**2*yc*ys**2 + 4*cdWIm*cHudIm*MC*MW**2*yc*ys**2 + 4*cdWRe*cHudRe*MC*MW**2*yc*ys**2)*cmath.sqrt(2) + 48*ee*MW**2*sth**3*vevhat**3*(-6*cHl311*cuWRe*MC**3*yc - 6*cHl322*cuWRe*MC**3*yc + 3*cll1221*cuWRe*MC**3*yc + 6*cHl311*cuWRe*MC*MS**2*yc + 6*cHl322*cuWRe*MC*MS**2*yc - 3*cll1221*cuWRe*MC*MS**2*yc + 6*cHl311*cuWRe*MC*MW**2*yc + 6*cHl322*cuWRe*MC*MW**2*yc - 3*cll1221*cuWRe*MC*MW**2*yc + 6*cdWRe*cHl311*MC**2*MS*ys + 6*cdWRe*cHl322*MC**2*MS*ys - 3*cdWRe*cll1221*MC**2*MS*ys - 6*cdWRe*cHl311*MS**3*ys - 6*cdWRe*cHl322*MS**3*ys + 3*cdWRe*cll1221*MS**3*ys + 6*cdWRe*cHl311*MS*MW**2*ys + 6*cdWRe*cHl322*MS*MW**2*ys - 3*cdWRe*cll1221*MS*MW**2*ys - 8*cHudIm*cuWIm*MC**2*MS*yc**2*ys + 8*cHudRe*cuWRe*MC**2*MS*yc**2*ys + 8*cHudIm*cuWIm*MS**3*yc**2*ys - 8*cHudRe*cuWRe*MS**3*yc**2*ys - 8*cHudIm*cuWIm*MS*MW**2*yc**2*ys + 8*cHudRe*cuWRe*MS*MW**2*yc**2*ys - 8*cdWIm*cHudIm*MC**3*yc*ys**2 - 8*cdWRe*cHudRe*MC**3*yc*ys**2 + 8*cdWIm*cHudIm*MC*MS**2*yc*ys**2 + 8*cdWRe*cHudRe*MC*MS**2*yc*ys**2 + 8*cdWIm*cHudIm*MC*MW**2*yc*ys**2 + 8*cdWRe*cHudRe*MC*MW**2*yc*ys**2 + 16*cHj3*(cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cdWRe*MS*(-MC**2 + MS**2 - MW**2)*ys) + 3*cHDD*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + 
MW**2)*ys))*cmath.sqrt(2))*cmath.sqrt(MC**4 + (MS**2 - MW**2)**2 - 2*MC**2*(MS**2 + MW**2)))/(512.*cmath.pi*LambdaSMEFT**4*MW**5*(sth - 2*sth**3)**2)',
(P.t,P.b__tilde__):'-((16*ee**2*LambdaSMEFT**4*MB**4 - 32*ee**2*LambdaSMEFT**4*MB**2*MT**2 + 16*ee**2*LambdaSMEFT**4*MT**4 + 16*ee**2*LambdaSMEFT**4*MB**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MT**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**4*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth**2 + 64*ee**2*LambdaSMEFT**4*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth**4 - 8*ee**2*LambdaSMEFT**2*(2*cHl322*MB**4 - 4*cHQ3*MB**4 - cll1221*MB**4 - 4*cHl322*MB**2*MT**2 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 + 2*cHl322*MT**4 - 4*cHQ3*MT**4 - cll1221*MT**4 + 2*cHl322*MB**2*MW**2 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 + 2*cHl322*MT**2*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 - 4*cHl322*MW**4 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + cHDD*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*vevhat**2 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth*vevhat**2 + 8*(6*cHl322*ee**2*LambdaSMEFT**2*MB**4 - 16*cHQ3*ee**2*LambdaSMEFT**2*MB**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MB**4 - 12*cHl322*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 32*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 6*cll1221*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MT**4 - 16*cHQ3*ee**2*LambdaSMEFT**2*MT**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MT**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MB**2*MW**2 - 16*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MB**2*MW**2 + 32*cbWIm**2*MB**4*MW**2 + 32*cbWRe**2*MB**4*MW**2 + 32*ctWIm**2*MB**4*MW**2 + 32*ctWRe**2*MB**4*MW**2 - 48*cHtbRe*ee**2*LambdaSMEFT**2*MB*MT*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 16*cHQ3*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 64*cbWIm**2*MB**2*MT**2*MW**2 - 64*cbWRe**2*MB**2*MT**2*MW**2 - 64*ctWIm**2*MB**2*MT**2*MW**2 - 
64*ctWRe**2*MB**2*MT**2*MW**2 + 32*cbWIm**2*MT**4*MW**2 + 32*cbWRe**2*MT**4*MW**2 + 32*ctWIm**2*MT**4*MW**2 + 32*ctWRe**2*MT**4*MW**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 32*cHQ3*ee**2*LambdaSMEFT**2*MW**4 + 6*cll1221*ee**2*LambdaSMEFT**2*MW**4 - 16*cbWIm**2*MB**2*MW**4 - 16*cbWRe**2*MB**2*MW**4 - 16*ctWIm**2*MB**2*MW**4 - 16*ctWRe**2*MB**2*MW**4 + 192*cbWIm*ctWIm*MB*MT*MW**4 - 192*cbWRe*ctWRe*MB*MT*MW**4 - 16*cbWIm**2*MT**2*MW**4 - 16*cbWRe**2*MT**2*MW**4 - 16*ctWIm**2*MT**2*MW**4 - 16*ctWRe**2*MT**2*MW**4 - 16*cbWIm**2*MW**6 - 16*cbWRe**2*MW**6 - 16*ctWIm**2*MW**6 - 16*ctWRe**2*MW**6 + 3*cHDD*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 6*cHl311*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth**2*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))*sth**3*vevhat**2 - 16*(2*cHl322*ee**2*LambdaSMEFT**2*MB**4 - 8*cHQ3*ee**2*LambdaSMEFT**2*MB**4 - cll1221*ee**2*LambdaSMEFT**2*MB**4 - 4*cHl322*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 16*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MB**2*MT**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MT**4 - 8*cHQ3*ee**2*LambdaSMEFT**2*MT**4 - cll1221*ee**2*LambdaSMEFT**2*MT**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MB**2*MW**2 - 8*cHQ3*ee**2*LambdaSMEFT**2*MB**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MB**2*MW**2 + 64*cbWIm**2*MB**4*MW**2 + 64*cbWRe**2*MB**4*MW**2 + 64*ctWIm**2*MB**4*MW**2 + 64*ctWRe**2*MB**4*MW**2 - 24*cHtbRe*ee**2*LambdaSMEFT**2*MB*MT*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 8*cHQ3*ee**2*LambdaSMEFT**2*MT**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MT**2*MW**2 - 128*cbWIm**2*MB**2*MT**2*MW**2 - 128*cbWRe**2*MB**2*MT**2*MW**2 - 128*ctWIm**2*MB**2*MT**2*MW**2 - 128*ctWRe**2*MB**2*MT**2*MW**2 + 64*cbWIm**2*MT**4*MW**2 + 64*cbWRe**2*MT**4*MW**2 + 64*ctWIm**2*MT**4*MW**2 + 64*ctWRe**2*MT**4*MW**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 
16*cHQ3*ee**2*LambdaSMEFT**2*MW**4 + 2*cll1221*ee**2*LambdaSMEFT**2*MW**4 - 32*cbWIm**2*MB**2*MW**4 - 32*cbWRe**2*MB**2*MW**4 - 32*ctWIm**2*MB**2*MW**4 - 32*ctWRe**2*MB**2*MW**4 + 384*cbWIm*ctWIm*MB*MT*MW**4 - 384*cbWRe*ctWRe*MB*MT*MW**4 - 32*cbWIm**2*MT**2*MW**4 - 32*cbWRe**2*MT**2*MW**4 - 32*ctWIm**2*MT**2*MW**4 - 32*ctWRe**2*MT**2*MW**4 - 32*cbWIm**2*MW**6 - 32*cbWRe**2*MW**6 - 32*ctWIm**2*MW**6 - 32*ctWRe**2*MW**6 + cHDD*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*ee**2*LambdaSMEFT**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth**4*vevhat**2 - 512*MW**2*(-12*cbWIm*ctWIm*MB*MT*MW**2 + 12*cbWRe*ctWRe*MB*MT*MW**2 - (ctWIm**2 + ctWRe**2)*(2*MB**4 + 2*MT**4 - MT**2*MW**2 - MW**4 - MB**2*(4*MT**2 + MW**2)) + cbWIm**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)) + cbWRe**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)))*sth**6*vevhat**2 + ee**2*(4*cHl322**2*MB**4 - 16*cHl322*cHQ3*MB**4 + 16*cHQ3**2*MB**4 + 4*cHtbIm**2*MB**4 + 4*cHtbRe**2*MB**4 - 4*cHl322*cll1221*MB**4 + 8*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 8*cHl322**2*MB**2*MT**2 + 32*cHl322*cHQ3*MB**2*MT**2 - 32*cHQ3**2*MB**2*MT**2 - 8*cHtbIm**2*MB**2*MT**2 - 8*cHtbRe**2*MB**2*MT**2 + 8*cHl322*cll1221*MB**2*MT**2 - 16*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 4*cHl322**2*MT**4 - 16*cHl322*cHQ3*MT**4 + 16*cHQ3**2*MT**4 + 4*cHtbIm**2*MT**4 + 4*cHtbRe**2*MT**4 - 4*cHl322*cll1221*MT**4 + 8*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 4*cHl322**2*MB**2*MW**2 - 16*cHl322*cHQ3*MB**2*MW**2 + 16*cHQ3**2*MB**2*MW**2 + 4*cHtbIm**2*MB**2*MW**2 + 4*cHtbRe**2*MB**2*MW**2 - 4*cHl322*cll1221*MB**2*MW**2 + 8*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 - 48*cHl322*cHtbRe*MB*MT*MW**2 + 96*cHQ3*cHtbRe*MB*MT*MW**2 + 24*cHtbRe*cll1221*MB*MT*MW**2 + 4*cHl322**2*MT**2*MW**2 - 16*cHl322*cHQ3*MT**2*MW**2 + 16*cHQ3**2*MT**2*MW**2 + 4*cHtbIm**2*MT**2*MW**2 + 4*cHtbRe**2*MT**2*MW**2 - 
4*cHl322*cll1221*MT**2*MW**2 + 8*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 8*cHl322**2*MW**4 + 32*cHl322*cHQ3*MW**4 - 32*cHQ3**2*MW**4 - 8*cHtbIm**2*MW**4 - 8*cHtbRe**2*MW**4 + 8*cHl322*cll1221*MW**4 - 16*cHQ3*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHDD*(-4*cHQ3*MB**4 - cll1221*MB**4 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 4*cHQ3*MT**4 - cll1221*MT**4 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 4*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))))*vevhat**4 + 8*cHWB*cth*ee**2*(2*cHl322*MB**4 - 4*cHQ3*MB**4 - cll1221*MB**4 - 4*cHl322*MB**2*MT**2 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 + 2*cHl322*MT**4 - 4*cHQ3*MT**4 - cll1221*MT**4 + 2*cHl322*MB**2*MW**2 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 + 2*cHl322*MT**2*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 - 4*cHl322*MW**4 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + cHDD*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth*vevhat**4 - 2*ee**2*(4*cHl322**2*MB**4 - 24*cHl322*cHQ3*MB**4 + 32*cHQ3**2*MB**4 + 8*cHtbIm**2*MB**4 + 8*cHtbRe**2*MB**4 - 8*cHWB**2*MB**4 - 4*cHl322*cll1221*MB**4 + 12*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 8*cHl322**2*MB**2*MT**2 + 48*cHl322*cHQ3*MB**2*MT**2 - 64*cHQ3**2*MB**2*MT**2 - 
16*cHtbIm**2*MB**2*MT**2 - 16*cHtbRe**2*MB**2*MT**2 + 16*cHWB**2*MB**2*MT**2 + 8*cHl322*cll1221*MB**2*MT**2 - 24*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 4*cHl322**2*MT**4 - 24*cHl322*cHQ3*MT**4 + 32*cHQ3**2*MT**4 + 8*cHtbIm**2*MT**4 + 8*cHtbRe**2*MT**4 - 8*cHWB**2*MT**4 - 4*cHl322*cll1221*MT**4 + 12*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 4*cHl322**2*MB**2*MW**2 - 24*cHl322*cHQ3*MB**2*MW**2 + 32*cHQ3**2*MB**2*MW**2 + 8*cHtbIm**2*MB**2*MW**2 + 8*cHtbRe**2*MB**2*MW**2 - 8*cHWB**2*MB**2*MW**2 - 4*cHl322*cll1221*MB**2*MW**2 + 12*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 - 72*cHl322*cHtbRe*MB*MT*MW**2 + 192*cHQ3*cHtbRe*MB*MT*MW**2 + 36*cHtbRe*cll1221*MB*MT*MW**2 + 4*cHl322**2*MT**2*MW**2 - 24*cHl322*cHQ3*MT**2*MW**2 + 32*cHQ3**2*MT**2*MW**2 + 8*cHtbIm**2*MT**2*MW**2 + 8*cHtbRe**2*MT**2*MW**2 - 8*cHWB**2*MT**2*MW**2 - 4*cHl322*cll1221*MT**2*MW**2 + 12*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 8*cHl322**2*MW**4 + 48*cHl322*cHQ3*MW**4 - 64*cHQ3**2*MW**4 - 16*cHtbIm**2*MW**4 - 16*cHtbRe**2*MW**4 + 16*cHWB**2*MW**4 + 8*cHl322*cll1221*MW**4 - 24*cHQ3*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHDD*(-6*cHQ3*MB**4 - cll1221*MB**4 + 12*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 6*cHQ3*MT**4 - cll1221*MT**4 - 6*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 18*cHtbRe*MB*MT*MW**2 - 6*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 12*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 18*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 6*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + 
MW**2))))*sth**2*vevhat**4 - 8*cHWB*cth*ee**2*(2*cHl322*MB**4 - 8*cHQ3*MB**4 - cll1221*MB**4 - 4*cHl322*MB**2*MT**2 + 16*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 + 2*cHl322*MT**4 - 8*cHQ3*MT**4 - cll1221*MT**4 + 2*cHl322*MB**2*MW**2 - 8*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 24*cHtbRe*MB*MT*MW**2 + 2*cHl322*MT**2*MW**2 - 8*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 - 4*cHl322*MW**4 + 16*cHQ3*MW**4 + 2*cll1221*MW**4 + cHDD*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*sth**3*vevhat**4 + ee**2*(4*cHl322**2*MB**4 - 32*cHl322*cHQ3*MB**4 + 64*cHQ3**2*MB**4 + 16*cHtbIm**2*MB**4 + 16*cHtbRe**2*MB**4 - 16*cHWB**2*MB**4 - 4*cHl322*cll1221*MB**4 + 16*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 8*cHl322**2*MB**2*MT**2 + 64*cHl322*cHQ3*MB**2*MT**2 - 128*cHQ3**2*MB**2*MT**2 - 32*cHtbIm**2*MB**2*MT**2 - 32*cHtbRe**2*MB**2*MT**2 + 32*cHWB**2*MB**2*MT**2 + 8*cHl322*cll1221*MB**2*MT**2 - 32*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 4*cHl322**2*MT**4 - 32*cHl322*cHQ3*MT**4 + 64*cHQ3**2*MT**4 + 16*cHtbIm**2*MT**4 + 16*cHtbRe**2*MT**4 - 16*cHWB**2*MT**4 - 4*cHl322*cll1221*MT**4 + 16*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 4*cHl322**2*MB**2*MW**2 - 32*cHl322*cHQ3*MB**2*MW**2 + 64*cHQ3**2*MB**2*MW**2 + 16*cHtbIm**2*MB**2*MW**2 + 16*cHtbRe**2*MB**2*MW**2 - 16*cHWB**2*MB**2*MW**2 - 4*cHl322*cll1221*MB**2*MW**2 + 16*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 - 96*cHl322*cHtbRe*MB*MT*MW**2 + 384*cHQ3*cHtbRe*MB*MT*MW**2 + 48*cHtbRe*cll1221*MB*MT*MW**2 + 4*cHl322**2*MT**2*MW**2 - 32*cHl322*cHQ3*MT**2*MW**2 + 64*cHQ3**2*MT**2*MW**2 + 16*cHtbIm**2*MT**2*MW**2 + 16*cHtbRe**2*MT**2*MW**2 - 16*cHWB**2*MT**2*MW**2 - 4*cHl322*cll1221*MT**2*MW**2 + 16*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 8*cHl322**2*MW**4 + 64*cHl322*cHQ3*MW**4 - 128*cHQ3**2*MW**4 - 32*cHtbIm**2*MW**4 - 32*cHtbRe**2*MW**4 + 32*cHWB**2*MW**4 + 8*cHl322*cll1221*MW**4 - 32*cHQ3*cll1221*MW**4 - 
2*cll1221**2*MW**4 + cHDD**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHDD*(-8*cHQ3*MB**4 - cll1221*MB**4 + 16*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 8*cHQ3*MT**4 - cll1221*MT**4 - 8*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 24*cHtbRe*MB*MT*MW**2 - 8*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 16*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 24*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 8*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))))*sth**4*vevhat**4 + 192*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth*vevhat*cmath.sqrt(2) - 768*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**3*vevhat*cmath.sqrt(2) + 768*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**5*vevhat*cmath.sqrt(2) - 48*ee*MW**2*(2*cHtbIm*ctWIm*MB*(MB**2 - MT**2 - MW**2) + 2*cbWIm*cHtbIm*MT*(MB**2 - MT**2 + MW**2) + (cHDD + 2*cHl311 + 2*cHl322 - 4*cHQ3 - cll1221)*ctWRe*MT*(MB**2 - MT**2 + MW**2) + 2*cHtbRe*ctWRe*MB*(-MB**2 + MT**2 + MW**2) + cbWRe*(-2*cHl322*MB**3 + 4*cHQ3*MB**3 + cll1221*MB**3 + 2*cHtbRe*MB**2*MT + 2*cHl322*MB*MT**2 - 4*cHQ3*MB*MT**2 - cll1221*MB*MT**2 - 2*cHtbRe*MT**3 + 2*cHl322*MB*MW**2 - 4*cHQ3*MB*MW**2 - cll1221*MB*MW**2 + 2*cHtbRe*MT*MW**2 + cHDD*MB*(-MB**2 + MT**2 + MW**2) + 2*cHl311*MB*(-MB**2 + MT**2 + MW**2)))*sth*vevhat**3*cmath.sqrt(2) - 192*cHWB*cth*ee*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**2*vevhat**3*cmath.sqrt(2) + 
48*ee*MW**2*(8*cHtbIm*ctWIm*MB*(MB**2 - MT**2 - MW**2) + 8*cbWIm*cHtbIm*MT*(MB**2 - MT**2 + MW**2) + (3*cHDD + 6*cHl311 + 6*cHl322 - 16*cHQ3 - 3*cll1221)*ctWRe*MT*(MB**2 - MT**2 + MW**2) + 8*cHtbRe*ctWRe*MB*(-MB**2 + MT**2 + MW**2) + cbWRe*(-6*cHl322*MB**3 + 16*cHQ3*MB**3 + 3*cll1221*MB**3 + 8*cHtbRe*MB**2*MT + 6*cHl322*MB*MT**2 - 16*cHQ3*MB*MT**2 - 3*cll1221*MB*MT**2 - 8*cHtbRe*MT**3 + 6*cHl322*MB*MW**2 - 16*cHQ3*MB*MW**2 - 3*cll1221*MB*MW**2 + 8*cHtbRe*MT*MW**2 + 3*cHDD*MB*(-MB**2 + MT**2 + MW**2) + 6*cHl311*MB*(-MB**2 + MT**2 + MW**2)))*sth**3*vevhat**3*cmath.sqrt(2) + 384*cHWB*cth*ee*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth**4*vevhat**3*cmath.sqrt(2) - 96*ee*MW**2*(4*cHtbIm*ctWIm*MB*(MB**2 - MT**2 - MW**2) + 4*cbWIm*cHtbIm*MT*(MB**2 - MT**2 + MW**2) + (cHDD + 2*cHl311 + 2*cHl322 - 8*cHQ3 - cll1221)*ctWRe*MT*(MB**2 - MT**2 + MW**2) + 4*cHtbRe*ctWRe*MB*(-MB**2 + MT**2 + MW**2) + cbWRe*(-2*cHl322*MB**3 + 8*cHQ3*MB**3 + cll1221*MB**3 + 4*cHtbRe*MB**2*MT + 2*cHl322*MB*MT**2 - 8*cHQ3*MB*MT**2 - cll1221*MB*MT**2 - 4*cHtbRe*MT**3 + 2*cHl322*MB*MW**2 - 8*cHQ3*MB*MW**2 - cll1221*MB*MW**2 + 4*cHtbRe*MT*MW**2 + cHDD*MB*(-MB**2 + MT**2 + MW**2) + 2*cHl311*MB*(-MB**2 + MT**2 + MW**2)))*sth**5*vevhat**3*cmath.sqrt(2))*cmath.sqrt(MB**4 + (MT**2 - MW**2)**2 - 2*MB**2*(MT**2 + MW**2)))/(512.*cmath.pi*LambdaSMEFT**4*MW**5*(sth - 2*sth**3)**2)',
(P.u,P.d__tilde__):'-((16*ee**2*LambdaSMEFT**4*MD**4 - 32*ee**2*LambdaSMEFT**4*MD**2*MU**2 + 16*ee**2*LambdaSMEFT**4*MU**4 + 16*ee**2*LambdaSMEFT**4*MD**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MU**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 64*ee**2*LambdaSMEFT**4*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth**2 + 64*ee**2*LambdaSMEFT**4*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth**4 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2))*sth**3*vevhat**2 - 8*cHWB*cth*ee**2*sth**3*vevhat**4*(2*cHl311*MD**4 + 2*cHl322*MD**4 - cll1221*MD**4 - 4*cHl311*MD**2*MU**2 - 4*cHl322*MD**2*MU**2 + 2*cll1221*MD**2*MU**2 + 2*cHl311*MU**4 + 2*cHl322*MU**4 - cll1221*MU**4 + 2*cHl311*MD**2*MW**2 + 2*cHl322*MD**2*MW**2 - cll1221*MD**2*MW**2 + 2*cHl311*MU**2*MW**2 + 2*cHl322*MU**2*MW**2 - cll1221*MU**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 8*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 24*cHudRe*MD*MU*MW**2*ydo*yup) - 8*ee**2*LambdaSMEFT**2*vevhat**2*(2*cHl311*MD**4 + 2*cHl322*MD**4 - cll1221*MD**4 - 4*cHl311*MD**2*MU**2 - 4*cHl322*MD**2*MU**2 + 2*cll1221*MD**2*MU**2 + 2*cHl311*MU**4 + 2*cHl322*MU**4 - cll1221*MU**4 + 2*cHl311*MD**2*MW**2 + 2*cHl322*MD**2*MW**2 - cll1221*MD**2*MW**2 + 2*cHl311*MU**2*MW**2 + 2*cHl322*MU**2*MW**2 - cll1221*MU**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) + 8*cHWB*cth*ee**2*sth*vevhat**4*(2*cHl311*MD**4 + 2*cHl322*MD**4 - cll1221*MD**4 - 4*cHl311*MD**2*MU**2 - 4*cHl322*MD**2*MU**2 + 2*cll1221*MD**2*MU**2 + 2*cHl311*MU**4 + 
2*cHl322*MU**4 - cll1221*MU**4 + 2*cHl311*MD**2*MW**2 + 2*cHl322*MD**2*MW**2 - cll1221*MD**2*MW**2 + 2*cHl311*MU**2*MW**2 + 2*cHl322*MU**2*MW**2 - cll1221*MU**2*MW**2 - 4*cHl311*MW**4 - 4*cHl322*MW**4 + 2*cll1221*MW**4 + cHDD*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) - 16*sth**4*vevhat**2*(2*cHl311*ee**2*LambdaSMEFT**2*MD**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MD**4 - cll1221*ee**2*LambdaSMEFT**2*MD**4 - 4*cHl311*ee**2*LambdaSMEFT**2*MD**2*MU**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MU**4 + 2*cHl322*ee**2*LambdaSMEFT**2*MU**4 - cll1221*ee**2*LambdaSMEFT**2*MU**4 + 2*cHl311*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MD**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MU**2*MW**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MU**2*MW**2 - cll1221*ee**2*LambdaSMEFT**2*MU**2*MW**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 4*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 2*cll1221*ee**2*LambdaSMEFT**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 8*cHj3*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 64*cdWIm**2*MD**4*MW**2*ydo**2 + 64*cdWRe**2*MD**4*MW**2*ydo**2 - 128*cdWIm**2*MD**2*MU**2*MW**2*ydo**2 - 128*cdWRe**2*MD**2*MU**2*MW**2*ydo**2 + 64*cdWIm**2*MU**4*MW**2*ydo**2 + 64*cdWRe**2*MU**4*MW**2*ydo**2 - 32*cdWIm**2*MD**2*MW**4*ydo**2 - 32*cdWRe**2*MD**2*MW**4*ydo**2 - 32*cdWIm**2*MU**2*MW**4*ydo**2 - 32*cdWRe**2*MU**2*MW**4*ydo**2 - 32*cdWIm**2*MW**6*ydo**2 - 32*cdWRe**2*MW**6*ydo**2 - 24*cHudRe*ee**2*LambdaSMEFT**2*MD*MU*MW**2*ydo*yup + 384*cdWIm*cuWIm*MD*MU*MW**4*ydo*yup - 384*cdWRe*cuWRe*MD*MU*MW**4*ydo*yup + 64*cuWIm**2*MD**4*MW**2*yup**2 + 64*cuWRe**2*MD**4*MW**2*yup**2 - 128*cuWIm**2*MD**2*MU**2*MW**2*yup**2 - 
128*cuWRe**2*MD**2*MU**2*MW**2*yup**2 + 64*cuWIm**2*MU**4*MW**2*yup**2 + 64*cuWRe**2*MU**4*MW**2*yup**2 - 32*cuWIm**2*MD**2*MW**4*yup**2 - 32*cuWRe**2*MD**2*MW**4*yup**2 - 32*cuWIm**2*MU**2*MW**4*yup**2 - 32*cuWRe**2*MU**2*MW**4*yup**2 - 32*cuWIm**2*MW**6*yup**2 - 32*cuWRe**2*MW**6*yup**2) + 8*sth**2*vevhat**2*(6*cHl311*ee**2*LambdaSMEFT**2*MD**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MD**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MD**4 - 12*cHl311*ee**2*LambdaSMEFT**2*MD**2*MU**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 6*cll1221*ee**2*LambdaSMEFT**2*MD**2*MU**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MU**4 + 6*cHl322*ee**2*LambdaSMEFT**2*MU**4 - 3*cll1221*ee**2*LambdaSMEFT**2*MU**4 + 6*cHl311*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MD**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MD**2*MW**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MU**2*MW**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MU**2*MW**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MU**2*MW**2 - 12*cHl311*ee**2*LambdaSMEFT**2*MW**4 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**4 + 6*cll1221*ee**2*LambdaSMEFT**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 16*cHj3*ee**2*LambdaSMEFT**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 32*cdWIm**2*MD**4*MW**2*ydo**2 + 32*cdWRe**2*MD**4*MW**2*ydo**2 - 64*cdWIm**2*MD**2*MU**2*MW**2*ydo**2 - 64*cdWRe**2*MD**2*MU**2*MW**2*ydo**2 + 32*cdWIm**2*MU**4*MW**2*ydo**2 + 32*cdWRe**2*MU**4*MW**2*ydo**2 - 16*cdWIm**2*MD**2*MW**4*ydo**2 - 16*cdWRe**2*MD**2*MW**4*ydo**2 - 16*cdWIm**2*MU**2*MW**4*ydo**2 - 16*cdWRe**2*MU**2*MW**4*ydo**2 - 16*cdWIm**2*MW**6*ydo**2 - 16*cdWRe**2*MW**6*ydo**2 - 48*cHudRe*ee**2*LambdaSMEFT**2*MD*MU*MW**2*ydo*yup + 192*cdWIm*cuWIm*MD*MU*MW**4*ydo*yup - 192*cdWRe*cuWRe*MD*MU*MW**4*ydo*yup + 32*cuWIm**2*MD**4*MW**2*yup**2 + 32*cuWRe**2*MD**4*MW**2*yup**2 - 64*cuWIm**2*MD**2*MU**2*MW**2*yup**2 - 64*cuWRe**2*MD**2*MU**2*MW**2*yup**2 + 32*cuWIm**2*MU**4*MW**2*yup**2 + 32*cuWRe**2*MU**4*MW**2*yup**2 - 
16*cuWIm**2*MD**2*MW**4*yup**2 - 16*cuWRe**2*MD**2*MW**4*yup**2 - 16*cuWIm**2*MU**2*MW**4*yup**2 - 16*cuWRe**2*MU**2*MW**4*yup**2 - 16*cuWIm**2*MW**6*yup**2 - 16*cuWRe**2*MW**6*yup**2) - 512*MW**2*sth**6*vevhat**2*(cdWIm**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 + cdWRe**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 - 12*cdWIm*cuWIm*MD*MU*MW**2*ydo*yup + 12*cdWRe*cuWRe*MD*MU*MW**2*ydo*yup - (cuWIm**2 + cuWRe**2)*(2*MD**4 + 2*MU**4 - MU**2*MW**2 - MW**4 - MD**2*(4*MU**2 + MW**2))*yup**2) + ee**2*vevhat**4*(4*cHl311**2*MD**4 + 8*cHl311*cHl322*MD**4 + 4*cHl322**2*MD**4 - 4*cHl311*cll1221*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl311**2*MD**2*MU**2 - 16*cHl311*cHl322*MD**2*MU**2 - 8*cHl322**2*MD**2*MU**2 + 8*cHl311*cll1221*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl311**2*MU**4 + 8*cHl311*cHl322*MU**4 + 4*cHl322**2*MU**4 - 4*cHl311*cll1221*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl311**2*MD**2*MW**2 + 8*cHl311*cHl322*MD**2*MW**2 + 4*cHl322**2*MD**2*MW**2 - 4*cHl311*cll1221*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl311**2*MU**2*MW**2 + 8*cHl311*cHl322*MU**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 4*cHl311*cll1221*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 16*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 48*cHl311*cHudRe*MD*MU*MW**2*ydo*yup - 48*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 24*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 4*cHudIm**2*MD**4*ydo**2*yup**2 + 4*cHudRe**2*MD**4*ydo**2*yup**2 - 8*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 8*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 4*cHudIm**2*MU**4*ydo**2*yup**2 + 4*cHudRe**2*MU**4*ydo**2*yup**2 + 
4*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 8*cHudIm**2*MW**4*ydo**2*yup**2 - 8*cHudRe**2*MW**4*ydo**2*yup**2 - 8*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) - 2*cHDD*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 12*cHudRe*MD*MU*MW**2*ydo*yup)) - 2*ee**2*sth**2*vevhat**4*(4*cHl311**2*MD**4 + 8*cHl311*cHl322*MD**4 + 4*cHl322**2*MD**4 - 8*cHWB**2*MD**4 - 4*cHl311*cll1221*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl311**2*MD**2*MU**2 - 16*cHl311*cHl322*MD**2*MU**2 - 8*cHl322**2*MD**2*MU**2 + 16*cHWB**2*MD**2*MU**2 + 8*cHl311*cll1221*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl311**2*MU**4 + 8*cHl311*cHl322*MU**4 + 4*cHl322**2*MU**4 - 8*cHWB**2*MU**4 - 4*cHl311*cll1221*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl311**2*MD**2*MW**2 + 8*cHl311*cHl322*MD**2*MW**2 + 4*cHl322**2*MD**2*MW**2 - 8*cHWB**2*MD**2*MW**2 - 4*cHl311*cll1221*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl311**2*MU**2*MW**2 + 8*cHl311*cHl322*MU**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 8*cHWB**2*MU**2*MW**2 - 4*cHl311*cll1221*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 16*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 
+ 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 32*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 72*cHl311*cHudRe*MD*MU*MW**2*ydo*yup - 72*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 36*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 8*cHudIm**2*MD**4*ydo**2*yup**2 + 8*cHudRe**2*MD**4*ydo**2*yup**2 - 16*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 16*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 8*cHudIm**2*MU**4*ydo**2*yup**2 + 8*cHudRe**2*MU**4*ydo**2*yup**2 + 8*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 8*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 8*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 8*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 16*cHudIm**2*MW**4*ydo**2*yup**2 - 16*cHudRe**2*MW**4*ydo**2*yup**2 - 12*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 16*cHudRe*MD*MU*MW**2*ydo*yup) - 2*cHDD*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 6*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 18*cHudRe*MD*MU*MW**2*ydo*yup)) + ee**2*sth**4*vevhat**4*(4*cHl311**2*MD**4 + 8*cHl311*cHl322*MD**4 + 4*cHl322**2*MD**4 - 16*cHWB**2*MD**4 - 4*cHl311*cll1221*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl311**2*MD**2*MU**2 - 16*cHl311*cHl322*MD**2*MU**2 - 8*cHl322**2*MD**2*MU**2 + 32*cHWB**2*MD**2*MU**2 + 8*cHl311*cll1221*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl311**2*MU**4 + 8*cHl311*cHl322*MU**4 + 4*cHl322**2*MU**4 - 16*cHWB**2*MU**4 - 
4*cHl311*cll1221*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl311**2*MD**2*MW**2 + 8*cHl311*cHl322*MD**2*MW**2 + 4*cHl322**2*MD**2*MW**2 - 16*cHWB**2*MD**2*MW**2 - 4*cHl311*cll1221*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl311**2*MU**2*MW**2 + 8*cHl311*cHl322*MU**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 16*cHWB**2*MU**2*MW**2 - 4*cHl311*cll1221*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl311**2*MW**4 - 16*cHl311*cHl322*MW**4 - 8*cHl322**2*MW**4 + 32*cHWB**2*MW**4 + 8*cHl311*cll1221*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + cHDD**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 64*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 96*cHl311*cHudRe*MD*MU*MW**2*ydo*yup - 96*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 48*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 16*cHudIm**2*MD**4*ydo**2*yup**2 + 16*cHudRe**2*MD**4*ydo**2*yup**2 - 32*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 32*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 16*cHudIm**2*MU**4*ydo**2*yup**2 + 16*cHudRe**2*MU**4*ydo**2*yup**2 + 16*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 16*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 16*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 16*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 32*cHudIm**2*MW**4*ydo**2*yup**2 - 32*cHudRe**2*MW**4*ydo**2*yup**2 - 16*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 24*cHudRe*MD*MU*MW**2*ydo*yup) - 2*cHDD*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 8*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 
2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 24*cHudRe*MD*MU*MW**2*ydo*yup)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) - 768*ee*LambdaSMEFT**2*MW**2*sth**3*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) + 768*ee*LambdaSMEFT**2*MW**2*sth**5*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) - 192*cHWB*cth*ee*MW**2*sth**2*vevhat**3*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) + 384*cHWB*cth*ee*MW**2*sth**4*vevhat**3*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) - 48*ee*MW**2*sth*vevhat**3*(yup*(2*cHl311*cuWRe*MD**2*MU + 2*cHl322*cuWRe*MD**2*MU - cll1221*cuWRe*MD**2*MU - 2*cHl311*cuWRe*MU**3 - 2*cHl322*cuWRe*MU**3 + cll1221*cuWRe*MU**3 + 2*cHl311*cuWRe*MU*MW**2 + 2*cHl322*cuWRe*MU*MW**2 - cll1221*cuWRe*MU*MW**2 + cHDD*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 4*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) + 2*cdWIm*cHudIm*MD**2*MU*ydo**2 - 2*cdWIm*cHudIm*MU**3*ydo**2 + 2*cdWIm*cHudIm*MU*MW**2*ydo**2 + 2*cHudIm*cuWIm*MD**3*ydo*yup - 2*cHudRe*cuWRe*MD**3*ydo*yup - 2*cHudIm*cuWIm*MD*MU**2*ydo*yup + 2*cHudRe*cuWRe*MD*MU**2*ydo*yup - 2*cHudIm*cuWIm*MD*MW**2*ydo*yup + 2*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(-2*cHl311*MD**3 - 2*cHl322*MD**3 + cll1221*MD**3 + 2*cHl311*MD*MU**2 + 2*cHl322*MD*MU**2 - cll1221*MD*MU**2 + 2*cHl311*MD*MW**2 + 2*cHl322*MD*MW**2 - cll1221*MD*MW**2 + 4*cHj3*MD*(MD**2 - MU**2 - MW**2) + cHDD*MD*(-MD**2 + MU**2 + MW**2) + 2*cHudRe*MD**2*MU*ydo*yup - 2*cHudRe*MU**3*ydo*yup + 2*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2) - 96*ee*MW**2*sth**5*vevhat**3*(yup*(2*cHl311*cuWRe*MD**2*MU + 2*cHl322*cuWRe*MD**2*MU - cll1221*cuWRe*MD**2*MU - 2*cHl311*cuWRe*MU**3 - 2*cHl322*cuWRe*MU**3 + cll1221*cuWRe*MU**3 + 2*cHl311*cuWRe*MU*MW**2 + 2*cHl322*cuWRe*MU*MW**2 - 
cll1221*cuWRe*MU*MW**2 + cHDD*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 8*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) + 4*cdWIm*cHudIm*MD**2*MU*ydo**2 - 4*cdWIm*cHudIm*MU**3*ydo**2 + 4*cdWIm*cHudIm*MU*MW**2*ydo**2 + 4*cHudIm*cuWIm*MD**3*ydo*yup - 4*cHudRe*cuWRe*MD**3*ydo*yup - 4*cHudIm*cuWIm*MD*MU**2*ydo*yup + 4*cHudRe*cuWRe*MD*MU**2*ydo*yup - 4*cHudIm*cuWIm*MD*MW**2*ydo*yup + 4*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(-2*cHl311*MD**3 - 2*cHl322*MD**3 + cll1221*MD**3 + 2*cHl311*MD*MU**2 + 2*cHl322*MD*MU**2 - cll1221*MD*MU**2 + 2*cHl311*MD*MW**2 + 2*cHl322*MD*MW**2 - cll1221*MD*MW**2 + 8*cHj3*MD*(MD**2 - MU**2 - MW**2) + cHDD*MD*(-MD**2 + MU**2 + MW**2) + 4*cHudRe*MD**2*MU*ydo*yup - 4*cHudRe*MU**3*ydo*yup + 4*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2) + 48*ee*MW**2*sth**3*vevhat**3*(yup*(6*cHl311*cuWRe*MD**2*MU + 6*cHl322*cuWRe*MD**2*MU - 3*cll1221*cuWRe*MD**2*MU - 6*cHl311*cuWRe*MU**3 - 6*cHl322*cuWRe*MU**3 + 3*cll1221*cuWRe*MU**3 + 6*cHl311*cuWRe*MU*MW**2 + 6*cHl322*cuWRe*MU*MW**2 - 3*cll1221*cuWRe*MU*MW**2 + 3*cHDD*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 16*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) + 8*cdWIm*cHudIm*MD**2*MU*ydo**2 - 8*cdWIm*cHudIm*MU**3*ydo**2 + 8*cdWIm*cHudIm*MU*MW**2*ydo**2 + 8*cHudIm*cuWIm*MD**3*ydo*yup - 8*cHudRe*cuWRe*MD**3*ydo*yup - 8*cHudIm*cuWIm*MD*MU**2*ydo*yup + 8*cHudRe*cuWRe*MD*MU**2*ydo*yup - 8*cHudIm*cuWIm*MD*MW**2*ydo*yup + 8*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(-6*cHl311*MD**3 - 6*cHl322*MD**3 + 3*cll1221*MD**3 + 6*cHl311*MD*MU**2 + 6*cHl322*MD*MU**2 - 3*cll1221*MD*MU**2 + 6*cHl311*MD*MW**2 + 6*cHl322*MD*MW**2 - 3*cll1221*MD*MW**2 + 16*cHj3*MD*(MD**2 - MU**2 - MW**2) + 3*cHDD*MD*(-MD**2 + MU**2 + MW**2) + 8*cHudRe*MD**2*MU*ydo*yup - 8*cHudRe*MU**3*ydo*yup + 8*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2))*cmath.sqrt(MD**4 + (MU**2 - MW**2)**2 - 2*MD**2*(MU**2 + MW**2)))/(512.*cmath.pi*LambdaSMEFT**4*MW**5*(sth - 2*sth**3)**2)',
(P.ve,P.e__plus__):'((Me**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*Me**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 64*ee**2*LambdaSMEFT**4*(Me**2 + 2*MW**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(Me**2 + 2*MW**2)*sth**4 - 8*(cHDD - 2*cHl311 + 2*cHl322 - cll1221)*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2)*vevhat**2 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2)*sth*vevhat**2 + 8*(6*cHl322*ee**2*LambdaSMEFT**2*Me**2 - 3*cll1221*ee**2*LambdaSMEFT**2*Me**2 + 12*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 6*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 32*ceWIm11**2*Me**2*MW**2 + 32*ceWRe11**2*Me**2*MW**2 + 16*ceWIm11**2*MW**4 + 16*ceWRe11**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2) - 10*cHl311*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2))*sth**2*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2)*sth**3*vevhat**2 - 16*(2*cHl322*ee**2*LambdaSMEFT**2*Me**2 - cll1221*ee**2*LambdaSMEFT**2*Me**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 64*ceWIm11**2*Me**2*MW**2 + 64*ceWRe11**2*Me**2*MW**2 + 32*ceWIm11**2*MW**4 + 32*ceWRe11**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2) - 6*cHl311*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2))*sth**4*vevhat**2 + 512*(ceWIm11**2 + ceWRe11**2)*MW**2*(2*Me**2 + MW**2)*sth**6*vevhat**2 + (cHDD - 2*cHl311 + 2*cHl322 - cll1221)**2*ee**2*(Me**2 + 2*MW**2)*vevhat**4 + 8*cHWB*(cHDD - 2*cHl311 + 2*cHl322 - cll1221)*cth*ee**2*(Me**2 + 2*MW**2)*sth*vevhat**4 - 2*(cHDD**2 + 12*cHl311**2 + 4*cHl322**2 - 8*cHWB**2 - 4*cHl322*cll1221 + cll1221**2 + 8*cHl311*(-2*cHl322 + cll1221) - 2*cHDD*(4*cHl311 - 2*cHl322 + cll1221))*ee**2*(Me**2 + 2*MW**2)*sth**2*vevhat**4 - 8*cHWB*(cHDD - 6*cHl311 + 2*cHl322 - cll1221)*cth*ee**2*(Me**2 + 2*MW**2)*sth**3*vevhat**4 + (cHDD**2 + 36*cHl311**2 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl322*cll1221 + cll1221**2 + 12*cHl311*(-2*cHl322 + cll1221) - 2*cHDD*(6*cHl311 - 2*cHl322 + cll1221))*ee**2*(Me**2 + 2*MW**2)*sth**4*vevhat**4 - 192*ceWRe11*ee*LambdaSMEFT**2*Me*MW**2*sth*vevhat*cmath.sqrt(2) + 
768*ceWRe11*ee*LambdaSMEFT**2*Me*MW**2*sth**3*vevhat*cmath.sqrt(2) - 768*ceWRe11*ee*LambdaSMEFT**2*Me*MW**2*sth**5*vevhat*cmath.sqrt(2) + 48*ceWRe11*(cHDD - 2*cHl311 + 2*cHl322 - cll1221)*ee*Me*MW**2*sth*vevhat**3*cmath.sqrt(2) + 192*ceWRe11*cHWB*cth*ee*Me*MW**2*sth**2*vevhat**3*cmath.sqrt(2) - 48*ceWRe11*(3*cHDD - 10*cHl311 + 6*cHl322 - 3*cll1221)*ee*Me*MW**2*sth**3*vevhat**3*cmath.sqrt(2) - 384*ceWRe11*cHWB*cth*ee*Me*MW**2*sth**4*vevhat**3*cmath.sqrt(2) + 96*ceWRe11*(cHDD - 6*cHl311 + 2*cHl322 - cll1221)*ee*Me*MW**2*sth**5*vevhat**3*cmath.sqrt(2)))/(1536.*cmath.pi*LambdaSMEFT**4*MW**5*(sth - 2*sth**3)**2)',
(P.vm,P.mu__plus__):'((MMU**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*MMU**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 64*ee**2*LambdaSMEFT**4*(MMU**2 + 2*MW**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(MMU**2 + 2*MW**2)*sth**4 - 8*(cHDD + 2*cHl311 - 2*cHl322 - cll1221)*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2)*vevhat**2 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2)*sth*vevhat**2 + 8*(-10*cHl322*ee**2*LambdaSMEFT**2*MMU**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MMU**2 - 20*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 6*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 32*ceWIm22**2*MMU**2*MW**2 + 32*ceWRe22**2*MMU**2*MW**2 + 16*ceWIm22**2*MW**4 + 16*ceWRe22**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2) + 6*cHl311*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2))*sth**2*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2)*sth**3*vevhat**2 - 16*(-6*cHl322*ee**2*LambdaSMEFT**2*MMU**2 - cll1221*ee**2*LambdaSMEFT**2*MMU**2 - 12*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 64*ceWIm22**2*MMU**2*MW**2 + 64*ceWRe22**2*MMU**2*MW**2 + 32*ceWIm22**2*MW**4 + 32*ceWRe22**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2) + 2*cHl311*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2))*sth**4*vevhat**2 + 512*(ceWIm22**2 + ceWRe22**2)*MW**2*(2*MMU**2 + MW**2)*sth**6*vevhat**2 + (cHDD + 2*cHl311 - 2*cHl322 - cll1221)**2*ee**2*(MMU**2 + 2*MW**2)*vevhat**4 + 8*cHWB*(cHDD + 2*cHl311 - 2*cHl322 - cll1221)*cth*ee**2*(MMU**2 + 2*MW**2)*sth*vevhat**4 - 2*(cHDD**2 + 4*cHDD*cHl311 + 4*cHl311**2 + 12*cHl322**2 - 8*cHWB**2 + 8*cHl322*cll1221 + cll1221**2 - 2*cHDD*(4*cHl322 + cll1221) - 4*cHl311*(4*cHl322 + cll1221))*ee**2*(MMU**2 + 2*MW**2)*sth**2*vevhat**4 - 8*cHWB*(cHDD + 2*cHl311 - 6*cHl322 - cll1221)*cth*ee**2*(MMU**2 + 2*MW**2)*sth**3*vevhat**4 + (cHDD**2 + 4*cHl311**2 + 36*cHl322**2 - 16*cHWB**2 + 2*cHDD*(2*cHl311 - 6*cHl322 - cll1221) + 12*cHl322*cll1221 + cll1221**2 - 4*cHl311*(6*cHl322 + cll1221))*ee**2*(MMU**2 + 2*MW**2)*sth**4*vevhat**4 - 
192*ceWRe22*ee*LambdaSMEFT**2*MMU*MW**2*sth*vevhat*cmath.sqrt(2) + 768*ceWRe22*ee*LambdaSMEFT**2*MMU*MW**2*sth**3*vevhat*cmath.sqrt(2) - 768*ceWRe22*ee*LambdaSMEFT**2*MMU*MW**2*sth**5*vevhat*cmath.sqrt(2) + 48*ceWRe22*(cHDD + 2*cHl311 - 2*cHl322 - cll1221)*ee*MMU*MW**2*sth*vevhat**3*cmath.sqrt(2) + 192*ceWRe22*cHWB*cth*ee*MMU*MW**2*sth**2*vevhat**3*cmath.sqrt(2) - 48*ceWRe22*(3*cHDD + 6*cHl311 - 10*cHl322 - 3*cll1221)*ee*MMU*MW**2*sth**3*vevhat**3*cmath.sqrt(2) - 384*ceWRe22*cHWB*cth*ee*MMU*MW**2*sth**4*vevhat**3*cmath.sqrt(2) + 96*ceWRe22*(cHDD + 2*cHl311 - 6*cHl322 - cll1221)*ee*MMU*MW**2*sth**5*vevhat**3*cmath.sqrt(2)))/(1536.*cmath.pi*LambdaSMEFT**4*MW**5*(sth - 2*sth**3)**2)',
(P.vt,P.ta__plus__):'((MTA**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*MTA**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 64*ee**2*LambdaSMEFT**4*(MTA**2 + 2*MW**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(MTA**2 + 2*MW**2)*sth**4 - 8*(cHDD + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2)*vevhat**2 - 32*cHWB*cth*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2)*sth*vevhat**2 + 8*(6*cHl322*ee**2*LambdaSMEFT**2*MTA**2 - 16*cHl333*ee**2*LambdaSMEFT**2*MTA**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MTA**2 + 12*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 32*cHl333*ee**2*LambdaSMEFT**2*MW**2 - 6*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 32*ceWIm33**2*MTA**2*MW**2 + 32*ceWRe33**2*MTA**2*MW**2 + 16*ceWIm33**2*MW**4 + 16*ceWRe33**2*MW**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2) + 6*cHl311*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2))*sth**2*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2)*sth**3*vevhat**2 - 16*(2*cHl322*ee**2*LambdaSMEFT**2*MTA**2 - 8*cHl333*ee**2*LambdaSMEFT**2*MTA**2 - cll1221*ee**2*LambdaSMEFT**2*MTA**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MW**2 - 16*cHl333*ee**2*LambdaSMEFT**2*MW**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MW**2 + 64*ceWIm33**2*MTA**2*MW**2 + 64*ceWRe33**2*MTA**2*MW**2 + 32*ceWIm33**2*MW**4 + 32*ceWRe33**2*MW**4 + cHDD*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2) + 2*cHl311*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2))*sth**4*vevhat**2 + 512*(ceWIm33**2 + ceWRe33**2)*MW**2*(2*MTA**2 + MW**2)*sth**6*vevhat**2 + (cHDD + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)**2*ee**2*(MTA**2 + 2*MW**2)*vevhat**4 + 8*cHWB*(cHDD + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*cth*ee**2*(MTA**2 + 2*MW**2)*sth*vevhat**4 - 2*(cHDD**2 + 4*cHl311**2 + 4*cHl322**2 - 24*cHl322*cHl333 + 32*cHl333**2 - 8*cHWB**2 + 4*cHl311*(2*cHl322 - 6*cHl333 - cll1221) + 2*cHDD*(2*cHl311 + 2*cHl322 - 6*cHl333 - cll1221) - 4*cHl322*cll1221 + 12*cHl333*cll1221 + cll1221**2)*ee**2*(MTA**2 + 2*MW**2)*sth**2*vevhat**4 - 8*cHWB*(cHDD + 2*cHl311 + 2*cHl322 - 8*cHl333 - cll1221)*cth*ee**2*(MTA**2 + 
2*MW**2)*sth**3*vevhat**4 + (cHDD**2 + 4*cHl311**2 + 4*cHl322**2 - 32*cHl322*cHl333 + 64*cHl333**2 - 16*cHWB**2 + 4*cHl311*(2*cHl322 - 8*cHl333 - cll1221) + 2*cHDD*(2*cHl311 + 2*cHl322 - 8*cHl333 - cll1221) - 4*cHl322*cll1221 + 16*cHl333*cll1221 + cll1221**2)*ee**2*(MTA**2 + 2*MW**2)*sth**4*vevhat**4 - 192*ceWRe33*ee*LambdaSMEFT**2*MTA*MW**2*sth*vevhat*cmath.sqrt(2) + 768*ceWRe33*ee*LambdaSMEFT**2*MTA*MW**2*sth**3*vevhat*cmath.sqrt(2) - 768*ceWRe33*ee*LambdaSMEFT**2*MTA*MW**2*sth**5*vevhat*cmath.sqrt(2) + 48*ceWRe33*(cHDD + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee*MTA*MW**2*sth*vevhat**3*cmath.sqrt(2) + 192*ceWRe33*cHWB*cth*ee*MTA*MW**2*sth**2*vevhat**3*cmath.sqrt(2) - 48*ceWRe33*(3*cHDD + 6*cHl311 + 6*cHl322 - 16*cHl333 - 3*cll1221)*ee*MTA*MW**2*sth**3*vevhat**3*cmath.sqrt(2) - 384*ceWRe33*cHWB*cth*ee*MTA*MW**2*sth**4*vevhat**3*cmath.sqrt(2) + 96*ceWRe33*(cHDD + 2*cHl311 + 2*cHl322 - 8*cHl333 - cll1221)*ee*MTA*MW**2*sth**5*vevhat**3*cmath.sqrt(2)))/(1536.*cmath.pi*LambdaSMEFT**4*MW**5*(sth - 2*sth**3)**2)'})
Decay_Z = Decay(name = 'Decay_Z',
particle = P.Z,
partial_widths = {(P.a,P.H):'-((MH**2 - MZ**2)**3*(gHza**2*LambdaSMEFT**4 - 2*gHza*LambdaSMEFT**2*(cHWB + 2*(cHB - cHW)*cth*sth - 2*cHWB*sth**2)*vevhat**2 + (cHWB**2*(1 - 2*sth**2)**2 + cHWBtil**2*(1 - 2*sth**2)**2 - 4*(cHB**2 + cHBtil**2 - 2*cHB*cHW + cHW**2 - 2*cHBtil*cHWtil + cHWtil**2)*sth**2*(-1 + sth**2) - 4*(cHB - cHW)*cHWB*cth*sth*(-1 + 2*sth**2) - 4*cHWBtil*(cHBtil - cHWtil)*cth*sth*(-1 + 2*sth**2))*vevhat**4))/(24.*cmath.pi*LambdaSMEFT**4*MZ**3*vevhat**2)',
(P.b,P.b__tilde__):'((-144*ee**2*LambdaSMEFT**4*MB**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 + 192*ee**2*LambdaSMEFT**4*(MB**2 - 4*MZ**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(19*MB**2 + 23*MZ**2)*sth**4 - 1280*ee**2*LambdaSMEFT**4*(2*MB**2 + MZ**2)*sth**6 + 512*ee**2*LambdaSMEFT**4*(2*MB**2 + MZ**2)*sth**8 + 72*ee**2*LambdaSMEFT**2*(12*cHbq*MB**2 + (cHDD + 2*cHl311 + 2*cHl322 - 4*cHQ1 - 4*cHQ3 - cll1221)*(MB**2 - MZ**2))*vevhat**2 - 192*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2)*sth*vevhat**2 - 96*(6*cHl311*ee**2*LambdaSMEFT**2*MB**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MB**2 - 8*cHQ1*ee**2*LambdaSMEFT**2*MB**2 - 8*cHQ3*ee**2*LambdaSMEFT**2*MB**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MB**2 - 6*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 6*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 14*cHQ1*ee**2*LambdaSMEFT**2*MZ**2 + 14*cHQ3*ee**2*LambdaSMEFT**2*MZ**2 + 3*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 48*cbWIm**2*MB**2*MZ**2 - 96*cbWRe**2*MB**2*MZ**2 - 12*cbWIm**2*MZ**4 - 12*cbWRe**2*MZ**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MB**2 - MZ**2) + 2*cHbq*ee**2*LambdaSMEFT**2*(20*MB**2 + MZ**2))*sth**2*vevhat**2 + 128*cth*(5*cHWB*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2) + 18*MZ**2*(cbBIm*cbWIm*(-4*MB**2 + MZ**2) + cbBRe*cbWRe*(8*MB**2 + MZ**2)))*sth**3*vevhat**2 + 32*(14*cHl311*ee**2*LambdaSMEFT**2*MB**2 + 14*cHl322*ee**2*LambdaSMEFT**2*MB**2 + 12*cHQ1*ee**2*LambdaSMEFT**2*MB**2 + 12*cHQ3*ee**2*LambdaSMEFT**2*MB**2 - 7*cll1221*ee**2*LambdaSMEFT**2*MB**2 - 20*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 20*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 60*cHQ1*ee**2*LambdaSMEFT**2*MZ**2 + 60*cHQ3*ee**2*LambdaSMEFT**2*MZ**2 + 10*cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 144*cbBIm**2*MB**2*MZ**2 + 288*cbBRe**2*MB**2*MZ**2 + 864*cbWIm**2*MB**2*MZ**2 - 1728*cbWRe**2*MB**2*MZ**2 + 36*cbBIm**2*MZ**4 + 36*cbBRe**2*MZ**4 - 216*cbWIm**2*MZ**4 - 216*cbWRe**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(7*MB**2 - 10*MZ**2) + 12*cHbq*ee**2*LambdaSMEFT**2*(13*MB**2 + 2*MZ**2))*sth**4*vevhat**2 - 256*cth*(2*cHWB*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2) + 
45*MZ**2*(cbBIm*cbWIm*(-4*MB**2 + MZ**2) + cbBRe*cbWRe*(8*MB**2 + MZ**2)))*sth**5*vevhat**2 - 64*(-4*cHl311*ee**2*LambdaSMEFT**2*MB**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MB**2 + 24*cHQ1*ee**2*LambdaSMEFT**2*MB**2 + 24*cHQ3*ee**2*LambdaSMEFT**2*MB**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MB**2 - 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 12*cHQ1*ee**2*LambdaSMEFT**2*MZ**2 + 12*cHQ3*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 360*cbBIm**2*MB**2*MZ**2 + 720*cbBRe**2*MB**2*MZ**2 + 936*cbWIm**2*MB**2*MZ**2 - 1872*cbWRe**2*MB**2*MZ**2 + 90*cbBIm**2*MZ**4 + 90*cbBRe**2*MZ**4 - 234*cbWIm**2*MZ**4 - 234*cbWRe**2*MZ**4 + 12*cHbq*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2) - cHDD*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2))*sth**6*vevhat**2 + 18432*cth*MZ**2*(cbBIm*cbWIm*(-4*MB**2 + MZ**2) + cbBRe*cbWRe*(8*MB**2 + MZ**2))*sth**7*vevhat**2 + 4608*MZ**2*(3*cbWIm**2*(4*MB**2 - MZ**2) + 2*cbBRe**2*(8*MB**2 + MZ**2) - 3*cbWRe**2*(8*MB**2 + MZ**2) + cbBIm**2*(-8*MB**2 + 2*MZ**2))*sth**8*vevhat**2 - 9216*cth*MZ**2*(cbBIm*cbWIm*(-4*MB**2 + MZ**2) + cbBRe*cbWRe*(8*MB**2 + MZ**2))*sth**9*vevhat**2 + 4608*MZ**2*(-4*cbWIm**2*MB**2 + 8*cbWRe**2*MB**2 + cbWIm**2*MZ**2 + cbWRe**2*MZ**2 + cbBIm**2*(4*MB**2 - MZ**2) - cbBRe**2*(8*MB**2 + MZ**2))*sth**10*vevhat**2 - 9*ee**2*(24*cHbq*(cHDD + 2*cHl311 + 2*cHl322 - 4*cHQ1 - 4*cHQ3 - cll1221)*MB**2 + 16*cHbq**2*(MB**2 - MZ**2) + (cHDD + 2*cHl311 + 2*cHl322 - 4*cHQ1 - 4*cHQ3 - cll1221)**2*(MB**2 - MZ**2))*vevhat**4 - 48*cHWB*(4*cHbq - cHDD - 2*cHl311 - 2*cHl322 + 4*cHQ1 + 4*cHQ3 + cll1221)*cth*ee**2*(2*MB**2 + MZ**2)*sth*vevhat**4 + 4*ee**2*(60*cHl311**2*MB**2 + 120*cHl311*cHl322*MB**2 + 60*cHl322**2*MB**2 - 192*cHl311*cHQ1*MB**2 - 192*cHl322*cHQ1*MB**2 + 144*cHQ1**2*MB**2 - 192*cHl311*cHQ3*MB**2 - 192*cHl322*cHQ3*MB**2 + 288*cHQ1*cHQ3*MB**2 + 144*cHQ3**2*MB**2 + 64*cHWB**2*MB**2 - 60*cHl311*cll1221*MB**2 - 60*cHl322*cll1221*MB**2 + 96*cHQ1*cll1221*MB**2 + 96*cHQ3*cll1221*MB**2 + 15*cll1221**2*MB**2 - 
24*cHl311**2*MZ**2 - 48*cHl311*cHl322*MZ**2 - 24*cHl322**2*MZ**2 + 120*cHl311*cHQ1*MZ**2 + 120*cHl322*cHQ1*MZ**2 - 144*cHQ1**2*MZ**2 + 120*cHl311*cHQ3*MZ**2 + 120*cHl322*cHQ3*MZ**2 - 288*cHQ1*cHQ3*MZ**2 - 144*cHQ3**2*MZ**2 + 32*cHWB**2*MZ**2 + 24*cHl311*cll1221*MZ**2 + 24*cHl322*cll1221*MZ**2 - 60*cHQ1*cll1221*MZ**2 - 60*cHQ3*cll1221*MZ**2 - 6*cll1221**2*MZ**2 + 3*cHDD**2*(5*MB**2 - 2*MZ**2) + 144*cHbq**2*(MB**2 - MZ**2) + 12*cHbq*(16*cHDD*MB**2 + 32*cHl311*MB**2 + 32*cHl322*MB**2 - 72*cHQ1*MB**2 - 72*cHQ3*MB**2 - 16*cll1221*MB**2 - cHDD*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2) + 6*cHDD*(10*cHl311*MB**2 + 10*cHl322*MB**2 - 16*cHQ1*MB**2 - 16*cHQ3*MB**2 - 5*cll1221*MB**2 - 4*cHl311*MZ**2 - 4*cHl322*MZ**2 + 10*cHQ1*MZ**2 + 10*cHQ3*MZ**2 + 2*cll1221*MZ**2))*sth**2*vevhat**4 + 32*cHWB*(12*cHbq - cHDD - 2*cHl311 - 2*cHl322 + 12*cHQ1 + 12*cHQ3 + cll1221)*cth*ee**2*(2*MB**2 + MZ**2)*sth**3*vevhat**4 - 4*ee**2*(68*cHl311**2*MB**2 + 136*cHl311*cHl322*MB**2 + 68*cHl322**2*MB**2 - 240*cHl311*cHQ1*MB**2 - 240*cHl322*cHQ1*MB**2 + 144*cHQ1**2*MB**2 - 240*cHl311*cHQ3*MB**2 - 240*cHl322*cHQ3*MB**2 + 288*cHQ1*cHQ3*MB**2 + 144*cHQ3**2*MB**2 + 64*cHWB**2*MB**2 - 68*cHl311*cll1221*MB**2 - 68*cHl322*cll1221*MB**2 + 120*cHQ1*cll1221*MB**2 + 120*cHQ3*cll1221*MB**2 + 17*cll1221**2*MB**2 - 20*cHl311**2*MZ**2 - 40*cHl311*cHl322*MZ**2 - 20*cHl322**2*MZ**2 + 96*cHl311*cHQ1*MZ**2 + 96*cHl322*cHQ1*MZ**2 - 144*cHQ1**2*MZ**2 + 96*cHl311*cHQ3*MZ**2 + 96*cHl322*cHQ3*MZ**2 - 288*cHQ1*cHQ3*MZ**2 - 144*cHQ3**2*MZ**2 + 32*cHWB**2*MZ**2 + 20*cHl311*cll1221*MZ**2 + 20*cHl322*cll1221*MZ**2 - 48*cHQ1*cll1221*MZ**2 - 48*cHQ3*cll1221*MZ**2 - 5*cll1221**2*MZ**2 + cHDD**2*(17*MB**2 - 5*MZ**2) + 144*cHbq**2*(MB**2 - MZ**2) + 24*cHbq*(7*cHDD*MB**2 + 14*cHl311*MB**2 + 14*cHl322*MB**2 - 36*cHQ1*MB**2 - 36*cHQ3*MB**2 - 7*cll1221*MB**2 - cHDD*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2) + 2*cHDD*(34*cHl311*MB**2 + 34*cHl322*MB**2 - 60*cHQ1*MB**2 - 60*cHQ3*MB**2 - 17*cll1221*MB**2 - 
10*cHl311*MZ**2 - 10*cHl322*MZ**2 + 24*cHQ1*MZ**2 + 24*cHQ3*MZ**2 + 5*cll1221*MZ**2))*sth**4*vevhat**4 - 1728*cbWRe*ee*LambdaSMEFT**2*MB*MZ**2*sth*vevhat*cmath.sqrt(2) - 1728*cbBRe*cth*ee*LambdaSMEFT**2*MB*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 10944*cbWRe*ee*LambdaSMEFT**2*MB*MZ**2*sth**3*vevhat*cmath.sqrt(2) + 9216*cbBRe*cth*ee*LambdaSMEFT**2*MB*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 25344*cbWRe*ee*LambdaSMEFT**2*MB*MZ**2*sth**5*vevhat*cmath.sqrt(2) - 16128*cbBRe*cth*ee*LambdaSMEFT**2*MB*MZ**2*sth**6*vevhat*cmath.sqrt(2) + 25344*cbWRe*ee*LambdaSMEFT**2*MB*MZ**2*sth**7*vevhat*cmath.sqrt(2) + 9216*cbBRe*cth*ee*LambdaSMEFT**2*MB*MZ**2*sth**8*vevhat*cmath.sqrt(2) - 9216*cbWRe*ee*LambdaSMEFT**2*MB*MZ**2*sth**9*vevhat*cmath.sqrt(2) - 432*cbWRe*(4*cHbq - cHDD - 2*cHl311 - 2*cHl322 + 4*cHQ1 + 4*cHQ3 + cll1221)*ee*MB*MZ**2*sth*vevhat**3*cmath.sqrt(2) - 144*(-16*cbWRe*cHWB + 3*cbBRe*(4*cHbq - cHDD - 2*cHl311 - 2*cHl322 + 4*cHQ1 + 4*cHQ3 + cll1221))*cth*ee*MB*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) + 144*(16*cbBRe*cHWB + cbWRe*(60*cHbq - 11*cHDD - 22*cHl311 - 22*cHl322 + 60*cHQ1 + 60*cHQ3 + 11*cll1221))*ee*MB*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) + 1152*(-6*cbWRe*cHWB + cbBRe*(6*cHbq - cHDD - 2*cHl311 - 2*cHl322 + 6*cHQ1 + 6*cHQ3 + cll1221))*cth*ee*MB*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) - 1728*(4*cbBRe*cHWB + cbWRe*(8*cHbq - cHDD - 2*cHl311 - 2*cHl322 + 8*cHQ1 + 8*cHQ3 + cll1221))*ee*MB*MZ**2*sth**5*vevhat**3*cmath.sqrt(2) - 576*(-8*cbWRe*cHWB + cbBRe*(12*cHbq - cHDD - 2*cHl311 - 2*cHl322 + 12*cHQ1 + 12*cHQ3 + cll1221))*cth*ee*MB*MZ**2*sth**6*vevhat**3*cmath.sqrt(2) + 576*(8*cbBRe*cHWB + cbWRe*(12*cHbq - cHDD - 2*cHl311 - 2*cHl322 + 12*cHQ1 + 12*cHQ3 + cll1221))*ee*MB*MZ**2*sth**7*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*MB**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2*(1 - 2*sth**2)**2)',
(P.c,P.c__tilde__):'((-144*ee**2*LambdaSMEFT**4*MC**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 - 192*ee**2*LambdaSMEFT**4*(MC**2 + 5*MZ**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(55*MC**2 + 41*MZ**2)*sth**4 - 3584*ee**2*LambdaSMEFT**4*(2*MC**2 + MZ**2)*sth**6 + 2048*ee**2*LambdaSMEFT**4*(2*MC**2 + MZ**2)*sth**8 + 72*ee**2*LambdaSMEFT**2*(-4*cHj3*MC**2 + 2*cHl311*MC**2 + 2*cHl322*MC**2 - 12*cHu*MC**2 - cll1221*MC**2 + 4*cHj3*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2 + cHDD*(MC**2 - MZ**2) + 4*cHj1*(MC**2 - MZ**2))*vevhat**2 - 384*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2)*sth*vevhat**2 - 9*ee**2*(16*cHj3**2*MC**2 - 16*cHj3*cHl311*MC**2 + 4*cHl311**2*MC**2 - 16*cHj3*cHl322*MC**2 + 8*cHl311*cHl322*MC**2 + 4*cHl322**2*MC**2 + 96*cHj3*cHu*MC**2 - 48*cHl311*cHu*MC**2 - 48*cHl322*cHu*MC**2 + 16*cHu**2*MC**2 + 8*cHj3*cll1221*MC**2 - 4*cHl311*cll1221*MC**2 - 4*cHl322*cll1221*MC**2 + 24*cHu*cll1221*MC**2 + cll1221**2*MC**2 - 16*cHj3**2*MZ**2 + 16*cHj3*cHl311*MZ**2 - 4*cHl311**2*MZ**2 + 16*cHj3*cHl322*MZ**2 - 8*cHl311*cHl322*MZ**2 - 4*cHl322**2*MZ**2 - 16*cHu**2*MZ**2 - 8*cHj3*cll1221*MZ**2 + 4*cHl311*cll1221*MZ**2 + 4*cHl322*cll1221*MZ**2 - cll1221**2*MZ**2 + cHDD**2*(MC**2 - MZ**2) + 16*cHj1**2*(MC**2 - MZ**2) - 8*cHj1*(4*cHj3*MC**2 - 2*cHl311*MC**2 - 2*cHl322*MC**2 + 12*cHu*MC**2 + cll1221*MC**2 - 4*cHj3*MZ**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2) + 2*cHDD*(-4*cHj3*MC**2 + 2*cHl311*MC**2 + 2*cHl322*MC**2 - 12*cHu*MC**2 - cll1221*MC**2 + 4*cHj3*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2 + 4*cHj1*(MC**2 - MZ**2)))*vevhat**4 + 96*cHWB*(cHDD + 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 + 4*cHu - cll1221)*cth*ee**2*(2*MC**2 + MZ**2)*sth*vevhat**4 + 4*ee**2*(144*cHj3**2*MC**2 - 240*cHj3*cHl311*MC**2 + 84*cHl311**2*MC**2 - 240*cHj3*cHl322*MC**2 + 168*cHl311*cHl322*MC**2 + 84*cHl322**2*MC**2 + 864*cHj3*cHu*MC**2 - 336*cHl311*cHu*MC**2 - 336*cHl322*cHu*MC**2 + 144*cHu**2*MC**2 + 256*cHWB**2*MC**2 + 120*cHj3*cll1221*MC**2 - 84*cHl311*cll1221*MC**2 - 
84*cHl322*cll1221*MC**2 + 168*cHu*cll1221*MC**2 + 21*cll1221**2*MC**2 - 144*cHj3**2*MZ**2 + 96*cHj3*cHl311*MZ**2 - 12*cHl311**2*MZ**2 + 96*cHj3*cHl322*MZ**2 - 24*cHl311*cHl322*MZ**2 - 12*cHl322**2*MZ**2 + 48*cHl311*cHu*MZ**2 + 48*cHl322*cHu*MZ**2 - 144*cHu**2*MZ**2 + 128*cHWB**2*MZ**2 - 48*cHj3*cll1221*MZ**2 + 12*cHl311*cll1221*MZ**2 + 12*cHl322*cll1221*MZ**2 - 24*cHu*cll1221*MZ**2 - 3*cll1221**2*MZ**2 + 144*cHj1**2*(MC**2 - MZ**2) + 3*cHDD**2*(7*MC**2 - MZ**2) - 24*cHj1*(-10*cHl311*MC**2 - 10*cHl322*MC**2 + 36*cHu*MC**2 + 5*cll1221*MC**2 + 4*cHl311*MZ**2 + 4*cHl322*MZ**2 - 2*cll1221*MZ**2 + 12*cHj3*(MC**2 - MZ**2)) + 6*cHDD*(4*cHj1*(5*MC**2 - 2*MZ**2) + (2*cHl311 + 2*cHl322 - 4*cHu - cll1221)*(7*MC**2 - MZ**2) + cHj3*(-20*MC**2 + 8*MZ**2)))*sth**2*vevhat**4 + 64*cHWB*(cHDD - 12*cHj1 + 12*cHj3 + 2*cHl311 + 2*cHl322 - 12*cHu - cll1221)*cth*ee**2*(2*MC**2 + MZ**2)*sth**3*vevhat**4 - 4*ee**2*(144*cHj3**2*MC**2 - 336*cHj3*cHl311*MC**2 + 68*cHl311**2*MC**2 - 336*cHj3*cHl322*MC**2 + 136*cHl311*cHl322*MC**2 + 68*cHl322**2*MC**2 + 864*cHj3*cHu*MC**2 - 240*cHl311*cHu*MC**2 - 240*cHl322*cHu*MC**2 + 144*cHu**2*MC**2 + 256*cHWB**2*MC**2 + 168*cHj3*cll1221*MC**2 - 68*cHl311*cll1221*MC**2 - 68*cHl322*cll1221*MC**2 + 120*cHu*cll1221*MC**2 + 17*cll1221**2*MC**2 - 144*cHj3**2*MZ**2 + 48*cHj3*cHl311*MZ**2 - 20*cHl311**2*MZ**2 + 48*cHj3*cHl322*MZ**2 - 40*cHl311*cHl322*MZ**2 - 20*cHl322**2*MZ**2 + 96*cHl311*cHu*MZ**2 + 96*cHl322*cHu*MZ**2 - 144*cHu**2*MZ**2 + 128*cHWB**2*MZ**2 - 24*cHj3*cll1221*MZ**2 + 20*cHl311*cll1221*MZ**2 + 20*cHl322*cll1221*MZ**2 - 48*cHu*cll1221*MZ**2 - 5*cll1221**2*MZ**2 + cHDD**2*(17*MC**2 - 5*MZ**2) + 144*cHj1**2*(MC**2 - MZ**2) + 2*cHDD*(84*cHj1*MC**2 - 84*cHj3*MC**2 + 34*cHl311*MC**2 + 34*cHl322*MC**2 - 60*cHu*MC**2 - 17*cll1221*MC**2 - 12*cHj1*MZ**2 + 12*cHj3*MZ**2 - 10*cHl311*MZ**2 - 10*cHl322*MZ**2 + 24*cHu*MZ**2 + 5*cll1221*MZ**2) + 24*cHj1*(14*cHl311*MC**2 + 14*cHl322*MC**2 - 36*cHu*MC**2 - 7*cll1221*MC**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + 
cll1221*MZ**2 - 12*cHj3*(MC**2 - MZ**2)))*sth**4*vevhat**4 - 18432*cth*MZ**2*(cuBIm*cuWIm*(-4*MC**2 + MZ**2) + cuBRe*cuWRe*(8*MC**2 + MZ**2))*sth**7*vevhat**2*yc**2 + 4608*MZ**2*(3*cuWIm**2*(4*MC**2 - MZ**2) + 2*cuBRe**2*(8*MC**2 + MZ**2) - 3*cuWRe**2*(8*MC**2 + MZ**2) + cuBIm**2*(-8*MC**2 + 2*MZ**2))*sth**8*vevhat**2*yc**2 + 9216*cth*MZ**2*(cuBIm*cuWIm*(-4*MC**2 + MZ**2) + cuBRe*cuWRe*(8*MC**2 + MZ**2))*sth**9*vevhat**2*yc**2 + 4608*MZ**2*(-4*cuWIm**2*MC**2 + 8*cuWRe**2*MC**2 + cuWIm**2*MZ**2 + cuWRe**2*MZ**2 + cuBIm**2*(4*MC**2 - MZ**2) - cuBRe**2*(8*MC**2 + MZ**2))*sth**10*vevhat**2*yc**2 + 32*sth**4*vevhat**2*(60*cHj3*ee**2*LambdaSMEFT**2*MC**2 + 26*cHl311*ee**2*LambdaSMEFT**2*MC**2 + 26*cHl322*ee**2*LambdaSMEFT**2*MC**2 - 204*cHu*ee**2*LambdaSMEFT**2*MC**2 - 13*cll1221*ee**2*LambdaSMEFT**2*MC**2 + 84*cHj3*ee**2*LambdaSMEFT**2*MZ**2 - 14*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 14*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 48*cHu*ee**2*LambdaSMEFT**2*MZ**2 + 7*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + cHDD*ee**2*LambdaSMEFT**2*(13*MC**2 - 7*MZ**2) - 12*cHj1*ee**2*LambdaSMEFT**2*(5*MC**2 + 7*MZ**2) - 144*cuBIm**2*MC**2*MZ**2*yc**2 + 288*cuBRe**2*MC**2*MZ**2*yc**2 + 864*cuWIm**2*MC**2*MZ**2*yc**2 - 1728*cuWRe**2*MC**2*MZ**2*yc**2 + 36*cuBIm**2*MZ**4*yc**2 + 36*cuBRe**2*MZ**4*yc**2 - 216*cuWIm**2*MZ**4*yc**2 - 216*cuWRe**2*MZ**4*yc**2) - 128*sth**6*vevhat**2*(24*cHj3*ee**2*LambdaSMEFT**2*MC**2 + 4*cHl311*ee**2*LambdaSMEFT**2*MC**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MC**2 - 24*cHu*ee**2*LambdaSMEFT**2*MC**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MC**2 + 12*cHj3*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 12*cHu*ee**2*LambdaSMEFT**2*MZ**2 - cll1221*ee**2*LambdaSMEFT**2*MZ**2 + cHDD*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) - 12*cHj1*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) - 180*cuBIm**2*MC**2*MZ**2*yc**2 + 360*cuBRe**2*MC**2*MZ**2*yc**2 + 468*cuWIm**2*MC**2*MZ**2*yc**2 - 936*cuWRe**2*MC**2*MZ**2*yc**2 + 45*cuBIm**2*MZ**4*yc**2 + 
45*cuBRe**2*MZ**4*yc**2 - 117*cuWIm**2*MZ**4*yc**2 - 117*cuWRe**2*MZ**4*yc**2) - 96*sth**2*vevhat**2*(-4*cHj3*ee**2*LambdaSMEFT**2*MC**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MC**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MC**2 - 44*cHu*ee**2*LambdaSMEFT**2*MC**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MC**2 + 16*cHj3*ee**2*LambdaSMEFT**2*MZ**2 - 6*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 6*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 4*cHu*ee**2*LambdaSMEFT**2*MZ**2 + 3*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 4*cHj1*ee**2*LambdaSMEFT**2*(MC**2 - 4*MZ**2) + 3*cHDD*ee**2*LambdaSMEFT**2*(MC**2 - MZ**2) + 48*cuWIm**2*MC**2*MZ**2*yc**2 - 96*cuWRe**2*MC**2*MZ**2*yc**2 - 12*cuWIm**2*MZ**4*yc**2 - 12*cuWRe**2*MZ**4*yc**2) - 256*cth*sth**5*vevhat**2*(8*cHWB*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) - 45*MZ**2*(cuBIm*cuWIm*(-4*MC**2 + MZ**2) + cuBRe*cuWRe*(8*MC**2 + MZ**2))*yc**2) + 256*cth*sth**3*vevhat**2*(7*cHWB*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) - 9*MZ**2*(cuBIm*cuWIm*(-4*MC**2 + MZ**2) + cuBRe*cuWRe*(8*MC**2 + MZ**2))*yc**2) - 1728*cuWRe*ee*LambdaSMEFT**2*MC*MZ**2*sth*vevhat*yc*cmath.sqrt(2) + 1728*cth*cuBRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**2*vevhat*yc*cmath.sqrt(2) + 13248*cuWRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**3*vevhat*yc*cmath.sqrt(2) - 11520*cth*cuBRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**4*vevhat*yc*cmath.sqrt(2) - 36864*cuWRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**5*vevhat*yc*cmath.sqrt(2) + 25344*cth*cuBRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**6*vevhat*yc*cmath.sqrt(2) + 43776*cuWRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**7*vevhat*yc*cmath.sqrt(2) - 18432*cth*cuBRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**8*vevhat*yc*cmath.sqrt(2) - 18432*cuWRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**9*vevhat*yc*cmath.sqrt(2) + 432*(cHDD + 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 + 4*cHu - cll1221)*cuWRe*ee*MC*MZ**2*sth*vevhat**3*yc*cmath.sqrt(2) - 144*cth*(3*cHDD*cuBRe + 12*cHj1*cuBRe - 12*cHj3*cuBRe + 6*cHl311*cuBRe + 6*cHl322*cuBRe + 12*cHu*cuBRe - 3*cll1221*cuBRe - 32*cHWB*cuWRe)*ee*MC*MZ**2*sth**2*vevhat**3*yc*cmath.sqrt(2) - 144*(32*cHWB*cuBRe + (7*cHDD + 
60*cHj1 - 60*cHj3 + 14*cHl311 + 14*cHl322 + 60*cHu - 7*cll1221)*cuWRe)*ee*MC*MZ**2*sth**3*vevhat**3*yc*cmath.sqrt(2) + 576*cth*(cHDD*cuBRe + 12*cHj1*cuBRe - 12*cHj3*cuBRe + 2*cHl311*cuBRe + 2*cHl322*cuBRe + 12*cHu*cuBRe - cll1221*cuBRe - 24*cHWB*cuWRe)*ee*MC*MZ**2*sth**4*vevhat**3*yc*cmath.sqrt(2) + 13824*(cHWB*cuBRe + (cHj1 - cHj3 + cHu)*cuWRe)*ee*MC*MZ**2*sth**5*vevhat**3*yc*cmath.sqrt(2) + 576*cth*(cHDD*cuBRe - 12*cHj1*cuBRe + 12*cHj3*cuBRe + 2*cHl311*cuBRe + 2*cHl322*cuBRe - 12*cHu*cuBRe - cll1221*cuBRe + 16*cHWB*cuWRe)*ee*MC*MZ**2*sth**6*vevhat**3*yc*cmath.sqrt(2) - 576*(16*cHWB*cuBRe + (-cHDD + 12*cHj1 - 12*cHj3 - 2*cHl311 - 2*cHl322 + 12*cHu + cll1221)*cuWRe)*ee*MC*MZ**2*sth**7*vevhat**3*yc*cmath.sqrt(2))*cmath.sqrt(-4*MC**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2*(1 - 2*sth**2)**2)',
(P.d,P.d__tilde__):'((-144*ee**2*LambdaSMEFT**4*MD**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 + 192*ee**2*LambdaSMEFT**4*(MD**2 - 4*MZ**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(19*MD**2 + 23*MZ**2)*sth**4 - 1280*ee**2*LambdaSMEFT**4*(2*MD**2 + MZ**2)*sth**6 + 512*ee**2*LambdaSMEFT**4*(2*MD**2 + MZ**2)*sth**8 + 72*ee**2*LambdaSMEFT**2*(12*cHd*MD**2 + (cHDD - 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 - cll1221)*(MD**2 - MZ**2))*vevhat**2 - 192*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2)*sth*vevhat**2 - 9*ee**2*(24*cHd*(cHDD - 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 - cll1221)*MD**2 + 16*cHd**2*(MD**2 - MZ**2) + (cHDD - 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 - cll1221)**2*(MD**2 - MZ**2))*vevhat**4 - 48*cHWB*(4*cHd - cHDD + 4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MD**2 + MZ**2)*sth*vevhat**4 + 4*ee**2*(144*cHj1**2*MD**2 + 288*cHj1*cHj3*MD**2 + 144*cHj3**2*MD**2 - 192*cHj1*cHl311*MD**2 - 192*cHj3*cHl311*MD**2 + 60*cHl311**2*MD**2 - 192*cHj1*cHl322*MD**2 - 192*cHj3*cHl322*MD**2 + 120*cHl311*cHl322*MD**2 + 60*cHl322**2*MD**2 + 64*cHWB**2*MD**2 + 96*cHj1*cll1221*MD**2 + 96*cHj3*cll1221*MD**2 - 60*cHl311*cll1221*MD**2 - 60*cHl322*cll1221*MD**2 + 15*cll1221**2*MD**2 - 144*cHj1**2*MZ**2 - 288*cHj1*cHj3*MZ**2 - 144*cHj3**2*MZ**2 + 120*cHj1*cHl311*MZ**2 + 120*cHj3*cHl311*MZ**2 - 24*cHl311**2*MZ**2 + 120*cHj1*cHl322*MZ**2 + 120*cHj3*cHl322*MZ**2 - 48*cHl311*cHl322*MZ**2 - 24*cHl322**2*MZ**2 + 32*cHWB**2*MZ**2 - 60*cHj1*cll1221*MZ**2 - 60*cHj3*cll1221*MZ**2 + 24*cHl311*cll1221*MZ**2 + 24*cHl322*cll1221*MZ**2 - 6*cll1221**2*MZ**2 + 3*cHDD**2*(5*MD**2 - 2*MZ**2) + 144*cHd**2*(MD**2 - MZ**2) + 12*cHd*(16*cHDD*MD**2 - 72*cHj1*MD**2 - 72*cHj3*MD**2 + 32*cHl311*MD**2 + 32*cHl322*MD**2 - 16*cll1221*MD**2 - cHDD*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2) - 6*cHDD*(2*cHj1*(8*MD**2 - 5*MZ**2) + 2*cHj3*(8*MD**2 - 5*MZ**2) - (2*cHl311 + 2*cHl322 - cll1221)*(5*MD**2 - 2*MZ**2)))*sth**2*vevhat**4 + 32*cHWB*(12*cHd - cHDD + 12*cHj1 + 12*cHj3 - 2*cHl311 - 2*cHl322 + 
cll1221)*cth*ee**2*(2*MD**2 + MZ**2)*sth**3*vevhat**4 - 4*ee**2*(144*cHj1**2*MD**2 + 288*cHj1*cHj3*MD**2 + 144*cHj3**2*MD**2 - 240*cHj1*cHl311*MD**2 - 240*cHj3*cHl311*MD**2 + 68*cHl311**2*MD**2 - 240*cHj1*cHl322*MD**2 - 240*cHj3*cHl322*MD**2 + 136*cHl311*cHl322*MD**2 + 68*cHl322**2*MD**2 + 64*cHWB**2*MD**2 + 120*cHj1*cll1221*MD**2 + 120*cHj3*cll1221*MD**2 - 68*cHl311*cll1221*MD**2 - 68*cHl322*cll1221*MD**2 + 17*cll1221**2*MD**2 - 144*cHj1**2*MZ**2 - 288*cHj1*cHj3*MZ**2 - 144*cHj3**2*MZ**2 + 96*cHj1*cHl311*MZ**2 + 96*cHj3*cHl311*MZ**2 - 20*cHl311**2*MZ**2 + 96*cHj1*cHl322*MZ**2 + 96*cHj3*cHl322*MZ**2 - 40*cHl311*cHl322*MZ**2 - 20*cHl322**2*MZ**2 + 32*cHWB**2*MZ**2 - 48*cHj1*cll1221*MZ**2 - 48*cHj3*cll1221*MZ**2 + 20*cHl311*cll1221*MZ**2 + 20*cHl322*cll1221*MZ**2 - 5*cll1221**2*MZ**2 + cHDD**2*(17*MD**2 - 5*MZ**2) + 2*cHDD*(2*cHl311 + 2*cHl322 - cll1221)*(17*MD**2 - 5*MZ**2) - 24*cHDD*cHj1*(5*MD**2 - 2*MZ**2) - 24*cHDD*cHj3*(5*MD**2 - 2*MZ**2) + 144*cHd**2*(MD**2 - MZ**2) + 24*cHd*(7*cHDD*MD**2 - 36*cHj1*MD**2 - 36*cHj3*MD**2 + 14*cHl311*MD**2 + 14*cHl322*MD**2 - 7*cll1221*MD**2 - cHDD*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2))*sth**4*vevhat**4 + 18432*cth*MZ**2*(cdBIm*cdWIm*(-4*MD**2 + MZ**2) + cdBRe*cdWRe*(8*MD**2 + MZ**2))*sth**7*vevhat**2*ydo**2 + 4608*MZ**2*(3*cdWIm**2*(4*MD**2 - MZ**2) + 2*cdBRe**2*(8*MD**2 + MZ**2) - 3*cdWRe**2*(8*MD**2 + MZ**2) + cdBIm**2*(-8*MD**2 + 2*MZ**2))*sth**8*vevhat**2*ydo**2 - 9216*cth*MZ**2*(cdBIm*cdWIm*(-4*MD**2 + MZ**2) + cdBRe*cdWRe*(8*MD**2 + MZ**2))*sth**9*vevhat**2*ydo**2 + 4608*MZ**2*(-4*cdWIm**2*MD**2 + 8*cdWRe**2*MD**2 + cdWIm**2*MZ**2 + cdWRe**2*MZ**2 + cdBIm**2*(4*MD**2 - MZ**2) - cdBRe**2*(8*MD**2 + MZ**2))*sth**10*vevhat**2*ydo**2 - 64*sth**6*vevhat**2*(24*cHj1*ee**2*LambdaSMEFT**2*MD**2 + 24*cHj3*ee**2*LambdaSMEFT**2*MD**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MD**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MD**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MD**2 + 12*cHj1*ee**2*LambdaSMEFT**2*MZ**2 + 
12*cHj3*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 12*cHd*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) - cHDD*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) - 360*cdBIm**2*MD**2*MZ**2*ydo**2 + 720*cdBRe**2*MD**2*MZ**2*ydo**2 + 936*cdWIm**2*MD**2*MZ**2*ydo**2 - 1872*cdWRe**2*MD**2*MZ**2*ydo**2 + 90*cdBIm**2*MZ**4*ydo**2 + 90*cdBRe**2*MZ**4*ydo**2 - 234*cdWIm**2*MZ**4*ydo**2 - 234*cdWRe**2*MZ**4*ydo**2) + 32*sth**4*vevhat**2*(12*cHj1*ee**2*LambdaSMEFT**2*MD**2 + 12*cHj3*ee**2*LambdaSMEFT**2*MD**2 + 14*cHl311*ee**2*LambdaSMEFT**2*MD**2 + 14*cHl322*ee**2*LambdaSMEFT**2*MD**2 - 7*cll1221*ee**2*LambdaSMEFT**2*MD**2 + 60*cHj1*ee**2*LambdaSMEFT**2*MZ**2 + 60*cHj3*ee**2*LambdaSMEFT**2*MZ**2 - 20*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 20*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 10*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + cHDD*ee**2*LambdaSMEFT**2*(7*MD**2 - 10*MZ**2) + 12*cHd*ee**2*LambdaSMEFT**2*(13*MD**2 + 2*MZ**2) - 144*cdBIm**2*MD**2*MZ**2*ydo**2 + 288*cdBRe**2*MD**2*MZ**2*ydo**2 + 864*cdWIm**2*MD**2*MZ**2*ydo**2 - 1728*cdWRe**2*MD**2*MZ**2*ydo**2 + 36*cdBIm**2*MZ**4*ydo**2 + 36*cdBRe**2*MZ**4*ydo**2 - 216*cdWIm**2*MZ**4*ydo**2 - 216*cdWRe**2*MZ**4*ydo**2) - 96*sth**2*vevhat**2*(-8*cHj1*ee**2*LambdaSMEFT**2*MD**2 - 8*cHj3*ee**2*LambdaSMEFT**2*MD**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MD**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MD**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MD**2 + 14*cHj1*ee**2*LambdaSMEFT**2*MZ**2 + 14*cHj3*ee**2*LambdaSMEFT**2*MZ**2 - 6*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 6*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 3*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 3*cHDD*ee**2*LambdaSMEFT**2*(MD**2 - MZ**2) + 2*cHd*ee**2*LambdaSMEFT**2*(20*MD**2 + MZ**2) + 48*cdWIm**2*MD**2*MZ**2*ydo**2 - 96*cdWRe**2*MD**2*MZ**2*ydo**2 - 12*cdWIm**2*MZ**4*ydo**2 - 12*cdWRe**2*MZ**4*ydo**2) + 128*cth*sth**3*vevhat**2*(5*cHWB*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) + 18*MZ**2*(cdBIm*cdWIm*(-4*MD**2 + MZ**2) + cdBRe*cdWRe*(8*MD**2 + 
MZ**2))*ydo**2) - 256*cth*sth**5*vevhat**2*(2*cHWB*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) + 45*MZ**2*(cdBIm*cdWIm*(-4*MD**2 + MZ**2) + cdBRe*cdWRe*(8*MD**2 + MZ**2))*ydo**2) - 1728*cdWRe*ee*LambdaSMEFT**2*MD*MZ**2*sth*vevhat*ydo*cmath.sqrt(2) - 1728*cdBRe*cth*ee*LambdaSMEFT**2*MD*MZ**2*sth**2*vevhat*ydo*cmath.sqrt(2) + 10944*cdWRe*ee*LambdaSMEFT**2*MD*MZ**2*sth**3*vevhat*ydo*cmath.sqrt(2) + 9216*cdBRe*cth*ee*LambdaSMEFT**2*MD*MZ**2*sth**4*vevhat*ydo*cmath.sqrt(2) - 25344*cdWRe*ee*LambdaSMEFT**2*MD*MZ**2*sth**5*vevhat*ydo*cmath.sqrt(2) - 16128*cdBRe*cth*ee*LambdaSMEFT**2*MD*MZ**2*sth**6*vevhat*ydo*cmath.sqrt(2) + 25344*cdWRe*ee*LambdaSMEFT**2*MD*MZ**2*sth**7*vevhat*ydo*cmath.sqrt(2) + 9216*cdBRe*cth*ee*LambdaSMEFT**2*MD*MZ**2*sth**8*vevhat*ydo*cmath.sqrt(2) - 9216*cdWRe*ee*LambdaSMEFT**2*MD*MZ**2*sth**9*vevhat*ydo*cmath.sqrt(2) - 432*cdWRe*(4*cHd - cHDD + 4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*ee*MD*MZ**2*sth*vevhat**3*ydo*cmath.sqrt(2) - 144*(-16*cdWRe*cHWB + 3*cdBRe*(4*cHd - cHDD + 4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MD*MZ**2*sth**2*vevhat**3*ydo*cmath.sqrt(2) + 144*(16*cdBRe*cHWB + cdWRe*(60*cHd - 11*cHDD + 60*cHj1 + 60*cHj3 - 22*cHl311 - 22*cHl322 + 11*cll1221))*ee*MD*MZ**2*sth**3*vevhat**3*ydo*cmath.sqrt(2) + 1152*(-6*cdWRe*cHWB + cdBRe*(6*cHd - cHDD + 6*cHj1 + 6*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MD*MZ**2*sth**4*vevhat**3*ydo*cmath.sqrt(2) - 1728*(4*cdBRe*cHWB + cdWRe*(8*cHd - cHDD + 8*cHj1 + 8*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*ee*MD*MZ**2*sth**5*vevhat**3*ydo*cmath.sqrt(2) - 576*(-8*cdWRe*cHWB + cdBRe*(12*cHd - cHDD + 12*cHj1 + 12*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MD*MZ**2*sth**6*vevhat**3*ydo*cmath.sqrt(2) + 576*(8*cdBRe*cHWB + cdWRe*(12*cHd - cHDD + 12*cHj1 + 12*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*ee*MD*MZ**2*sth**7*vevhat**3*ydo*cmath.sqrt(2))*cmath.sqrt(-4*MD**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2*(1 - 2*sth**2)**2)',
(P.e__minus__,P.e__plus__):'((-16*ee**2*LambdaSMEFT**4*Me**2 + 16*ee**2*LambdaSMEFT**4*MZ**2 - 64*ee**2*LambdaSMEFT**4*(Me**2 + 2*MZ**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(11*Me**2 + 7*MZ**2)*sth**4 - 768*ee**2*LambdaSMEFT**4*(2*Me**2 + MZ**2)*sth**6 + 512*ee**2*LambdaSMEFT**4*(2*Me**2 + MZ**2)*sth**8 + 8*ee**2*LambdaSMEFT**2*(12*cHe11*Me**2 + cHDD*(Me**2 - MZ**2) - (4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*(Me**2 - MZ**2))*vevhat**2 - 64*cHWB*cth*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2)*sth*vevhat**2 - 32*(2*cHl311*ee**2*LambdaSMEFT**2*Me**2 + 2*cHl322*ee**2*LambdaSMEFT**2*Me**2 - cll1221*ee**2*LambdaSMEFT**2*Me**2 + 6*cHl111*ee**2*LambdaSMEFT**2*MZ**2 + 4*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 16*ceWIm11**2*Me**2*MZ**2 - 32*ceWRe11**2*Me**2*MZ**2 - 4*ceWIm11**2*MZ**4 - 4*ceWRe11**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(Me**2 - MZ**2) + 2*cHe11*ee**2*LambdaSMEFT**2*(8*Me**2 + MZ**2))*sth**2*vevhat**2 + 128*cth*(3*cHWB*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2) + 2*MZ**2*(ceBIm11*ceWIm11*(-4*Me**2 + MZ**2) + ceBRe11*ceWRe11*(8*Me**2 + MZ**2)))*sth**3*vevhat**2 + 32*(3*cHDD*ee**2*LambdaSMEFT**2*Me**2 + 12*cHl111*ee**2*LambdaSMEFT**2*Me**2 + 18*cHl311*ee**2*LambdaSMEFT**2*Me**2 + 6*cHl322*ee**2*LambdaSMEFT**2*Me**2 - 3*cll1221*ee**2*LambdaSMEFT**2*Me**2 + 12*cHl111*ee**2*LambdaSMEFT**2*MZ**2 + 12*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 16*ceBIm11**2*Me**2*MZ**2 + 32*ceBRe11**2*Me**2*MZ**2 + 96*ceWIm11**2*Me**2*MZ**2 - 192*ceWRe11**2*Me**2*MZ**2 + 4*ceBIm11**2*MZ**4 + 4*ceBRe11**2*MZ**4 - 24*ceWIm11**2*MZ**4 - 24*ceWRe11**2*MZ**4 + 4*cHe11*ee**2*LambdaSMEFT**2*(7*Me**2 + 2*MZ**2))*sth**4*vevhat**2 - 256*cth*(2*cHWB*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2) + 5*MZ**2*(ceBIm11*ceWIm11*(-4*Me**2 + MZ**2) + ceBRe11*ceWRe11*(8*Me**2 + MZ**2)))*sth**5*vevhat**2 - 64*(8*cHl111*ee**2*LambdaSMEFT**2*Me**2 + 12*cHl311*ee**2*LambdaSMEFT**2*Me**2 + 4*cHl322*ee**2*LambdaSMEFT**2*Me**2 - 2*cll1221*ee**2*LambdaSMEFT**2*Me**2 + 
4*cHl111*ee**2*LambdaSMEFT**2*MZ**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 40*ceBIm11**2*Me**2*MZ**2 + 80*ceBRe11**2*Me**2*MZ**2 + 104*ceWIm11**2*Me**2*MZ**2 - 208*ceWRe11**2*Me**2*MZ**2 + 10*ceBIm11**2*MZ**4 + 10*ceBRe11**2*MZ**4 - 26*ceWIm11**2*MZ**4 - 26*ceWRe11**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2) + 4*cHe11*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2))*sth**6*vevhat**2 + 2048*cth*MZ**2*(ceBIm11*ceWIm11*(-4*Me**2 + MZ**2) + ceBRe11*ceWRe11*(8*Me**2 + MZ**2))*sth**7*vevhat**2 + 512*MZ**2*(3*ceWIm11**2*(4*Me**2 - MZ**2) + 2*ceBRe11**2*(8*Me**2 + MZ**2) - 3*ceWRe11**2*(8*Me**2 + MZ**2) + ceBIm11**2*(-8*Me**2 + 2*MZ**2))*sth**8*vevhat**2 - 1024*cth*MZ**2*(ceBIm11*ceWIm11*(-4*Me**2 + MZ**2) + ceBRe11*ceWRe11*(8*Me**2 + MZ**2))*sth**9*vevhat**2 + 512*MZ**2*(-4*ceWIm11**2*Me**2 + 8*ceWRe11**2*Me**2 + ceWIm11**2*MZ**2 + ceWRe11**2*MZ**2 + ceBIm11**2*(4*Me**2 - MZ**2) - ceBRe11**2*(8*Me**2 + MZ**2))*sth**10*vevhat**2 + ee**2*(-24*cHDD*cHe11*Me**2 + 24*cHe11*(4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*Me**2 - 16*cHe11**2*(Me**2 - MZ**2) + 2*cHDD*(4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*(Me**2 - MZ**2) - (4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)**2*(Me**2 - MZ**2) + cHDD**2*(-Me**2 + MZ**2))*vevhat**4 + 16*cHWB*(cHDD - 4*cHe11 - 4*cHl111 - 2*cHl311 + 2*cHl322 - cll1221)*cth*ee**2*(2*Me**2 + MZ**2)*sth*vevhat**4 + 4*ee**2*(3*cHDD**2*Me**2 - 96*cHe11*cHl111*Me**2 + 16*cHl111**2*Me**2 - 4*cHl311**2*Me**2 - 32*cHl111*cHl322*Me**2 - 8*cHl311*cHl322*Me**2 + 12*cHl322**2*Me**2 + 64*cHWB**2*Me**2 + 16*cHl111*cll1221*Me**2 + 4*cHl311*cll1221*Me**2 - 12*cHl322*cll1221*Me**2 + 3*cll1221**2*Me**2 - 16*cHl111**2*MZ**2 - 24*cHl111*cHl311*MZ**2 - 8*cHl311**2*MZ**2 + 8*cHl111*cHl322*MZ**2 + 8*cHl311*cHl322*MZ**2 + 32*cHWB**2*MZ**2 - 4*cHl111*cll1221*MZ**2 - 4*cHl311*cll1221*MZ**2 + 16*cHe11**2*(Me**2 - MZ**2) + 4*cHe11*(2*cHl322 - cll1221)*(4*Me**2 - MZ**2) - 8*cHe11*cHl311*(8*Me**2 + MZ**2) + 
2*cHDD*(8*cHe11*Me**2 - 8*cHl111*Me**2 - 2*cHl311*Me**2 + 6*cHl322*Me**2 - 3*cll1221*Me**2 - 2*cHe11*MZ**2 + 2*cHl111*MZ**2 + 2*cHl311*MZ**2))*sth**2*vevhat**4 + 32*cHWB*(cHDD + 4*cHe11 + 4*cHl111 + 6*cHl311 + 2*cHl322 - cll1221)*cth*ee**2*(2*Me**2 + MZ**2)*sth**3*vevhat**4 - 4*ee**2*(16*cHl111**2*Me**2 - 16*cHl111*cHl311*Me**2 - 28*cHl311**2*Me**2 - 48*cHl111*cHl322*Me**2 - 40*cHl311*cHl322*Me**2 + 4*cHl322**2*Me**2 + 64*cHWB**2*Me**2 + 24*cHl111*cll1221*Me**2 + 20*cHl311*cll1221*Me**2 - 4*cHl322*cll1221*Me**2 + cll1221**2*Me**2 - 16*cHl111**2*MZ**2 - 32*cHl111*cHl311*MZ**2 - 20*cHl311**2*MZ**2 - 8*cHl311*cHl322*MZ**2 - 4*cHl322**2*MZ**2 + 32*cHWB**2*MZ**2 + 4*cHl311*cll1221*MZ**2 + 4*cHl322*cll1221*MZ**2 - cll1221**2*MZ**2 + cHDD**2*(Me**2 - MZ**2) + 16*cHe11**2*(Me**2 - MZ**2) - 2*cHDD*(-4*cHe11*Me**2 + 12*cHl111*Me**2 + 10*cHl311*Me**2 - 2*cHl322*Me**2 + cll1221*Me**2 + 4*cHe11*MZ**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2) - 8*cHe11*(12*cHl111*Me**2 - (2*cHl322 - cll1221)*(Me**2 - MZ**2) + 2*cHl311*(5*Me**2 + MZ**2)))*sth**4*vevhat**4 - 192*ceWRe11*ee*LambdaSMEFT**2*Me*MZ**2*sth*vevhat*cmath.sqrt(2) - 192*ceBRe11*cth*ee*LambdaSMEFT**2*Me*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 1728*ceWRe11*ee*LambdaSMEFT**2*Me*MZ**2*sth**3*vevhat*cmath.sqrt(2) + 1536*ceBRe11*cth*ee*LambdaSMEFT**2*Me*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 5376*ceWRe11*ee*LambdaSMEFT**2*Me*MZ**2*sth**5*vevhat*cmath.sqrt(2) - 3840*ceBRe11*cth*ee*LambdaSMEFT**2*Me*MZ**2*sth**6*vevhat*cmath.sqrt(2) + 6912*ceWRe11*ee*LambdaSMEFT**2*Me*MZ**2*sth**7*vevhat*cmath.sqrt(2) + 3072*ceBRe11*cth*ee*LambdaSMEFT**2*Me*MZ**2*sth**8*vevhat*cmath.sqrt(2) - 3072*ceWRe11*ee*LambdaSMEFT**2*Me*MZ**2*sth**9*vevhat*cmath.sqrt(2) + 48*ceWRe11*(cHDD - 4*cHe11 - 4*cHl111 - 2*cHl311 + 2*cHl322 - cll1221)*ee*Me*MZ**2*sth*vevhat**3*cmath.sqrt(2) + 48*(16*ceWRe11*cHWB + ceBRe11*(cHDD - 4*cHe11 - 4*cHl111 - 2*cHl311 + 2*cHl322 - cll1221))*cth*ee*Me*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) + 48*(16*ceBRe11*cHWB + ceWRe11*(-cHDD 
+ 20*cHe11 + 20*cHl111 + 18*cHl311 - 2*cHl322 + cll1221))*ee*Me*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) + 768*(ceBRe11*(cHe11 + cHl111 + cHl311) - 3*ceWRe11*cHWB)*cth*ee*Me*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) - 192*(12*ceBRe11*cHWB + ceWRe11*(cHDD + 8*cHe11 + 8*cHl111 + 10*cHl311 + 2*cHl322 - cll1221))*ee*Me*MZ**2*sth**5*vevhat**3*cmath.sqrt(2) - 192*(-8*ceWRe11*cHWB + ceBRe11*(cHDD + 4*cHe11 + 4*cHl111 + 6*cHl311 + 2*cHl322 - cll1221))*cth*ee*Me*MZ**2*sth**6*vevhat**3*cmath.sqrt(2) + 192*(8*ceBRe11*cHWB + ceWRe11*(cHDD + 4*cHe11 + 4*cHl111 + 6*cHl311 + 2*cHl322 - cll1221))*ee*Me*MZ**2*sth**7*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*Me**2 + MZ**2))/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2*(1 - 2*sth**2)**2)',
(P.mu__minus__,P.mu__plus__):'((-16*ee**2*LambdaSMEFT**4*MMU**2 + 16*ee**2*LambdaSMEFT**4*MZ**2 - 64*ee**2*LambdaSMEFT**4*(MMU**2 + 2*MZ**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(11*MMU**2 + 7*MZ**2)*sth**4 - 768*ee**2*LambdaSMEFT**4*(2*MMU**2 + MZ**2)*sth**6 + 512*ee**2*LambdaSMEFT**4*(2*MMU**2 + MZ**2)*sth**8 + 8*ee**2*LambdaSMEFT**2*(12*cHe22*MMU**2 + cHDD*(MMU**2 - MZ**2) - (4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)*(MMU**2 - MZ**2))*vevhat**2 - 64*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2)*sth*vevhat**2 - 32*(2*cHl311*ee**2*LambdaSMEFT**2*MMU**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MMU**2 - cll1221*ee**2*LambdaSMEFT**2*MMU**2 + 6*cHl122*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 16*ceWIm22**2*MMU**2*MZ**2 - 32*ceWRe22**2*MMU**2*MZ**2 - 4*ceWIm22**2*MZ**4 - 4*ceWRe22**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(MMU**2 - MZ**2) + 2*cHe22*ee**2*LambdaSMEFT**2*(8*MMU**2 + MZ**2))*sth**2*vevhat**2 + 128*cth*(3*cHWB*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2) + 2*MZ**2*(ceBIm22*ceWIm22*(-4*MMU**2 + MZ**2) + ceBRe22*ceWRe22*(8*MMU**2 + MZ**2)))*sth**3*vevhat**2 + 32*(3*cHDD*ee**2*LambdaSMEFT**2*MMU**2 + 12*cHl122*ee**2*LambdaSMEFT**2*MMU**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MMU**2 + 18*cHl322*ee**2*LambdaSMEFT**2*MMU**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MMU**2 + 12*cHl122*ee**2*LambdaSMEFT**2*MZ**2 + 12*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 16*ceBIm22**2*MMU**2*MZ**2 + 32*ceBRe22**2*MMU**2*MZ**2 + 96*ceWIm22**2*MMU**2*MZ**2 - 192*ceWRe22**2*MMU**2*MZ**2 + 4*ceBIm22**2*MZ**4 + 4*ceBRe22**2*MZ**4 - 24*ceWIm22**2*MZ**4 - 24*ceWRe22**2*MZ**4 + 4*cHe22*ee**2*LambdaSMEFT**2*(7*MMU**2 + 2*MZ**2))*sth**4*vevhat**2 - 256*cth*(2*cHWB*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2) + 5*MZ**2*(ceBIm22*ceWIm22*(-4*MMU**2 + MZ**2) + ceBRe22*ceWRe22*(8*MMU**2 + MZ**2)))*sth**5*vevhat**2 - 64*(8*cHl122*ee**2*LambdaSMEFT**2*MMU**2 + 4*cHl311*ee**2*LambdaSMEFT**2*MMU**2 + 12*cHl322*ee**2*LambdaSMEFT**2*MMU**2 - 
2*cll1221*ee**2*LambdaSMEFT**2*MMU**2 + 4*cHl122*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 40*ceBIm22**2*MMU**2*MZ**2 + 80*ceBRe22**2*MMU**2*MZ**2 + 104*ceWIm22**2*MMU**2*MZ**2 - 208*ceWRe22**2*MMU**2*MZ**2 + 10*ceBIm22**2*MZ**4 + 10*ceBRe22**2*MZ**4 - 26*ceWIm22**2*MZ**4 - 26*ceWRe22**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2) + 4*cHe22*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2))*sth**6*vevhat**2 + 2048*cth*MZ**2*(ceBIm22*ceWIm22*(-4*MMU**2 + MZ**2) + ceBRe22*ceWRe22*(8*MMU**2 + MZ**2))*sth**7*vevhat**2 + 512*MZ**2*(3*ceWIm22**2*(4*MMU**2 - MZ**2) + 2*ceBRe22**2*(8*MMU**2 + MZ**2) - 3*ceWRe22**2*(8*MMU**2 + MZ**2) + ceBIm22**2*(-8*MMU**2 + 2*MZ**2))*sth**8*vevhat**2 - 1024*cth*MZ**2*(ceBIm22*ceWIm22*(-4*MMU**2 + MZ**2) + ceBRe22*ceWRe22*(8*MMU**2 + MZ**2))*sth**9*vevhat**2 + 512*MZ**2*(-4*ceWIm22**2*MMU**2 + 8*ceWRe22**2*MMU**2 + ceWIm22**2*MZ**2 + ceWRe22**2*MZ**2 + ceBIm22**2*(4*MMU**2 - MZ**2) - ceBRe22**2*(8*MMU**2 + MZ**2))*sth**10*vevhat**2 + ee**2*(-24*cHDD*cHe22*MMU**2 + 24*cHe22*(4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)*MMU**2 - 16*cHe22**2*(MMU**2 - MZ**2) + 2*cHDD*(4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)*(MMU**2 - MZ**2) - (4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)**2*(MMU**2 - MZ**2) + cHDD**2*(-MMU**2 + MZ**2))*vevhat**4 + 16*cHWB*(cHDD - 4*cHe22 - 4*cHl122 + 2*cHl311 - 2*cHl322 - cll1221)*cth*ee**2*(2*MMU**2 + MZ**2)*sth*vevhat**4 + 4*ee**2*(3*cHDD**2*MMU**2 + 16*cHl122**2*MMU**2 - 32*cHl122*cHl311*MMU**2 + 12*cHl311**2*MMU**2 - 8*cHl311*cHl322*MMU**2 - 4*cHl322**2*MMU**2 + 64*cHWB**2*MMU**2 + 16*cHl122*cll1221*MMU**2 - 12*cHl311*cll1221*MMU**2 + 4*cHl322*cll1221*MMU**2 + 3*cll1221**2*MMU**2 - 16*cHl122**2*MZ**2 + 8*cHl122*cHl311*MZ**2 - 24*cHl122*cHl322*MZ**2 + 8*cHl311*cHl322*MZ**2 - 8*cHl322**2*MZ**2 + 32*cHWB**2*MZ**2 - 4*cHl122*cll1221*MZ**2 - 4*cHl322*cll1221*MZ**2 + 16*cHe22**2*(MMU**2 - MZ**2) + 2*cHDD*(8*cHe22*MMU**2 - 
8*cHl122*MMU**2 + 6*cHl311*MMU**2 - 2*cHl322*MMU**2 - 3*cll1221*MMU**2 - 2*cHe22*MZ**2 + 2*cHl122*MZ**2 + 2*cHl322*MZ**2) - 4*cHe22*(24*cHl122*MMU**2 - 8*cHl311*MMU**2 + 16*cHl322*MMU**2 + 4*cll1221*MMU**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2))*sth**2*vevhat**4 + 32*cHWB*(cHDD + 4*cHe22 + 4*cHl122 + 2*cHl311 + 6*cHl322 - cll1221)*cth*ee**2*(2*MMU**2 + MZ**2)*sth**3*vevhat**4 - 4*ee**2*(16*cHl122**2*MMU**2 - 48*cHl122*cHl311*MMU**2 + 4*cHl311**2*MMU**2 - 16*cHl122*cHl322*MMU**2 - 40*cHl311*cHl322*MMU**2 - 28*cHl322**2*MMU**2 + 64*cHWB**2*MMU**2 + 24*cHl122*cll1221*MMU**2 - 4*cHl311*cll1221*MMU**2 + 20*cHl322*cll1221*MMU**2 + cll1221**2*MMU**2 - 16*cHl122**2*MZ**2 - 4*cHl311**2*MZ**2 - 32*cHl122*cHl322*MZ**2 - 8*cHl311*cHl322*MZ**2 - 20*cHl322**2*MZ**2 + 32*cHWB**2*MZ**2 + 4*cHl311*cll1221*MZ**2 + 4*cHl322*cll1221*MZ**2 - cll1221**2*MZ**2 + cHDD**2*(MMU**2 - MZ**2) + 16*cHe22**2*(MMU**2 - MZ**2) - 8*cHe22*(12*cHl122*MMU**2 - 2*cHl311*MMU**2 + 10*cHl322*MMU**2 + cll1221*MMU**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2) - 2*cHDD*(-4*cHe22*MMU**2 + 12*cHl122*MMU**2 - 2*cHl311*MMU**2 + 10*cHl322*MMU**2 + cll1221*MMU**2 + 4*cHe22*MZ**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2))*sth**4*vevhat**4 - 192*ceWRe22*ee*LambdaSMEFT**2*MMU*MZ**2*sth*vevhat*cmath.sqrt(2) - 192*ceBRe22*cth*ee*LambdaSMEFT**2*MMU*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 1728*ceWRe22*ee*LambdaSMEFT**2*MMU*MZ**2*sth**3*vevhat*cmath.sqrt(2) + 1536*ceBRe22*cth*ee*LambdaSMEFT**2*MMU*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 5376*ceWRe22*ee*LambdaSMEFT**2*MMU*MZ**2*sth**5*vevhat*cmath.sqrt(2) - 3840*ceBRe22*cth*ee*LambdaSMEFT**2*MMU*MZ**2*sth**6*vevhat*cmath.sqrt(2) + 6912*ceWRe22*ee*LambdaSMEFT**2*MMU*MZ**2*sth**7*vevhat*cmath.sqrt(2) + 3072*ceBRe22*cth*ee*LambdaSMEFT**2*MMU*MZ**2*sth**8*vevhat*cmath.sqrt(2) - 3072*ceWRe22*ee*LambdaSMEFT**2*MMU*MZ**2*sth**9*vevhat*cmath.sqrt(2) + 48*ceWRe22*(cHDD - 4*cHe22 - 4*cHl122 + 2*cHl311 - 2*cHl322 - 
cll1221)*ee*MMU*MZ**2*sth*vevhat**3*cmath.sqrt(2) + 48*(16*ceWRe22*cHWB + ceBRe22*(cHDD - 4*cHe22 - 4*cHl122 + 2*cHl311 - 2*cHl322 - cll1221))*cth*ee*MMU*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) + 48*(16*ceBRe22*cHWB + ceWRe22*(-cHDD + 20*cHe22 + 20*cHl122 - 2*cHl311 + 18*cHl322 + cll1221))*ee*MMU*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) + 768*(ceBRe22*(cHe22 + cHl122 + cHl322) - 3*ceWRe22*cHWB)*cth*ee*MMU*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) - 192*(12*ceBRe22*cHWB + ceWRe22*(cHDD + 8*cHe22 + 8*cHl122 + 2*cHl311 + 10*cHl322 - cll1221))*ee*MMU*MZ**2*sth**5*vevhat**3*cmath.sqrt(2) - 192*(-8*ceWRe22*cHWB + ceBRe22*(cHDD + 4*cHe22 + 4*cHl122 + 2*cHl311 + 6*cHl322 - cll1221))*cth*ee*MMU*MZ**2*sth**6*vevhat**3*cmath.sqrt(2) + 192*(8*ceBRe22*cHWB + ceWRe22*(cHDD + 4*cHe22 + 4*cHl122 + 2*cHl311 + 6*cHl322 - cll1221))*ee*MMU*MZ**2*sth**7*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*MMU**2 + MZ**2))/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2*(1 - 2*sth**2)**2)',
(P.s,P.s__tilde__):'((-144*ee**2*LambdaSMEFT**4*MS**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 + 192*ee**2*LambdaSMEFT**4*(MS**2 - 4*MZ**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(19*MS**2 + 23*MZ**2)*sth**4 - 1280*ee**2*LambdaSMEFT**4*(2*MS**2 + MZ**2)*sth**6 + 512*ee**2*LambdaSMEFT**4*(2*MS**2 + MZ**2)*sth**8 + 72*ee**2*LambdaSMEFT**2*(12*cHd*MS**2 + (cHDD - 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 - cll1221)*(MS**2 - MZ**2))*vevhat**2 - 192*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2)*sth*vevhat**2 - 9*ee**2*(24*cHd*(cHDD - 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 - cll1221)*MS**2 + 16*cHd**2*(MS**2 - MZ**2) + (cHDD - 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 - cll1221)**2*(MS**2 - MZ**2))*vevhat**4 - 48*cHWB*(4*cHd - cHDD + 4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MS**2 + MZ**2)*sth*vevhat**4 + 4*ee**2*(144*cHj1**2*MS**2 + 288*cHj1*cHj3*MS**2 + 144*cHj3**2*MS**2 - 192*cHj1*cHl311*MS**2 - 192*cHj3*cHl311*MS**2 + 60*cHl311**2*MS**2 - 192*cHj1*cHl322*MS**2 - 192*cHj3*cHl322*MS**2 + 120*cHl311*cHl322*MS**2 + 60*cHl322**2*MS**2 + 64*cHWB**2*MS**2 + 96*cHj1*cll1221*MS**2 + 96*cHj3*cll1221*MS**2 - 60*cHl311*cll1221*MS**2 - 60*cHl322*cll1221*MS**2 + 15*cll1221**2*MS**2 - 144*cHj1**2*MZ**2 - 288*cHj1*cHj3*MZ**2 - 144*cHj3**2*MZ**2 + 120*cHj1*cHl311*MZ**2 + 120*cHj3*cHl311*MZ**2 - 24*cHl311**2*MZ**2 + 120*cHj1*cHl322*MZ**2 + 120*cHj3*cHl322*MZ**2 - 48*cHl311*cHl322*MZ**2 - 24*cHl322**2*MZ**2 + 32*cHWB**2*MZ**2 - 60*cHj1*cll1221*MZ**2 - 60*cHj3*cll1221*MZ**2 + 24*cHl311*cll1221*MZ**2 + 24*cHl322*cll1221*MZ**2 - 6*cll1221**2*MZ**2 + 3*cHDD**2*(5*MS**2 - 2*MZ**2) + 144*cHd**2*(MS**2 - MZ**2) + 12*cHd*(16*cHDD*MS**2 - 72*cHj1*MS**2 - 72*cHj3*MS**2 + 32*cHl311*MS**2 + 32*cHl322*MS**2 - 16*cll1221*MS**2 - cHDD*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2) - 6*cHDD*(2*cHj1*(8*MS**2 - 5*MZ**2) + 2*cHj3*(8*MS**2 - 5*MZ**2) - (2*cHl311 + 2*cHl322 - cll1221)*(5*MS**2 - 2*MZ**2)))*sth**2*vevhat**4 + 32*cHWB*(12*cHd - cHDD + 12*cHj1 + 12*cHj3 - 2*cHl311 - 2*cHl322 + 
cll1221)*cth*ee**2*(2*MS**2 + MZ**2)*sth**3*vevhat**4 - 4*ee**2*(144*cHj1**2*MS**2 + 288*cHj1*cHj3*MS**2 + 144*cHj3**2*MS**2 - 240*cHj1*cHl311*MS**2 - 240*cHj3*cHl311*MS**2 + 68*cHl311**2*MS**2 - 240*cHj1*cHl322*MS**2 - 240*cHj3*cHl322*MS**2 + 136*cHl311*cHl322*MS**2 + 68*cHl322**2*MS**2 + 64*cHWB**2*MS**2 + 120*cHj1*cll1221*MS**2 + 120*cHj3*cll1221*MS**2 - 68*cHl311*cll1221*MS**2 - 68*cHl322*cll1221*MS**2 + 17*cll1221**2*MS**2 - 144*cHj1**2*MZ**2 - 288*cHj1*cHj3*MZ**2 - 144*cHj3**2*MZ**2 + 96*cHj1*cHl311*MZ**2 + 96*cHj3*cHl311*MZ**2 - 20*cHl311**2*MZ**2 + 96*cHj1*cHl322*MZ**2 + 96*cHj3*cHl322*MZ**2 - 40*cHl311*cHl322*MZ**2 - 20*cHl322**2*MZ**2 + 32*cHWB**2*MZ**2 - 48*cHj1*cll1221*MZ**2 - 48*cHj3*cll1221*MZ**2 + 20*cHl311*cll1221*MZ**2 + 20*cHl322*cll1221*MZ**2 - 5*cll1221**2*MZ**2 + cHDD**2*(17*MS**2 - 5*MZ**2) + 2*cHDD*(2*cHl311 + 2*cHl322 - cll1221)*(17*MS**2 - 5*MZ**2) - 24*cHDD*cHj1*(5*MS**2 - 2*MZ**2) - 24*cHDD*cHj3*(5*MS**2 - 2*MZ**2) + 144*cHd**2*(MS**2 - MZ**2) + 24*cHd*(7*cHDD*MS**2 - 36*cHj1*MS**2 - 36*cHj3*MS**2 + 14*cHl311*MS**2 + 14*cHl322*MS**2 - 7*cll1221*MS**2 - cHDD*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2))*sth**4*vevhat**4 + 18432*cth*MZ**2*(cdBIm*cdWIm*(-4*MS**2 + MZ**2) + cdBRe*cdWRe*(8*MS**2 + MZ**2))*sth**7*vevhat**2*ys**2 + 4608*MZ**2*(3*cdWIm**2*(4*MS**2 - MZ**2) + 2*cdBRe**2*(8*MS**2 + MZ**2) - 3*cdWRe**2*(8*MS**2 + MZ**2) + cdBIm**2*(-8*MS**2 + 2*MZ**2))*sth**8*vevhat**2*ys**2 - 9216*cth*MZ**2*(cdBIm*cdWIm*(-4*MS**2 + MZ**2) + cdBRe*cdWRe*(8*MS**2 + MZ**2))*sth**9*vevhat**2*ys**2 + 4608*MZ**2*(-4*cdWIm**2*MS**2 + 8*cdWRe**2*MS**2 + cdWIm**2*MZ**2 + cdWRe**2*MZ**2 + cdBIm**2*(4*MS**2 - MZ**2) - cdBRe**2*(8*MS**2 + MZ**2))*sth**10*vevhat**2*ys**2 - 64*sth**6*vevhat**2*(24*cHj1*ee**2*LambdaSMEFT**2*MS**2 + 24*cHj3*ee**2*LambdaSMEFT**2*MS**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MS**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MS**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MS**2 + 12*cHj1*ee**2*LambdaSMEFT**2*MZ**2 + 12*cHj3*ee**2*LambdaSMEFT**2*MZ**2 
- 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 12*cHd*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) - cHDD*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) - 360*cdBIm**2*MS**2*MZ**2*ys**2 + 720*cdBRe**2*MS**2*MZ**2*ys**2 + 936*cdWIm**2*MS**2*MZ**2*ys**2 - 1872*cdWRe**2*MS**2*MZ**2*ys**2 + 90*cdBIm**2*MZ**4*ys**2 + 90*cdBRe**2*MZ**4*ys**2 - 234*cdWIm**2*MZ**4*ys**2 - 234*cdWRe**2*MZ**4*ys**2) + 32*sth**4*vevhat**2*(12*cHj1*ee**2*LambdaSMEFT**2*MS**2 + 12*cHj3*ee**2*LambdaSMEFT**2*MS**2 + 14*cHl311*ee**2*LambdaSMEFT**2*MS**2 + 14*cHl322*ee**2*LambdaSMEFT**2*MS**2 - 7*cll1221*ee**2*LambdaSMEFT**2*MS**2 + 60*cHj1*ee**2*LambdaSMEFT**2*MZ**2 + 60*cHj3*ee**2*LambdaSMEFT**2*MZ**2 - 20*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 20*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 10*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + cHDD*ee**2*LambdaSMEFT**2*(7*MS**2 - 10*MZ**2) + 12*cHd*ee**2*LambdaSMEFT**2*(13*MS**2 + 2*MZ**2) - 144*cdBIm**2*MS**2*MZ**2*ys**2 + 288*cdBRe**2*MS**2*MZ**2*ys**2 + 864*cdWIm**2*MS**2*MZ**2*ys**2 - 1728*cdWRe**2*MS**2*MZ**2*ys**2 + 36*cdBIm**2*MZ**4*ys**2 + 36*cdBRe**2*MZ**4*ys**2 - 216*cdWIm**2*MZ**4*ys**2 - 216*cdWRe**2*MZ**4*ys**2) - 96*sth**2*vevhat**2*(-8*cHj1*ee**2*LambdaSMEFT**2*MS**2 - 8*cHj3*ee**2*LambdaSMEFT**2*MS**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MS**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MS**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MS**2 + 14*cHj1*ee**2*LambdaSMEFT**2*MZ**2 + 14*cHj3*ee**2*LambdaSMEFT**2*MZ**2 - 6*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 6*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 3*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 3*cHDD*ee**2*LambdaSMEFT**2*(MS**2 - MZ**2) + 2*cHd*ee**2*LambdaSMEFT**2*(20*MS**2 + MZ**2) + 48*cdWIm**2*MS**2*MZ**2*ys**2 - 96*cdWRe**2*MS**2*MZ**2*ys**2 - 12*cdWIm**2*MZ**4*ys**2 - 12*cdWRe**2*MZ**4*ys**2) + 128*cth*sth**3*vevhat**2*(5*cHWB*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) + 18*MZ**2*(cdBIm*cdWIm*(-4*MS**2 + MZ**2) + cdBRe*cdWRe*(8*MS**2 + MZ**2))*ys**2) - 
256*cth*sth**5*vevhat**2*(2*cHWB*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) + 45*MZ**2*(cdBIm*cdWIm*(-4*MS**2 + MZ**2) + cdBRe*cdWRe*(8*MS**2 + MZ**2))*ys**2) - 1728*cdWRe*ee*LambdaSMEFT**2*MS*MZ**2*sth*vevhat*ys*cmath.sqrt(2) - 1728*cdBRe*cth*ee*LambdaSMEFT**2*MS*MZ**2*sth**2*vevhat*ys*cmath.sqrt(2) + 10944*cdWRe*ee*LambdaSMEFT**2*MS*MZ**2*sth**3*vevhat*ys*cmath.sqrt(2) + 9216*cdBRe*cth*ee*LambdaSMEFT**2*MS*MZ**2*sth**4*vevhat*ys*cmath.sqrt(2) - 25344*cdWRe*ee*LambdaSMEFT**2*MS*MZ**2*sth**5*vevhat*ys*cmath.sqrt(2) - 16128*cdBRe*cth*ee*LambdaSMEFT**2*MS*MZ**2*sth**6*vevhat*ys*cmath.sqrt(2) + 25344*cdWRe*ee*LambdaSMEFT**2*MS*MZ**2*sth**7*vevhat*ys*cmath.sqrt(2) + 9216*cdBRe*cth*ee*LambdaSMEFT**2*MS*MZ**2*sth**8*vevhat*ys*cmath.sqrt(2) - 9216*cdWRe*ee*LambdaSMEFT**2*MS*MZ**2*sth**9*vevhat*ys*cmath.sqrt(2) - 432*cdWRe*(4*cHd - cHDD + 4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*ee*MS*MZ**2*sth*vevhat**3*ys*cmath.sqrt(2) - 144*(-16*cdWRe*cHWB + 3*cdBRe*(4*cHd - cHDD + 4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MS*MZ**2*sth**2*vevhat**3*ys*cmath.sqrt(2) + 144*(16*cdBRe*cHWB + cdWRe*(60*cHd - 11*cHDD + 60*cHj1 + 60*cHj3 - 22*cHl311 - 22*cHl322 + 11*cll1221))*ee*MS*MZ**2*sth**3*vevhat**3*ys*cmath.sqrt(2) + 1152*(-6*cdWRe*cHWB + cdBRe*(6*cHd - cHDD + 6*cHj1 + 6*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MS*MZ**2*sth**4*vevhat**3*ys*cmath.sqrt(2) - 1728*(4*cdBRe*cHWB + cdWRe*(8*cHd - cHDD + 8*cHj1 + 8*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*ee*MS*MZ**2*sth**5*vevhat**3*ys*cmath.sqrt(2) - 576*(-8*cdWRe*cHWB + cdBRe*(12*cHd - cHDD + 12*cHj1 + 12*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MS*MZ**2*sth**6*vevhat**3*ys*cmath.sqrt(2) + 576*(8*cdBRe*cHWB + cdWRe*(12*cHd - cHDD + 12*cHj1 + 12*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*ee*MS*MZ**2*sth**7*vevhat**3*ys*cmath.sqrt(2))*cmath.sqrt(-4*MS**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2*(1 - 2*sth**2)**2)',
(P.ta__minus__,P.ta__plus__):'((-16*ee**2*LambdaSMEFT**4*MTA**2 + 16*ee**2*LambdaSMEFT**4*MZ**2 - 64*ee**2*LambdaSMEFT**4*(MTA**2 + 2*MZ**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(11*MTA**2 + 7*MZ**2)*sth**4 - 768*ee**2*LambdaSMEFT**4*(2*MTA**2 + MZ**2)*sth**6 + 512*ee**2*LambdaSMEFT**4*(2*MTA**2 + MZ**2)*sth**8 + 8*ee**2*LambdaSMEFT**2*(12*cHe33*MTA**2 + cHDD*(MTA**2 - MZ**2) - (4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)*(MTA**2 - MZ**2))*vevhat**2 - 64*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2)*sth*vevhat**2 - 32*(2*cHl311*ee**2*LambdaSMEFT**2*MTA**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MTA**2 - cll1221*ee**2*LambdaSMEFT**2*MTA**2 + 6*cHl133*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 6*cHl333*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 16*ceWIm33**2*MTA**2*MZ**2 - 32*ceWRe33**2*MTA**2*MZ**2 - 4*ceWIm33**2*MZ**4 - 4*ceWRe33**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(MTA**2 - MZ**2) + 2*cHe33*ee**2*LambdaSMEFT**2*(8*MTA**2 + MZ**2))*sth**2*vevhat**2 + 128*cth*(3*cHWB*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2) + 2*MZ**2*(ceBIm33*ceWIm33*(-4*MTA**2 + MZ**2) + ceBRe33*ceWRe33*(8*MTA**2 + MZ**2)))*sth**3*vevhat**2 + 32*(3*cHDD*ee**2*LambdaSMEFT**2*MTA**2 + 12*cHl133*ee**2*LambdaSMEFT**2*MTA**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MTA**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MTA**2 + 12*cHl333*ee**2*LambdaSMEFT**2*MTA**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MTA**2 + 12*cHl133*ee**2*LambdaSMEFT**2*MZ**2 + 12*cHl333*ee**2*LambdaSMEFT**2*MZ**2 - 16*ceBIm33**2*MTA**2*MZ**2 + 32*ceBRe33**2*MTA**2*MZ**2 + 96*ceWIm33**2*MTA**2*MZ**2 - 192*ceWRe33**2*MTA**2*MZ**2 + 4*ceBIm33**2*MZ**4 + 4*ceBRe33**2*MZ**4 - 24*ceWIm33**2*MZ**4 - 24*ceWRe33**2*MZ**4 + 4*cHe33*ee**2*LambdaSMEFT**2*(7*MTA**2 + 2*MZ**2))*sth**4*vevhat**2 - 256*cth*(2*cHWB*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2) + 5*MZ**2*(ceBIm33*ceWIm33*(-4*MTA**2 + MZ**2) + ceBRe33*ceWRe33*(8*MTA**2 + MZ**2)))*sth**5*vevhat**2 - 
64*(8*cHl133*ee**2*LambdaSMEFT**2*MTA**2 + 4*cHl311*ee**2*LambdaSMEFT**2*MTA**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MTA**2 + 8*cHl333*ee**2*LambdaSMEFT**2*MTA**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MTA**2 + 4*cHl133*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 4*cHl333*ee**2*LambdaSMEFT**2*MZ**2 - cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 40*ceBIm33**2*MTA**2*MZ**2 + 80*ceBRe33**2*MTA**2*MZ**2 + 104*ceWIm33**2*MTA**2*MZ**2 - 208*ceWRe33**2*MTA**2*MZ**2 + 10*ceBIm33**2*MZ**4 + 10*ceBRe33**2*MZ**4 - 26*ceWIm33**2*MZ**4 - 26*ceWRe33**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2) + 4*cHe33*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2))*sth**6*vevhat**2 + 2048*cth*MZ**2*(ceBIm33*ceWIm33*(-4*MTA**2 + MZ**2) + ceBRe33*ceWRe33*(8*MTA**2 + MZ**2))*sth**7*vevhat**2 + 512*MZ**2*(3*ceWIm33**2*(4*MTA**2 - MZ**2) + 2*ceBRe33**2*(8*MTA**2 + MZ**2) - 3*ceWRe33**2*(8*MTA**2 + MZ**2) + ceBIm33**2*(-8*MTA**2 + 2*MZ**2))*sth**8*vevhat**2 - 1024*cth*MZ**2*(ceBIm33*ceWIm33*(-4*MTA**2 + MZ**2) + ceBRe33*ceWRe33*(8*MTA**2 + MZ**2))*sth**9*vevhat**2 + 512*MZ**2*(-4*ceWIm33**2*MTA**2 + 8*ceWRe33**2*MTA**2 + ceWIm33**2*MZ**2 + ceWRe33**2*MZ**2 + ceBIm33**2*(4*MTA**2 - MZ**2) - ceBRe33**2*(8*MTA**2 + MZ**2))*sth**10*vevhat**2 + ee**2*(-24*cHDD*cHe33*MTA**2 + 24*cHe33*(4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)*MTA**2 - 16*cHe33**2*(MTA**2 - MZ**2) + 2*cHDD*(4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)*(MTA**2 - MZ**2) - (4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)**2*(MTA**2 - MZ**2) + cHDD**2*(-MTA**2 + MZ**2))*vevhat**4 + 16*cHWB*(cHDD - 4*cHe33 - 4*cHl133 + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*cth*ee**2*(2*MTA**2 + MZ**2)*sth*vevhat**4 + 4*ee**2*(3*cHDD**2*MTA**2 + 16*cHl133**2*MTA**2 - 32*cHl133*cHl311*MTA**2 + 12*cHl311**2*MTA**2 - 32*cHl133*cHl322*MTA**2 + 24*cHl311*cHl322*MTA**2 + 12*cHl322**2*MTA**2 + 32*cHl133*cHl333*MTA**2 - 32*cHl311*cHl333*MTA**2 - 32*cHl322*cHl333*MTA**2 + 16*cHl333**2*MTA**2 + 
64*cHWB**2*MTA**2 + 16*cHl133*cll1221*MTA**2 - 12*cHl311*cll1221*MTA**2 - 12*cHl322*cll1221*MTA**2 + 16*cHl333*cll1221*MTA**2 + 3*cll1221**2*MTA**2 - 16*cHl133**2*MZ**2 + 8*cHl133*cHl311*MZ**2 + 8*cHl133*cHl322*MZ**2 - 32*cHl133*cHl333*MZ**2 + 8*cHl311*cHl333*MZ**2 + 8*cHl322*cHl333*MZ**2 - 16*cHl333**2*MZ**2 + 32*cHWB**2*MZ**2 - 4*cHl133*cll1221*MZ**2 - 4*cHl333*cll1221*MZ**2 + 16*cHe33**2*(MTA**2 - MZ**2) + 2*cHDD*(8*cHe33*MTA**2 - 8*cHl133*MTA**2 + 6*cHl311*MTA**2 + 6*cHl322*MTA**2 - 8*cHl333*MTA**2 - 3*cll1221*MTA**2 - 2*cHe33*MZ**2 + 2*cHl133*MZ**2 + 2*cHl333*MZ**2) - 4*cHe33*(24*cHl133*MTA**2 - 8*cHl311*MTA**2 - 8*cHl322*MTA**2 + 24*cHl333*MTA**2 + 4*cll1221*MTA**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2))*sth**2*vevhat**4 + 32*cHWB*(cHDD + 4*cHe33 + 4*cHl133 + 2*cHl311 + 2*cHl322 + 4*cHl333 - cll1221)*cth*ee**2*(2*MTA**2 + MZ**2)*sth**3*vevhat**4 - 4*ee**2*(16*cHl133**2*MTA**2 - 48*cHl133*cHl311*MTA**2 + 4*cHl311**2*MTA**2 - 48*cHl133*cHl322*MTA**2 + 8*cHl311*cHl322*MTA**2 + 4*cHl322**2*MTA**2 + 32*cHl133*cHl333*MTA**2 - 48*cHl311*cHl333*MTA**2 - 48*cHl322*cHl333*MTA**2 + 16*cHl333**2*MTA**2 + 64*cHWB**2*MTA**2 + 24*cHl133*cll1221*MTA**2 - 4*cHl311*cll1221*MTA**2 - 4*cHl322*cll1221*MTA**2 + 24*cHl333*cll1221*MTA**2 + cll1221**2*MTA**2 - 16*cHl133**2*MZ**2 - 4*cHl311**2*MZ**2 - 8*cHl311*cHl322*MZ**2 - 4*cHl322**2*MZ**2 - 32*cHl133*cHl333*MZ**2 - 16*cHl333**2*MZ**2 + 32*cHWB**2*MZ**2 + 4*cHl311*cll1221*MZ**2 + 4*cHl322*cll1221*MZ**2 - cll1221**2*MZ**2 + cHDD**2*(MTA**2 - MZ**2) + 16*cHe33**2*(MTA**2 - MZ**2) - 8*cHe33*(12*cHl133*MTA**2 - 2*cHl311*MTA**2 - 2*cHl322*MTA**2 + 12*cHl333*MTA**2 + cll1221*MTA**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2) - 2*cHDD*(-4*cHe33*MTA**2 + 12*cHl133*MTA**2 - 2*cHl311*MTA**2 - 2*cHl322*MTA**2 + 12*cHl333*MTA**2 + cll1221*MTA**2 + 4*cHe33*MZ**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2))*sth**4*vevhat**4 - 192*ceWRe33*ee*LambdaSMEFT**2*MTA*MZ**2*sth*vevhat*cmath.sqrt(2) - 
192*ceBRe33*cth*ee*LambdaSMEFT**2*MTA*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 1728*ceWRe33*ee*LambdaSMEFT**2*MTA*MZ**2*sth**3*vevhat*cmath.sqrt(2) + 1536*ceBRe33*cth*ee*LambdaSMEFT**2*MTA*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 5376*ceWRe33*ee*LambdaSMEFT**2*MTA*MZ**2*sth**5*vevhat*cmath.sqrt(2) - 3840*ceBRe33*cth*ee*LambdaSMEFT**2*MTA*MZ**2*sth**6*vevhat*cmath.sqrt(2) + 6912*ceWRe33*ee*LambdaSMEFT**2*MTA*MZ**2*sth**7*vevhat*cmath.sqrt(2) + 3072*ceBRe33*cth*ee*LambdaSMEFT**2*MTA*MZ**2*sth**8*vevhat*cmath.sqrt(2) - 3072*ceWRe33*ee*LambdaSMEFT**2*MTA*MZ**2*sth**9*vevhat*cmath.sqrt(2) + 48*ceWRe33*(cHDD - 4*cHe33 - 4*cHl133 + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee*MTA*MZ**2*sth*vevhat**3*cmath.sqrt(2) + 48*(16*ceWRe33*cHWB + ceBRe33*(cHDD - 4*cHe33 - 4*cHl133 + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221))*cth*ee*MTA*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) - 48*(-16*ceBRe33*cHWB + ceWRe33*(cHDD - 20*cHe33 - 20*cHl133 + 2*cHl311 + 2*cHl322 - 20*cHl333 - cll1221))*ee*MTA*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) + 768*(ceBRe33*(cHe33 + cHl133 + cHl333) - 3*ceWRe33*cHWB)*cth*ee*MTA*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) - 192*(12*ceBRe33*cHWB + ceWRe33*(cHDD + 8*cHe33 + 8*cHl133 + 2*cHl311 + 2*cHl322 + 8*cHl333 - cll1221))*ee*MTA*MZ**2*sth**5*vevhat**3*cmath.sqrt(2) - 192*(-8*ceWRe33*cHWB + ceBRe33*(cHDD + 4*cHe33 + 4*cHl133 + 2*cHl311 + 2*cHl322 + 4*cHl333 - cll1221))*cth*ee*MTA*MZ**2*sth**6*vevhat**3*cmath.sqrt(2) + 192*(8*ceBRe33*cHWB + ceWRe33*(cHDD + 4*cHe33 + 4*cHl133 + 2*cHl311 + 2*cHl322 + 4*cHl333 - cll1221))*ee*MTA*MZ**2*sth**7*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*MTA**2 + MZ**2))/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2*(1 - 2*sth**2)**2)',
(P.t,P.t__tilde__):'((-144*ee**2*LambdaSMEFT**4*MT**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 - 192*ee**2*LambdaSMEFT**4*(MT**2 + 5*MZ**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(55*MT**2 + 41*MZ**2)*sth**4 - 3584*ee**2*LambdaSMEFT**4*(2*MT**2 + MZ**2)*sth**6 + 2048*ee**2*LambdaSMEFT**4*(2*MT**2 + MZ**2)*sth**8 + 72*ee**2*LambdaSMEFT**2*(2*cHl322*MT**2 + 4*cHQ1*MT**2 - 4*cHQ3*MT**2 - 12*cHt*MT**2 - cll1221*MT**2 - 2*cHl322*MZ**2 - 4*cHQ1*MZ**2 + 4*cHQ3*MZ**2 + cll1221*MZ**2 + cHDD*(MT**2 - MZ**2) + 2*cHl311*(MT**2 - MZ**2))*vevhat**2 - 384*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2)*sth*vevhat**2 - 96*(6*cHl322*ee**2*LambdaSMEFT**2*MT**2 + 4*cHQ1*ee**2*LambdaSMEFT**2*MT**2 - 4*cHQ3*ee**2*LambdaSMEFT**2*MT**2 - 44*cHt*ee**2*LambdaSMEFT**2*MT**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MT**2 - 6*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 16*cHQ1*ee**2*LambdaSMEFT**2*MZ**2 + 16*cHQ3*ee**2*LambdaSMEFT**2*MZ**2 - 4*cHt*ee**2*LambdaSMEFT**2*MZ**2 + 3*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 48*ctWIm**2*MT**2*MZ**2 - 96*ctWRe**2*MT**2*MZ**2 - 12*ctWIm**2*MZ**4 - 12*ctWRe**2*MZ**4 + 3*cHDD*ee**2*LambdaSMEFT**2*(MT**2 - MZ**2) + 6*cHl311*ee**2*LambdaSMEFT**2*(MT**2 - MZ**2))*sth**2*vevhat**2 + 256*cth*(7*cHWB*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2) - 9*MZ**2*(ctBIm*ctWIm*(-4*MT**2 + MZ**2) + ctBRe*ctWRe*(8*MT**2 + MZ**2)))*sth**3*vevhat**2 + 32*(26*cHl322*ee**2*LambdaSMEFT**2*MT**2 - 60*cHQ1*ee**2*LambdaSMEFT**2*MT**2 + 60*cHQ3*ee**2*LambdaSMEFT**2*MT**2 - 204*cHt*ee**2*LambdaSMEFT**2*MT**2 - 13*cll1221*ee**2*LambdaSMEFT**2*MT**2 - 14*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 84*cHQ1*ee**2*LambdaSMEFT**2*MZ**2 + 84*cHQ3*ee**2*LambdaSMEFT**2*MZ**2 - 48*cHt*ee**2*LambdaSMEFT**2*MZ**2 + 7*cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 144*ctBIm**2*MT**2*MZ**2 + 288*ctBRe**2*MT**2*MZ**2 + 864*ctWIm**2*MT**2*MZ**2 - 1728*ctWRe**2*MT**2*MZ**2 + 36*ctBIm**2*MZ**4 + 36*ctBRe**2*MZ**4 - 216*ctWIm**2*MZ**4 - 216*ctWRe**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(13*MT**2 - 7*MZ**2) + 2*cHl311*ee**2*LambdaSMEFT**2*(13*MT**2 - 
7*MZ**2))*sth**4*vevhat**2 - 256*cth*(8*cHWB*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2) - 45*MZ**2*(ctBIm*ctWIm*(-4*MT**2 + MZ**2) + ctBRe*ctWRe*(8*MT**2 + MZ**2)))*sth**5*vevhat**2 - 128*(4*cHl322*ee**2*LambdaSMEFT**2*MT**2 - 24*cHQ1*ee**2*LambdaSMEFT**2*MT**2 + 24*cHQ3*ee**2*LambdaSMEFT**2*MT**2 - 24*cHt*ee**2*LambdaSMEFT**2*MT**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MT**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 12*cHQ1*ee**2*LambdaSMEFT**2*MZ**2 + 12*cHQ3*ee**2*LambdaSMEFT**2*MZ**2 - 12*cHt*ee**2*LambdaSMEFT**2*MZ**2 - cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 180*ctBIm**2*MT**2*MZ**2 + 360*ctBRe**2*MT**2*MZ**2 + 468*ctWIm**2*MT**2*MZ**2 - 936*ctWRe**2*MT**2*MZ**2 + 45*ctBIm**2*MZ**4 + 45*ctBRe**2*MZ**4 - 117*ctWIm**2*MZ**4 - 117*ctWRe**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2) + 2*cHl311*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2))*sth**6*vevhat**2 - 18432*cth*MZ**2*(ctBIm*ctWIm*(-4*MT**2 + MZ**2) + ctBRe*ctWRe*(8*MT**2 + MZ**2))*sth**7*vevhat**2 + 4608*MZ**2*(3*ctWIm**2*(4*MT**2 - MZ**2) + 2*ctBRe**2*(8*MT**2 + MZ**2) - 3*ctWRe**2*(8*MT**2 + MZ**2) + ctBIm**2*(-8*MT**2 + 2*MZ**2))*sth**8*vevhat**2 + 9216*cth*MZ**2*(ctBIm*ctWIm*(-4*MT**2 + MZ**2) + ctBRe*ctWRe*(8*MT**2 + MZ**2))*sth**9*vevhat**2 + 4608*MZ**2*(-4*ctWIm**2*MT**2 + 8*ctWRe**2*MT**2 + ctWIm**2*MZ**2 + ctWRe**2*MZ**2 + ctBIm**2*(4*MT**2 - MZ**2) - ctBRe**2*(8*MT**2 + MZ**2))*sth**10*vevhat**2 - 9*ee**2*(4*cHl322**2*MT**2 + 16*cHl322*cHQ1*MT**2 + 16*cHQ1**2*MT**2 - 16*cHl322*cHQ3*MT**2 - 32*cHQ1*cHQ3*MT**2 + 16*cHQ3**2*MT**2 - 48*cHl322*cHt*MT**2 - 96*cHQ1*cHt*MT**2 + 96*cHQ3*cHt*MT**2 + 16*cHt**2*MT**2 - 4*cHl322*cll1221*MT**2 - 8*cHQ1*cll1221*MT**2 + 8*cHQ3*cll1221*MT**2 + 24*cHt*cll1221*MT**2 + cll1221**2*MT**2 - 4*cHl322**2*MZ**2 - 16*cHl322*cHQ1*MZ**2 - 16*cHQ1**2*MZ**2 + 16*cHl322*cHQ3*MZ**2 + 32*cHQ1*cHQ3*MZ**2 - 16*cHQ3**2*MZ**2 - 16*cHt**2*MZ**2 + 4*cHl322*cll1221*MZ**2 + 8*cHQ1*cll1221*MZ**2 - 8*cHQ3*cll1221*MZ**2 - cll1221**2*MZ**2 + cHDD**2*(MT**2 - MZ**2) + 4*cHl311**2*(MT**2 - MZ**2) + 
4*cHl311*(2*cHl322*MT**2 + 4*cHQ1*MT**2 - 4*cHQ3*MT**2 - 12*cHt*MT**2 - cll1221*MT**2 - 2*cHl322*MZ**2 - 4*cHQ1*MZ**2 + 4*cHQ3*MZ**2 + cll1221*MZ**2) + 2*cHDD*(2*cHl322*MT**2 + 4*cHQ1*MT**2 - 4*cHQ3*MT**2 - 12*cHt*MT**2 - cll1221*MT**2 - 2*cHl322*MZ**2 - 4*cHQ1*MZ**2 + 4*cHQ3*MZ**2 + cll1221*MZ**2 + 2*cHl311*(MT**2 - MZ**2)))*vevhat**4 + 96*cHWB*(cHDD + 2*cHl311 + 2*cHl322 + 4*cHQ1 - 4*cHQ3 + 4*cHt - cll1221)*cth*ee**2*(2*MT**2 + MZ**2)*sth*vevhat**4 + 4*ee**2*(84*cHl322**2*MT**2 + 240*cHl322*cHQ1*MT**2 + 144*cHQ1**2*MT**2 - 240*cHl322*cHQ3*MT**2 - 288*cHQ1*cHQ3*MT**2 + 144*cHQ3**2*MT**2 - 336*cHl322*cHt*MT**2 - 864*cHQ1*cHt*MT**2 + 864*cHQ3*cHt*MT**2 + 144*cHt**2*MT**2 + 256*cHWB**2*MT**2 - 84*cHl322*cll1221*MT**2 - 120*cHQ1*cll1221*MT**2 + 120*cHQ3*cll1221*MT**2 + 168*cHt*cll1221*MT**2 + 21*cll1221**2*MT**2 - 12*cHl322**2*MZ**2 - 96*cHl322*cHQ1*MZ**2 - 144*cHQ1**2*MZ**2 + 96*cHl322*cHQ3*MZ**2 + 288*cHQ1*cHQ3*MZ**2 - 144*cHQ3**2*MZ**2 + 48*cHl322*cHt*MZ**2 - 144*cHt**2*MZ**2 + 128*cHWB**2*MZ**2 + 12*cHl322*cll1221*MZ**2 + 48*cHQ1*cll1221*MZ**2 - 48*cHQ3*cll1221*MZ**2 - 24*cHt*cll1221*MZ**2 - 3*cll1221**2*MZ**2 + 3*cHDD**2*(7*MT**2 - MZ**2) + 12*cHl311**2*(7*MT**2 - MZ**2) + 12*cHl311*(14*cHl322*MT**2 + 20*cHQ1*MT**2 - 20*cHQ3*MT**2 - 28*cHt*MT**2 - 7*cll1221*MT**2 - 2*cHl322*MZ**2 - 8*cHQ1*MZ**2 + 8*cHQ3*MZ**2 + 4*cHt*MZ**2 + cll1221*MZ**2) + 6*cHDD*(14*cHl311*MT**2 + 14*cHl322*MT**2 + 20*cHQ1*MT**2 - 20*cHQ3*MT**2 - 28*cHt*MT**2 - 7*cll1221*MT**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 - 8*cHQ1*MZ**2 + 8*cHQ3*MZ**2 + 4*cHt*MZ**2 + cll1221*MZ**2))*sth**2*vevhat**4 + 64*cHWB*(cHDD + 2*cHl311 + 2*cHl322 - 12*cHQ1 + 12*cHQ3 - 12*cHt - cll1221)*cth*ee**2*(2*MT**2 + MZ**2)*sth**3*vevhat**4 - 4*ee**2*(68*cHl322**2*MT**2 + 336*cHl322*cHQ1*MT**2 + 144*cHQ1**2*MT**2 - 336*cHl322*cHQ3*MT**2 - 288*cHQ1*cHQ3*MT**2 + 144*cHQ3**2*MT**2 - 240*cHl322*cHt*MT**2 - 864*cHQ1*cHt*MT**2 + 864*cHQ3*cHt*MT**2 + 144*cHt**2*MT**2 + 256*cHWB**2*MT**2 - 68*cHl322*cll1221*MT**2 - 
168*cHQ1*cll1221*MT**2 + 168*cHQ3*cll1221*MT**2 + 120*cHt*cll1221*MT**2 + 17*cll1221**2*MT**2 - 20*cHl322**2*MZ**2 - 48*cHl322*cHQ1*MZ**2 - 144*cHQ1**2*MZ**2 + 48*cHl322*cHQ3*MZ**2 + 288*cHQ1*cHQ3*MZ**2 - 144*cHQ3**2*MZ**2 + 96*cHl322*cHt*MZ**2 - 144*cHt**2*MZ**2 + 128*cHWB**2*MZ**2 + 20*cHl322*cll1221*MZ**2 + 24*cHQ1*cll1221*MZ**2 - 24*cHQ3*cll1221*MZ**2 - 48*cHt*cll1221*MZ**2 - 5*cll1221**2*MZ**2 + cHDD**2*(17*MT**2 - 5*MZ**2) + 4*cHl311**2*(17*MT**2 - 5*MZ**2) + 4*cHl311*(34*cHl322*MT**2 + 84*cHQ1*MT**2 - 84*cHQ3*MT**2 - 60*cHt*MT**2 - 17*cll1221*MT**2 - 10*cHl322*MZ**2 - 12*cHQ1*MZ**2 + 12*cHQ3*MZ**2 + 24*cHt*MZ**2 + 5*cll1221*MZ**2) + 2*cHDD*(34*cHl311*MT**2 + 34*cHl322*MT**2 + 84*cHQ1*MT**2 - 84*cHQ3*MT**2 - 60*cHt*MT**2 - 17*cll1221*MT**2 - 10*cHl311*MZ**2 - 10*cHl322*MZ**2 - 12*cHQ1*MZ**2 + 12*cHQ3*MZ**2 + 24*cHt*MZ**2 + 5*cll1221*MZ**2))*sth**4*vevhat**4 - 1728*ctWRe*ee*LambdaSMEFT**2*MT*MZ**2*sth*vevhat*cmath.sqrt(2) + 1728*ctBRe*cth*ee*LambdaSMEFT**2*MT*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 13248*ctWRe*ee*LambdaSMEFT**2*MT*MZ**2*sth**3*vevhat*cmath.sqrt(2) - 11520*ctBRe*cth*ee*LambdaSMEFT**2*MT*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 36864*ctWRe*ee*LambdaSMEFT**2*MT*MZ**2*sth**5*vevhat*cmath.sqrt(2) + 25344*ctBRe*cth*ee*LambdaSMEFT**2*MT*MZ**2*sth**6*vevhat*cmath.sqrt(2) + 43776*ctWRe*ee*LambdaSMEFT**2*MT*MZ**2*sth**7*vevhat*cmath.sqrt(2) - 18432*ctBRe*cth*ee*LambdaSMEFT**2*MT*MZ**2*sth**8*vevhat*cmath.sqrt(2) - 18432*ctWRe*ee*LambdaSMEFT**2*MT*MZ**2*sth**9*vevhat*cmath.sqrt(2) + 432*(cHDD + 2*cHl311 + 2*cHl322 + 4*cHQ1 - 4*cHQ3 + 4*cHt - cll1221)*ctWRe*ee*MT*MZ**2*sth*vevhat**3*cmath.sqrt(2) - 144*cth*(3*cHDD*ctBRe + 6*cHl311*ctBRe + 6*cHl322*ctBRe + 12*cHQ1*ctBRe - 12*cHQ3*ctBRe + 12*cHt*ctBRe - 3*cll1221*ctBRe - 32*cHWB*ctWRe)*ee*MT*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) - 144*(32*cHWB*ctBRe + (7*cHDD + 14*cHl311 + 14*cHl322 + 60*cHQ1 - 60*cHQ3 + 60*cHt - 7*cll1221)*ctWRe)*ee*MT*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) + 576*cth*(cHDD*ctBRe + 2*cHl311*ctBRe + 
2*cHl322*ctBRe + 12*cHQ1*ctBRe - 12*cHQ3*ctBRe + 12*cHt*ctBRe - cll1221*ctBRe - 24*cHWB*ctWRe)*ee*MT*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) + 13824*(cHWB*ctBRe + (cHQ1 - cHQ3 + cHt)*ctWRe)*ee*MT*MZ**2*sth**5*vevhat**3*cmath.sqrt(2) + 576*cth*(cHDD*ctBRe + 2*cHl311*ctBRe + 2*cHl322*ctBRe - 12*cHQ1*ctBRe + 12*cHQ3*ctBRe - 12*cHt*ctBRe - cll1221*ctBRe + 16*cHWB*ctWRe)*ee*MT*MZ**2*sth**6*vevhat**3*cmath.sqrt(2) - 576*(16*cHWB*ctBRe + (-cHDD - 2*cHl311 - 2*cHl322 + 12*cHQ1 - 12*cHQ3 + 12*cHt + cll1221)*ctWRe)*ee*MT*MZ**2*sth**7*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*MT**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2*(1 - 2*sth**2)**2)',
(P.u,P.u__tilde__):'((-144*ee**2*LambdaSMEFT**4*MU**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 - 192*ee**2*LambdaSMEFT**4*(MU**2 + 5*MZ**2)*sth**2 + 64*ee**2*LambdaSMEFT**4*(55*MU**2 + 41*MZ**2)*sth**4 - 3584*ee**2*LambdaSMEFT**4*(2*MU**2 + MZ**2)*sth**6 + 2048*ee**2*LambdaSMEFT**4*(2*MU**2 + MZ**2)*sth**8 + 72*ee**2*LambdaSMEFT**2*(-4*cHj3*MU**2 + 2*cHl311*MU**2 + 2*cHl322*MU**2 - 12*cHu*MU**2 - cll1221*MU**2 + 4*cHj3*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2 + cHDD*(MU**2 - MZ**2) + 4*cHj1*(MU**2 - MZ**2))*vevhat**2 - 384*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2)*sth*vevhat**2 - 9*ee**2*(16*cHj3**2*MU**2 - 16*cHj3*cHl311*MU**2 + 4*cHl311**2*MU**2 - 16*cHj3*cHl322*MU**2 + 8*cHl311*cHl322*MU**2 + 4*cHl322**2*MU**2 + 96*cHj3*cHu*MU**2 - 48*cHl311*cHu*MU**2 - 48*cHl322*cHu*MU**2 + 16*cHu**2*MU**2 + 8*cHj3*cll1221*MU**2 - 4*cHl311*cll1221*MU**2 - 4*cHl322*cll1221*MU**2 + 24*cHu*cll1221*MU**2 + cll1221**2*MU**2 - 16*cHj3**2*MZ**2 + 16*cHj3*cHl311*MZ**2 - 4*cHl311**2*MZ**2 + 16*cHj3*cHl322*MZ**2 - 8*cHl311*cHl322*MZ**2 - 4*cHl322**2*MZ**2 - 16*cHu**2*MZ**2 - 8*cHj3*cll1221*MZ**2 + 4*cHl311*cll1221*MZ**2 + 4*cHl322*cll1221*MZ**2 - cll1221**2*MZ**2 + cHDD**2*(MU**2 - MZ**2) + 16*cHj1**2*(MU**2 - MZ**2) - 8*cHj1*(4*cHj3*MU**2 - 2*cHl311*MU**2 - 2*cHl322*MU**2 + 12*cHu*MU**2 + cll1221*MU**2 - 4*cHj3*MZ**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2) + 2*cHDD*(-4*cHj3*MU**2 + 2*cHl311*MU**2 + 2*cHl322*MU**2 - 12*cHu*MU**2 - cll1221*MU**2 + 4*cHj3*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2 + 4*cHj1*(MU**2 - MZ**2)))*vevhat**4 + 96*cHWB*(cHDD + 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 + 4*cHu - cll1221)*cth*ee**2*(2*MU**2 + MZ**2)*sth*vevhat**4 + 4*ee**2*(144*cHj3**2*MU**2 - 240*cHj3*cHl311*MU**2 + 84*cHl311**2*MU**2 - 240*cHj3*cHl322*MU**2 + 168*cHl311*cHl322*MU**2 + 84*cHl322**2*MU**2 + 864*cHj3*cHu*MU**2 - 336*cHl311*cHu*MU**2 - 336*cHl322*cHu*MU**2 + 144*cHu**2*MU**2 + 256*cHWB**2*MU**2 + 120*cHj3*cll1221*MU**2 - 84*cHl311*cll1221*MU**2 - 
84*cHl322*cll1221*MU**2 + 168*cHu*cll1221*MU**2 + 21*cll1221**2*MU**2 - 144*cHj3**2*MZ**2 + 96*cHj3*cHl311*MZ**2 - 12*cHl311**2*MZ**2 + 96*cHj3*cHl322*MZ**2 - 24*cHl311*cHl322*MZ**2 - 12*cHl322**2*MZ**2 + 48*cHl311*cHu*MZ**2 + 48*cHl322*cHu*MZ**2 - 144*cHu**2*MZ**2 + 128*cHWB**2*MZ**2 - 48*cHj3*cll1221*MZ**2 + 12*cHl311*cll1221*MZ**2 + 12*cHl322*cll1221*MZ**2 - 24*cHu*cll1221*MZ**2 - 3*cll1221**2*MZ**2 + 144*cHj1**2*(MU**2 - MZ**2) + 3*cHDD**2*(7*MU**2 - MZ**2) - 24*cHj1*(-10*cHl311*MU**2 - 10*cHl322*MU**2 + 36*cHu*MU**2 + 5*cll1221*MU**2 + 4*cHl311*MZ**2 + 4*cHl322*MZ**2 - 2*cll1221*MZ**2 + 12*cHj3*(MU**2 - MZ**2)) + 6*cHDD*(4*cHj1*(5*MU**2 - 2*MZ**2) + (2*cHl311 + 2*cHl322 - 4*cHu - cll1221)*(7*MU**2 - MZ**2) + cHj3*(-20*MU**2 + 8*MZ**2)))*sth**2*vevhat**4 + 64*cHWB*(cHDD - 12*cHj1 + 12*cHj3 + 2*cHl311 + 2*cHl322 - 12*cHu - cll1221)*cth*ee**2*(2*MU**2 + MZ**2)*sth**3*vevhat**4 - 4*ee**2*(144*cHj3**2*MU**2 - 336*cHj3*cHl311*MU**2 + 68*cHl311**2*MU**2 - 336*cHj3*cHl322*MU**2 + 136*cHl311*cHl322*MU**2 + 68*cHl322**2*MU**2 + 864*cHj3*cHu*MU**2 - 240*cHl311*cHu*MU**2 - 240*cHl322*cHu*MU**2 + 144*cHu**2*MU**2 + 256*cHWB**2*MU**2 + 168*cHj3*cll1221*MU**2 - 68*cHl311*cll1221*MU**2 - 68*cHl322*cll1221*MU**2 + 120*cHu*cll1221*MU**2 + 17*cll1221**2*MU**2 - 144*cHj3**2*MZ**2 + 48*cHj3*cHl311*MZ**2 - 20*cHl311**2*MZ**2 + 48*cHj3*cHl322*MZ**2 - 40*cHl311*cHl322*MZ**2 - 20*cHl322**2*MZ**2 + 96*cHl311*cHu*MZ**2 + 96*cHl322*cHu*MZ**2 - 144*cHu**2*MZ**2 + 128*cHWB**2*MZ**2 - 24*cHj3*cll1221*MZ**2 + 20*cHl311*cll1221*MZ**2 + 20*cHl322*cll1221*MZ**2 - 48*cHu*cll1221*MZ**2 - 5*cll1221**2*MZ**2 + cHDD**2*(17*MU**2 - 5*MZ**2) + 144*cHj1**2*(MU**2 - MZ**2) + 2*cHDD*(84*cHj1*MU**2 - 84*cHj3*MU**2 + 34*cHl311*MU**2 + 34*cHl322*MU**2 - 60*cHu*MU**2 - 17*cll1221*MU**2 - 12*cHj1*MZ**2 + 12*cHj3*MZ**2 - 10*cHl311*MZ**2 - 10*cHl322*MZ**2 + 24*cHu*MZ**2 + 5*cll1221*MZ**2) + 24*cHj1*(14*cHl311*MU**2 + 14*cHl322*MU**2 - 36*cHu*MU**2 - 7*cll1221*MU**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + 
cll1221*MZ**2 - 12*cHj3*(MU**2 - MZ**2)))*sth**4*vevhat**4 - 18432*cth*MZ**2*(cuBIm*cuWIm*(-4*MU**2 + MZ**2) + cuBRe*cuWRe*(8*MU**2 + MZ**2))*sth**7*vevhat**2*yup**2 + 4608*MZ**2*(3*cuWIm**2*(4*MU**2 - MZ**2) + 2*cuBRe**2*(8*MU**2 + MZ**2) - 3*cuWRe**2*(8*MU**2 + MZ**2) + cuBIm**2*(-8*MU**2 + 2*MZ**2))*sth**8*vevhat**2*yup**2 + 9216*cth*MZ**2*(cuBIm*cuWIm*(-4*MU**2 + MZ**2) + cuBRe*cuWRe*(8*MU**2 + MZ**2))*sth**9*vevhat**2*yup**2 + 4608*MZ**2*(-4*cuWIm**2*MU**2 + 8*cuWRe**2*MU**2 + cuWIm**2*MZ**2 + cuWRe**2*MZ**2 + cuBIm**2*(4*MU**2 - MZ**2) - cuBRe**2*(8*MU**2 + MZ**2))*sth**10*vevhat**2*yup**2 + 32*sth**4*vevhat**2*(60*cHj3*ee**2*LambdaSMEFT**2*MU**2 + 26*cHl311*ee**2*LambdaSMEFT**2*MU**2 + 26*cHl322*ee**2*LambdaSMEFT**2*MU**2 - 204*cHu*ee**2*LambdaSMEFT**2*MU**2 - 13*cll1221*ee**2*LambdaSMEFT**2*MU**2 + 84*cHj3*ee**2*LambdaSMEFT**2*MZ**2 - 14*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 14*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 48*cHu*ee**2*LambdaSMEFT**2*MZ**2 + 7*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + cHDD*ee**2*LambdaSMEFT**2*(13*MU**2 - 7*MZ**2) - 12*cHj1*ee**2*LambdaSMEFT**2*(5*MU**2 + 7*MZ**2) - 144*cuBIm**2*MU**2*MZ**2*yup**2 + 288*cuBRe**2*MU**2*MZ**2*yup**2 + 864*cuWIm**2*MU**2*MZ**2*yup**2 - 1728*cuWRe**2*MU**2*MZ**2*yup**2 + 36*cuBIm**2*MZ**4*yup**2 + 36*cuBRe**2*MZ**4*yup**2 - 216*cuWIm**2*MZ**4*yup**2 - 216*cuWRe**2*MZ**4*yup**2) - 128*sth**6*vevhat**2*(24*cHj3*ee**2*LambdaSMEFT**2*MU**2 + 4*cHl311*ee**2*LambdaSMEFT**2*MU**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MU**2 - 24*cHu*ee**2*LambdaSMEFT**2*MU**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MU**2 + 12*cHj3*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 12*cHu*ee**2*LambdaSMEFT**2*MZ**2 - cll1221*ee**2*LambdaSMEFT**2*MZ**2 + cHDD*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) - 12*cHj1*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) - 180*cuBIm**2*MU**2*MZ**2*yup**2 + 360*cuBRe**2*MU**2*MZ**2*yup**2 + 468*cuWIm**2*MU**2*MZ**2*yup**2 - 936*cuWRe**2*MU**2*MZ**2*yup**2 + 
45*cuBIm**2*MZ**4*yup**2 + 45*cuBRe**2*MZ**4*yup**2 - 117*cuWIm**2*MZ**4*yup**2 - 117*cuWRe**2*MZ**4*yup**2) - 96*sth**2*vevhat**2*(-4*cHj3*ee**2*LambdaSMEFT**2*MU**2 + 6*cHl311*ee**2*LambdaSMEFT**2*MU**2 + 6*cHl322*ee**2*LambdaSMEFT**2*MU**2 - 44*cHu*ee**2*LambdaSMEFT**2*MU**2 - 3*cll1221*ee**2*LambdaSMEFT**2*MU**2 + 16*cHj3*ee**2*LambdaSMEFT**2*MZ**2 - 6*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 6*cHl322*ee**2*LambdaSMEFT**2*MZ**2 - 4*cHu*ee**2*LambdaSMEFT**2*MZ**2 + 3*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 4*cHj1*ee**2*LambdaSMEFT**2*(MU**2 - 4*MZ**2) + 3*cHDD*ee**2*LambdaSMEFT**2*(MU**2 - MZ**2) + 48*cuWIm**2*MU**2*MZ**2*yup**2 - 96*cuWRe**2*MU**2*MZ**2*yup**2 - 12*cuWIm**2*MZ**4*yup**2 - 12*cuWRe**2*MZ**4*yup**2) - 256*cth*sth**5*vevhat**2*(8*cHWB*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) - 45*MZ**2*(cuBIm*cuWIm*(-4*MU**2 + MZ**2) + cuBRe*cuWRe*(8*MU**2 + MZ**2))*yup**2) + 256*cth*sth**3*vevhat**2*(7*cHWB*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) - 9*MZ**2*(cuBIm*cuWIm*(-4*MU**2 + MZ**2) + cuBRe*cuWRe*(8*MU**2 + MZ**2))*yup**2) - 1728*cuWRe*ee*LambdaSMEFT**2*MU*MZ**2*sth*vevhat*yup*cmath.sqrt(2) + 1728*cth*cuBRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**2*vevhat*yup*cmath.sqrt(2) + 13248*cuWRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**3*vevhat*yup*cmath.sqrt(2) - 11520*cth*cuBRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**4*vevhat*yup*cmath.sqrt(2) - 36864*cuWRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**5*vevhat*yup*cmath.sqrt(2) + 25344*cth*cuBRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**6*vevhat*yup*cmath.sqrt(2) + 43776*cuWRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**7*vevhat*yup*cmath.sqrt(2) - 18432*cth*cuBRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**8*vevhat*yup*cmath.sqrt(2) - 18432*cuWRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**9*vevhat*yup*cmath.sqrt(2) + 432*(cHDD + 4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 + 4*cHu - cll1221)*cuWRe*ee*MU*MZ**2*sth*vevhat**3*yup*cmath.sqrt(2) - 144*cth*(3*cHDD*cuBRe + 12*cHj1*cuBRe - 12*cHj3*cuBRe + 6*cHl311*cuBRe + 6*cHl322*cuBRe + 12*cHu*cuBRe - 3*cll1221*cuBRe - 
32*cHWB*cuWRe)*ee*MU*MZ**2*sth**2*vevhat**3*yup*cmath.sqrt(2) - 144*(32*cHWB*cuBRe + (7*cHDD + 60*cHj1 - 60*cHj3 + 14*cHl311 + 14*cHl322 + 60*cHu - 7*cll1221)*cuWRe)*ee*MU*MZ**2*sth**3*vevhat**3*yup*cmath.sqrt(2) + 576*cth*(cHDD*cuBRe + 12*cHj1*cuBRe - 12*cHj3*cuBRe + 2*cHl311*cuBRe + 2*cHl322*cuBRe + 12*cHu*cuBRe - cll1221*cuBRe - 24*cHWB*cuWRe)*ee*MU*MZ**2*sth**4*vevhat**3*yup*cmath.sqrt(2) + 13824*(cHWB*cuBRe + (cHj1 - cHj3 + cHu)*cuWRe)*ee*MU*MZ**2*sth**5*vevhat**3*yup*cmath.sqrt(2) + 576*cth*(cHDD*cuBRe - 12*cHj1*cuBRe + 12*cHj3*cuBRe + 2*cHl311*cuBRe + 2*cHl322*cuBRe - 12*cHu*cuBRe - cll1221*cuBRe + 16*cHWB*cuWRe)*ee*MU*MZ**2*sth**6*vevhat**3*yup*cmath.sqrt(2) - 576*(16*cHWB*cuBRe + (-cHDD + 12*cHj1 - 12*cHj3 - 2*cHl311 - 2*cHl322 + 12*cHu + cll1221)*cuWRe)*ee*MU*MZ**2*sth**7*vevhat**3*yup*cmath.sqrt(2))*cmath.sqrt(-4*MU**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2*(1 - 2*sth**2)**2)',
(P.ve,P.ve__tilde__):'(MZ*(4*ee*LambdaSMEFT**2 + (-cHDD - 4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*ee*vevhat**2)**2)/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*sth**2)',
(P.vm,P.vm__tilde__):'(MZ*(4*ee*LambdaSMEFT**2 + (-cHDD - 4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)*ee*vevhat**2)**2)/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*sth**2)',
(P.vt,P.vt__tilde__):'(MZ*(-4*ee*LambdaSMEFT**2 + (cHDD + 4*cHl133 + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee*vevhat**2)**2)/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*sth**2)',
(P.W__minus__,P.W__plus__):'-((768*ee**2*LambdaSMEFT**4*MW**6 + 1088*ee**2*LambdaSMEFT**4*MW**4*MZ**2 - 256*ee**2*LambdaSMEFT**4*MW**2*MZ**4 - 16*ee**2*LambdaSMEFT**4*MZ**6 - 1920*cW*ee*LambdaSMEFT**2*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth - 16*(5*ee**2*LambdaSMEFT**4*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) - 8*MW**2*MZ**2*(9*cW**2*(8*MW**6 - 6*MW**4*MZ**2 + 9*MW**2*MZ**4 - 2*MZ**6) - cWtil**2*(36*MW**6 + 6*MW**4*MZ**2 - 11*MW**2*MZ**4 + 2*MZ**6)))*sth**2 + 9600*cW*ee*LambdaSMEFT**2*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth**3 + 128*(ee**2*LambdaSMEFT**4*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) - 5*MW**2*MZ**2*(9*cW**2*(8*MW**6 - 6*MW**4*MZ**2 + 9*MW**2*MZ**4 - 2*MZ**6) - cWtil**2*(36*MW**6 + 6*MW**4*MZ**2 - 11*MW**2*MZ**4 + 2*MZ**6)))*sth**4 - 15360*cW*ee*LambdaSMEFT**2*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth**5 - 64*(ee**2*LambdaSMEFT**4*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) - 16*MW**2*MZ**2*(9*cW**2*(8*MW**6 - 6*MW**4*MZ**2 + 9*MW**2*MZ**4 - 2*MZ**6) - cWtil**2*(36*MW**6 + 6*MW**4*MZ**2 - 11*MW**2*MZ**4 + 2*MZ**6)))*sth**6 + 7680*cW*ee*LambdaSMEFT**2*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth**7 - 512*MW**2*MZ**2*(9*cW**2*(8*MW**6 - 6*MW**4*MZ**2 + 9*MW**2*MZ**4 - 2*MZ**6) - cWtil**2*(36*MW**6 + 6*MW**4*MZ**2 - 11*MW**2*MZ**4 + 2*MZ**6))*sth**8 - 8*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ee**2*LambdaSMEFT**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6)*vevhat**2 + 480*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*cW*ee*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth*vevhat**2 - 64*cHWB*cth*ee**2*LambdaSMEFT**2*(24*MW**6 + 54*MW**4*MZ**2 - 11*MW**2*MZ**4 - MZ**6)*sth*vevhat**2 + 24*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ee**2*LambdaSMEFT**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6)*sth**2*vevhat**2 - 256*cth*ee*MW**2*MZ**2*(-(cHWBtil*cWtil*(6*MW**4 - 4*MW**2*MZ**2 + MZ**4)) + 3*cHWB*cW*(4*MW**4 - 17*MW**2*MZ**2 + 4*MZ**4))*sth**2*vevhat**2 - 1440*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*cW*ee*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth**3*vevhat**2 + 
64*cHWB*cth*ee**2*LambdaSMEFT**2*(48*MW**6 + 148*MW**4*MZ**2 - 28*MW**2*MZ**4 - 3*MZ**6)*sth**3*vevhat**2 - 16*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ee**2*LambdaSMEFT**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6)*sth**4*vevhat**2 + 256*cth*ee*MW**2*MZ**2*(-4*cHWBtil*cWtil*(6*MW**4 - 4*MW**2*MZ**2 + MZ**4) + 3*cHWB*cW*(16*MW**4 - 48*MW**2*MZ**2 + 11*MZ**4))*sth**4*vevhat**2 + 960*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*cW*ee*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth**5*vevhat**2 + 128*cHWB*cth*ee**2*LambdaSMEFT**2*MZ**2*(-40*MW**4 + 6*MW**2*MZ**2 + MZ**4)*sth**5*vevhat**2 - 512*cth*ee*MW**2*MZ**2*(-2*cHWBtil*cWtil*(6*MW**4 - 4*MW**2*MZ**2 + MZ**4) + 3*cHWB*cW*(8*MW**4 - 14*MW**2*MZ**2 + 3*MZ**4))*sth**6*vevhat**2 + (cHDD + 2*cHl311 + 2*cHl322 - cll1221)**2*ee**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6)*vevhat**4 + 16*cHWB*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*cth*ee**2*(24*MW**6 + 54*MW**4*MZ**2 - 11*MW**2*MZ**4 - MZ**6)*sth*vevhat**4 + ee**2*(-192*cHl322**2*MW**6 + 768*cHWB**2*MW**6 + 192*cHl322*cll1221*MW**6 - 48*cll1221**2*MW**6 - 272*cHl322**2*MW**4*MZ**2 + 2624*cHWB**2*MW**4*MZ**2 - 128*cHWBtil**2*MW**4*MZ**2 + 272*cHl322*cll1221*MW**4*MZ**2 - 68*cll1221**2*MW**4*MZ**2 + 64*cHl322**2*MW**2*MZ**4 - 448*cHWB**2*MW**2*MZ**4 - 64*cHWBtil**2*MW**2*MZ**4 - 64*cHl322*cll1221*MW**2*MZ**4 + 16*cll1221**2*MW**2*MZ**4 + 4*cHl322**2*MZ**6 - 64*cHWB**2*MZ**6 - 4*cHl322*cll1221*MZ**6 + cll1221**2*MZ**6 - 4*cHl311**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) - 4*cHl311*(2*cHl322 - cll1221)*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) - 2*cHDD*(2*cHl311 + 2*cHl322 - cll1221)*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) + cHDD**2*(-48*MW**6 - 68*MW**4*MZ**2 + 16*MW**2*MZ**4 + MZ**6))*sth**2*vevhat**4 + 16*cHWB*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*cth*ee**2*MZ**2*(-40*MW**4 + 6*MW**2*MZ**2 + MZ**4)*sth**3*vevhat**4 + 128*ee**2*MZ**2*(2*cHWBtil**2*MW**2*(2*MW**2 + MZ**2) + cHWB**2*(-28*MW**4 + 3*MW**2*MZ**2 + MZ**4))*sth**4*vevhat**4 - 
64*ee**2*MZ**2*(4*cHWBtil**2*MW**2*(2*MW**2 + MZ**2) + cHWB**2*(-16*MW**4 + MZ**4))*sth**6*vevhat**4)*cmath.sqrt(-4*MW**2 + MZ**2))/(3072.*cmath.pi*LambdaSMEFT**4*MW**4*MZ**2*sth**2*(1 - 2*sth**2)**2)'})
| 2,938.202247
| 18,079
| 0.578788
| 60,507
| 261,500
| 2.496951
| 0.004512
| 0.056591
| 0.055016
| 0.085681
| 0.947797
| 0.935962
| 0.911459
| 0.890795
| 0.866021
| 0.833793
| 0
| 0.218462
| 0.091178
| 261,500
| 88
| 18,080
| 2,971.590909
| 0.417257
| 0.000585
| 0
| 0
| 0
| 0.623188
| 0.986688
| 0.560587
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028986
| 0
| 0.028986
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
7b006541c633ec057a7eecb1f7a1bb7445d761b3
| 3,352
|
py
|
Python
|
qa327_test/frontend/test_other.py
|
15vrs/cmpe-327
|
aeb1de94604eb0d151ffdd9431b4149f2ec30f9c
|
[
"MIT"
] | null | null | null |
qa327_test/frontend/test_other.py
|
15vrs/cmpe-327
|
aeb1de94604eb0d151ffdd9431b4149f2ec30f9c
|
[
"MIT"
] | null | null | null |
qa327_test/frontend/test_other.py
|
15vrs/cmpe-327
|
aeb1de94604eb0d151ffdd9431b4149f2ec30f9c
|
[
"MIT"
] | null | null | null |
import pytest
from seleniumbase import BaseCase
from qa327_test.conftest import base_url
from unittest.mock import patch
from qa327.models import db, User
from werkzeug.security import generate_password_hash, check_password_hash
# Mock a sample user.
# The password is stored hashed (via generate_password_hash) because the
# application backend presumably verifies credentials with
# check_password_hash — TODO confirm against qa327.backend.
test_user = User(
    email='test_frontend@test.com',
    name='test_frontend',
    password=generate_password_hash('Password!'),
    balance=0
)
# Sample user with an unhashed password.
# Used only as a convenient holder for the plaintext credentials that the
# tests type into the login form; it is never returned by a mocked backend.
# NOTE(review): `balance` is deliberately omitted here — presumably User
# defaults it or it is unused for this purpose; verify against the model.
test_user_unhashed = User(
    email='test_frontend@test.com',
    name='test_frontend',
    password='Password!',
)
class OtherTest(BaseCase):
    """R8: any request other than the authorized routes must return 404.

    Four cases: logged-out/logged-in against an unauthorized route
    (expect 404) and against authorized routes (expect no 404).
    """

    def _reset_session(self):
        """Invalidate any logged-in session by visiting /logout."""
        self.open(base_url + '/logout')

    def _login_as_test_user(self):
        """Open the login page and submit the sample user's plaintext credentials."""
        self.open(base_url + '/login')
        # Enter `test_user.email` in `#email` element
        self.type("#email", test_user_unhashed.email)
        # Enter `test_user.password` in `#password` element
        self.type("#password", test_user_unhashed.password)
        # Submit the form (the submit input carries no id, so select by type)
        self.click('input[type="submit"]')

    def test_R8_1a(self, *_):
        """
        When not logged in return a 404 error for any other requests except the authorized ones
        """
        self._reset_session()
        # Navigate to an unauthorized route
        self.open(base_url + '/badrequest')
        # Verify current page displays 404 error message by checking title
        self.assert_title("404 Not Found")

    @patch('qa327.backend.get_user', return_value=test_user)
    def test_R8_1b(self, *_):
        """
        When logged in return a 404 error for any other requests except the authorized ones
        """
        self._reset_session()
        self._login_as_test_user()
        # Navigate to an unauthorized route
        self.open(base_url + '/badrequest')
        # Verify current page displays 404 error message by checking title
        self.assert_title("404 Not Found")

    def test_R8_1c(self, *_):
        """
        When not logged in ensure a 404 error is not sent for the authorized requests
        """
        self._reset_session()
        # Navigate to /login — an authorized route
        self.open(base_url + '/login')
        # Verify current page does not display the 404 error message by
        # checking content of `#message`
        self.assert_element("#message")
        self.assert_text("", "#message")

    @patch('qa327.backend.get_user', return_value=test_user)
    def test_R8_1d(self, *_):
        """
        When logged in ensure a 404 error is not sent for the authorized requests
        """
        self._reset_session()
        self._login_as_test_user()
        # Navigate to / — an authorized route for a logged-in user
        self.open(base_url + '/')
        # Verify current page does not display the 404 error message by
        # checking content of `#welcome-header`
        self.assert_element("#welcome-header")
        self.assert_text("Hi test_frontend", "#welcome-header")
| 38.090909
| 109
| 0.649463
| 430
| 3,352
| 4.918605
| 0.225581
| 0.04539
| 0.056738
| 0.070922
| 0.752719
| 0.742317
| 0.742317
| 0.742317
| 0.742317
| 0.742317
| 0
| 0.02015
| 0.244928
| 3,352
| 88
| 110
| 38.090909
| 0.815488
| 0.373807
| 0
| 0.5
| 1
| 0
| 0.181496
| 0.044489
| 0
| 0
| 0
| 0
| 0.130435
| 1
| 0.086957
| false
| 0.108696
| 0.130435
| 0
| 0.23913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7b1d997a8d045f7d063f222ade6bba70f4f1c95a
| 17
|
py
|
Python
|
WEEKS/CD_Sata-Structures/_RESOURCES/course-work/Python-Brain-Teasers/mul.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_RESOURCES/course-work/Python-Brain-Teasers/mul.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_RESOURCES/course-work/Python-Brain-Teasers/mul.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
# Demonstrates binary floating-point rounding: 1.1 has no exact binary
# representation, so this prints 1.2100000000000002 rather than 1.21.
product = 1.1 * 1.1
print(product)
| 8.5
| 16
| 0.529412
| 5
| 17
| 1.8
| 0.4
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 0.176471
| 17
| 1
| 17
| 17
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
9ea0197cc94896e36117b312c3d352884b459e72
| 143
|
py
|
Python
|
INDICATORS/MACD.py
|
seyedalirahimi/BorsAnalysis
|
c6bd611ba0a8d29fd5a3437898810fdc0a380945
|
[
"MIT"
] | null | null | null |
INDICATORS/MACD.py
|
seyedalirahimi/BorsAnalysis
|
c6bd611ba0a8d29fd5a3437898810fdc0a380945
|
[
"MIT"
] | null | null | null |
INDICATORS/MACD.py
|
seyedalirahimi/BorsAnalysis
|
c6bd611ba0a8d29fd5a3437898810fdc0a380945
|
[
"MIT"
] | null | null | null |
from ta.trend import MACD
def macd(data, n_fast=26, n_slow=12, n_sign=9, fillna=True):
    """Build a ``ta.trend.MACD`` indicator over ``data``.

    NOTE(review): the conventional MACD uses a *fast* period of 12 and a
    *slow* period of 26; the defaults here (n_fast=26, n_slow=12) look
    swapped relative to that convention. Arguments are also forwarded
    positionally, so their meaning depends entirely on
    ``ta.trend.MACD``'s positional parameter order -- confirm both
    against the installed ``ta`` version before relying on the defaults.
    """
    return MACD(data, n_fast, n_slow, n_sign, fillna)
| 23.833333
| 60
| 0.72028
| 29
| 143
| 3.344828
| 0.586207
| 0.164948
| 0.185567
| 0.268041
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041322
| 0.153846
| 143
| 5
| 61
| 28.6
| 0.760331
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
7b8dd3814b69109598e23aadcac9cdac410565ff
| 16,369
|
py
|
Python
|
table_3.py
|
zilunpeng/svrg_for_policy_evaluation_with_fewer_gradients
|
917d2241fd50eef9b91908a9ab033d97626a7fd8
|
[
"MIT"
] | 3
|
2020-10-05T08:15:18.000Z
|
2021-05-03T04:11:35.000Z
|
table_3.py
|
zilunpeng/svrg_for_policy_evaluation_with_fewer_gradients
|
917d2241fd50eef9b91908a9ab033d97626a7fd8
|
[
"MIT"
] | 1
|
2020-10-06T10:22:02.000Z
|
2020-12-08T04:13:06.000Z
|
table_3.py
|
zilunpeng/svrg_for_policy_evaluation_with_fewer_gradients
|
917d2241fd50eef9b91908a9ab033d97626a7fd8
|
[
"MIT"
] | 2
|
2020-10-05T08:15:31.000Z
|
2020-10-06T06:20:30.000Z
|
"""
This script contains experiment set ups for results in Table 3.
"""
import os
import pandas as pd
from experiment_Setup import Experiment_Setup
from agent_env import get_pi_env
from SVRG import *
if __name__ == '__main__':
    def _run_policy_iteration(env_type, alg_settings, num_runs, num_epoch,
                              num_data, out_path):
        """Run ``num_runs`` policy-iteration experiments for one environment
        and pickle the accumulated results to ``out_path``.

        :param env_type: environment key passed to get_pi_env ("rmdp", "mc", "cp", "ab")
        :param alg_settings: per-algorithm hyper-parameter dicts for Experiment_Setup
        :param num_runs: number of independent repetitions to accumulate
        :param num_epoch: epochs per experiment (100 for small data, 1 for large)
        :param num_data: dataset size passed to get_pi_env
        :param out_path: destination .pkl file for the pandas DataFrame of results
        """
        results = []
        for _ in range(num_runs):
            exp_setup = Experiment_Setup(num_epoch=num_epoch, exp_settings=alg_settings,
                                         saving_dir_path="./",
                                         multi_process_exps=False, use_gpu=False,
                                         num_processes=1,
                                         batch_size=100, num_workers=0)
            pi_env = get_pi_env(env_type=env_type, exp_setup=exp_setup, loading_path="",
                                is_loading=False, saving_path="./", is_saving=True,
                                policy_iteration_episode=1, init_method="zero",
                                num_data=num_data)
            results.extend(pi_env.run_policy_iteration())
        # BUG FIX: the original pickled the undefined name `pi_results` (a
        # NameError at runtime); the accumulated `results` list is what
        # should be saved.
        pd.DataFrame(results).to_pickle(out_path)

    NUM_RUNS = 50
    # Random MDP (small data)
    alg_settings = [
        {"method": gtd2, "name": 'gtd2', "sigma_theta": 1e-4, "sigma_omega": 1e-4, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         "record_per_epoch": False, 'num_checks': 10},
        {"method": saga, "name": 'saga', "sigma_theta": 1e-2, "sigma_omega": 1e-1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'inner_loop_multiplier': 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": svrg_classic, "name": 'svrg', "sigma_theta": 1e-3, "sigma_omega": 1e-3, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'inner_loop_multiplier': 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": scsg, "name": 'scsg', "sigma_theta": 1e-4, "sigma_omega": 1e-4, 'grid_search': False,
         'scsg_batch_size_ratio': 0.1, "record_per_dataset_pass": True,
         "num_epoch": 100, 'record_per_epoch': False, 'num_checks': 10},
        {"method": batch_svrg, "name": 'batch_svrg', 'sigma_theta': 1e-3, 'sigma_omega': 1e-3, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'record_per_epoch': False, 'inner_loop_multiplier': 1,
         "batch_svrg_init_ratio": 0.01, "batch_svrg_increment_ratio": 1.2, 'num_checks': 10},
    ]
    _run_policy_iteration("rmdp", alg_settings, NUM_RUNS, 100, 20000,
                          './rmdp_small_data_results.pkl')

    # Mountain Car (small data)
    alg_settings = [
        {"method": gtd2, "name": 'gtd2', "sigma_theta": 1, "sigma_omega": 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         "record_per_epoch": False, 'num_checks': 10},
        {"method": saga, "name": 'saga', "sigma_theta": 1, "sigma_omega": 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'inner_loop_multiplier': 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": svrg_classic, "name": 'svrg', "sigma_theta": 1, "sigma_omega": 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'inner_loop_multiplier': 2, 'record_per_epoch': False, 'num_checks': 10},
        {"method": scsg, "name": 'scsg', "sigma_theta": 1, "sigma_omega": 1, 'grid_search': False,
         'scsg_batch_size_ratio': 0.1, "record_per_dataset_pass": True,
         "num_epoch": 100, 'record_per_epoch': False, 'num_checks': 10},
        {"method": batch_svrg, "name": 'batch_svrg', 'sigma_theta': 1, 'sigma_omega': 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'record_per_epoch': False, 'inner_loop_multiplier': 1,
         "batch_svrg_init_ratio": 0.1, "batch_svrg_increment_ratio": 1.1, 'num_checks': 10},
    ]
    _run_policy_iteration("mc", alg_settings, NUM_RUNS, 100, 20000,
                          './mc_small_data_results.pkl')

    # Cart Pole (small data)
    alg_settings = [
        {"method": gtd2, "name": 'gtd2', "sigma_theta": 1, "sigma_omega": 10, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         "record_per_epoch": False, 'num_checks': 10},
        {"method": saga, "name": 'saga', "sigma_theta": 1, "sigma_omega": 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'inner_loop_multiplier': 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": svrg_classic, "name": 'svrg', "sigma_theta": 1, "sigma_omega": 10, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'inner_loop_multiplier': 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": scsg, "name": 'scsg', "sigma_theta": 1, "sigma_omega": 10, 'grid_search': False,
         'scsg_batch_size_ratio': 0.1, "record_per_dataset_pass": True,
         "num_epoch": 100, 'record_per_epoch': False, 'num_checks': 10},
        {"method": batch_svrg, "name": 'batch_svrg', 'sigma_theta': 1, 'sigma_omega': 10, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'record_per_epoch': False, 'inner_loop_multiplier': 1,
         "batch_svrg_init_ratio": 0.1, "batch_svrg_increment_ratio": 1.1, 'num_checks': 10},
    ]
    _run_policy_iteration("cp", alg_settings, NUM_RUNS, 100, 20000,
                          './cp_small_data_results.pkl')

    # Acrobot (small data)
    alg_settings = [
        {"method": gtd2, "name": 'gtd2', "sigma_theta": 1, "sigma_omega": 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         "record_per_epoch": False, 'num_checks': 10},
        {"method": saga, "name": 'saga', "sigma_theta": 0.1, "sigma_omega": 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'inner_loop_multiplier': 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": svrg_classic, "name": 'svrg', "sigma_theta": 0.1, "sigma_omega": 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'inner_loop_multiplier': 2, 'record_per_epoch': False, 'num_checks': 10},
        {"method": scsg, "name": 'scsg', "sigma_theta": 0.1, "sigma_omega": 1, 'grid_search': False,
         'scsg_batch_size_ratio': 0.1, "record_per_dataset_pass": True,
         "num_epoch": 100, 'record_per_epoch': False, 'num_checks': 10},
        {"method": batch_svrg, "name": 'batch_svrg', 'sigma_theta': 0.1, 'sigma_omega': 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 100,
         'record_per_epoch': False, 'inner_loop_multiplier': 1,
         "batch_svrg_init_ratio": 0.1, "batch_svrg_increment_ratio": 1.1, 'num_checks': 10},
    ]
    _run_policy_iteration("ab", alg_settings, NUM_RUNS, 100, 20000,
                          './ab_small_data_results.pkl')

    NUM_RUNS = 20
    # Random MDP (large data)
    # We did not run SVRG and SAGA in this experiment because they require computing the full gradient and each method is only allowed to use the dataset once.
    alg_settings = [
        {"method": gtd2, "name": 'gtd2', "sigma_theta": 1e-4, "sigma_omega": 1e-3, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 1,
         "record_per_epoch": False, 'num_checks': 10},
        {"method": scsg, "name": 'scsg', "sigma_theta": 1e-3, "sigma_omega": 1e-2, 'grid_search': False,
         'scsg_batch_size_ratio': 0.01, "record_per_dataset_pass": True,
         "num_epoch": 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": batch_svrg, "name": 'batch_svrg', 'sigma_theta': 1e-3, 'sigma_omega': 1e-2, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 1,
         'record_per_epoch': False, 'inner_loop_multiplier': 0.05,
         "batch_svrg_init_ratio": 0.01, "batch_svrg_increment_ratio": 1.1, 'num_checks': 10},
    ]
    _run_policy_iteration("rmdp", alg_settings, NUM_RUNS, 1, 1000000,
                          './rmdp_large_data_results.pkl')

    # Mountain Car (large data)
    # We did not run SVRG and SAGA in this experiment because they require computing the full gradient and each method is only allowed to use the dataset once.
    alg_settings = [
        {"method": gtd2, "name": 'gtd2', "sigma_theta": 1, "sigma_omega": 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 1,
         "record_per_epoch": False, 'num_checks': 10},
        {"method": scsg, "name": 'scsg', "sigma_theta": 1, "sigma_omega": 1, 'grid_search': False,
         'scsg_batch_size_ratio': 0.01, "record_per_dataset_pass": True,
         "num_epoch": 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": batch_svrg, "name": 'batch_svrg', 'sigma_theta': 1, 'sigma_omega': 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 1,
         'record_per_epoch': False, 'inner_loop_multiplier': 0.2,
         "batch_svrg_init_ratio": 0.001, "batch_svrg_increment_ratio": 5, 'num_checks': 10},
    ]
    _run_policy_iteration("mc", alg_settings, NUM_RUNS, 1, 1000000,
                          './mc_large_data_results.pkl')

    # Cart Pole (large data)
    # We did not run SVRG and SAGA in this experiment because they require computing the full gradient and each method is only allowed to use the dataset once.
    alg_settings = [
        {"method": gtd2, "name": 'gtd2', "sigma_theta": 1, "sigma_omega": 10, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 1,
         "record_per_epoch": False, 'num_checks': 10},
        {"method": scsg, "name": 'scsg', "sigma_theta": 1, "sigma_omega": 10, 'grid_search': False,
         'scsg_batch_size_ratio': 0.05, "record_per_dataset_pass": True,
         "num_epoch": 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": batch_svrg, "name": 'batch_svrg', 'sigma_theta': 1, 'sigma_omega': 10, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 1,
         'record_per_epoch': False, 'inner_loop_multiplier': 0.1,
         "batch_svrg_init_ratio": 0.1, "batch_svrg_increment_ratio": 1.1, 'num_checks': 10},
    ]
    _run_policy_iteration("cp", alg_settings, NUM_RUNS, 1, 1000000,
                          './cp_large_data_results.pkl')

    # Acrobot (large data)
    # We did not run SVRG and SAGA in this experiment because they require computing the full gradient and each method is only allowed to use the dataset once.
    alg_settings = [
        {"method": gtd2, "name": 'gtd2', "sigma_theta": 1, "sigma_omega": 10, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 1,
         "record_per_epoch": False, 'num_checks': 10},
        {"method": scsg, "name": 'scsg', "sigma_theta": 0.1, "sigma_omega": 1, 'grid_search': False,
         'scsg_batch_size_ratio': 0.05, "record_per_dataset_pass": True,
         "num_epoch": 1, 'record_per_epoch': False, 'num_checks': 10},
        {"method": batch_svrg, "name": 'batch_svrg', 'sigma_theta': 0.1, 'sigma_omega': 1, 'grid_search': False,
         "record_per_dataset_pass": True, "num_epoch": 1,
         'record_per_epoch': False, 'inner_loop_multiplier': 0.2,
         "batch_svrg_init_ratio": 0.001, "batch_svrg_increment_ratio": 5, 'num_checks': 10},
    ]
    _run_policy_iteration("ab", alg_settings, NUM_RUNS, 1, 1000000,
                          './ab_large_data_results.pkl')
| 63.200772
| 159
| 0.583298
| 2,044
| 16,369
| 4.301859
| 0.061644
| 0.065507
| 0.054589
| 0.072785
| 0.96247
| 0.95178
| 0.95178
| 0.95178
| 0.95178
| 0.95178
| 0
| 0.037584
| 0.284807
| 16,369
| 259
| 160
| 63.200772
| 0.713505
| 0.053088
| 0
| 0.8
| 0
| 0
| 0.297158
| 0.118605
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.16
| 0.025
| 0
| 0.025
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
c8b9dd3da60dfd39fcd797c62d75dad8de795ee3
| 32,804
|
py
|
Python
|
tests/data/test_data_manager.py
|
tjaffri/paraphrase-id-tensorflow
|
1f0bce5be6b458f7186f7ca05a584e193de483bb
|
[
"MIT"
] | 354
|
2017-05-20T08:03:57.000Z
|
2022-01-16T07:06:03.000Z
|
tests/data/test_data_manager.py
|
tjaffri/paraphrase-id-tensorflow
|
1f0bce5be6b458f7186f7ca05a584e193de483bb
|
[
"MIT"
] | 17
|
2017-05-21T18:30:22.000Z
|
2019-08-14T12:57:51.000Z
|
tests/data/test_data_manager.py
|
tjaffri/paraphrase-id-tensorflow
|
1f0bce5be6b458f7186f7ca05a584e193de483bb
|
[
"MIT"
] | 93
|
2017-05-20T20:09:59.000Z
|
2021-05-02T05:13:25.000Z
|
from numpy.testing import assert_allclose
import numpy as np
from overrides import overrides
from duplicate_questions.data.data_manager import DataManager
from duplicate_questions.data.instances.sts_instance import STSInstance
from ..common.test_case import DuplicateTestCase
class TestDataManagerTrain(DuplicateTestCase):
    """Tests for DataManager's training-data generators (word, character,
    word+character modes, padding, truncation, and batching)."""

    @overrides
    def setUp(self):
        super(TestDataManagerTrain, self).setUp()
        self.write_duplicate_questions_train_file()
        self.data_manager = DataManager(STSInstance)

    def test_get_train_data_default(self):
        """Default (word) mode yields three padded instances, then StopIteration;
        a fresh generator replays the same sequence."""
        get_train_gen, train_size = self.data_manager.get_train_data_from_file(
            [self.TRAIN_FILE])
        assert train_size == 3
        train_gen = get_train_gen()
        inputs1, labels1 = train_gen.__next__()
        assert_allclose(inputs1[0], np.array([2, 0]))
        assert_allclose(inputs1[1], np.array([3, 4]))
        assert_allclose(labels1[0], np.array([1, 0]))
        inputs2, labels2 = train_gen.__next__()
        assert_allclose(inputs2[0], np.array([5, 0]))
        assert_allclose(inputs2[1], np.array([6, 0]))
        assert_allclose(labels2[0], np.array([0, 1]))
        inputs3, labels3 = train_gen.__next__()
        assert_allclose(inputs3[0], np.array([7, 0]))
        assert_allclose(inputs3[1], np.array([8, 0]))
        assert_allclose(labels3[0], np.array([1, 0]))
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            train_gen.__next__()
        # Test that we can make a new train generator
        new_train_gen = get_train_gen()
        # Verify that the new and old generator are not the same object
        assert new_train_gen != train_gen
        new_inputs1, new_labels1 = new_train_gen.__next__()
        assert_allclose(new_inputs1, inputs1)
        assert_allclose(new_labels1, labels1)
        new_inputs2, new_labels2 = new_train_gen.__next__()
        assert_allclose(new_inputs2, inputs2)
        assert_allclose(new_labels2, labels2)
        new_inputs3, new_labels3 = new_train_gen.__next__()
        assert_allclose(new_inputs3, inputs3)
        assert_allclose(new_labels3, labels3)
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            new_train_gen.__next__()

    def test_get_train_data_default_character(self):
        """Character mode yields 2-D per-word character index arrays."""
        get_train_gen, train_size = self.data_manager.get_train_data_from_file(
            [self.TRAIN_FILE], mode="character")
        train_gen = get_train_gen()
        assert train_size == 3
        inputs1, labels1 = train_gen.__next__()
        assert_allclose(inputs1[0], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 10, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs1[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 11, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 12, 19, 17, 18]]))
        assert_allclose(labels1[0], np.array([1, 0]))
        inputs2, labels2 = train_gen.__next__()
        assert_allclose(inputs2[0], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 13, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs2[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 14, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(labels2[0], np.array([0, 1]))
        inputs3, labels3 = train_gen.__next__()
        assert_allclose(inputs3[0], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 15, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs3[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 16, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(labels3[0], np.array([1, 0]))
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            train_gen.__next__()

    def test_get_train_data_default_word_and_character(self):
        """word+character mode interleaves word arrays with character arrays."""
        get_train_gen, train_size = self.data_manager.get_train_data_from_file(
            [self.TRAIN_FILE], mode="word+character")
        train_gen = get_train_gen()
        assert train_size == 3
        inputs1, labels1 = train_gen.__next__()
        assert_allclose(inputs1[0], np.array([2, 0]))
        assert_allclose(inputs1[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 10, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs1[2], np.array([3, 4]))
        assert_allclose(inputs1[3], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 11, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 12, 19, 17, 18]]))
        assert_allclose(labels1[0], np.array([1, 0]))
        inputs2, labels2 = train_gen.__next__()
        assert_allclose(inputs2[0], np.array([5, 0]))
        assert_allclose(inputs2[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 13, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs2[2], np.array([6, 0]))
        assert_allclose(inputs2[3], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 14, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(labels2[0], np.array([0, 1]))
        inputs3, labels3 = train_gen.__next__()
        assert_allclose(inputs3[0], np.array([7, 0]))
        assert_allclose(inputs3[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 15, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs3[2], np.array([8, 0]))
        assert_allclose(inputs3[3], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 16, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(labels3[0], np.array([1, 0]))
        # Should raise a StopIteration (the generator does not cycle; the
        # stale "should cycle back to the start" comment was removed)
        with self.assertRaises(StopIteration):
            train_gen.__next__()

    def test_get_train_data_pad_with_max_lens(self):
        """Explicit max_lengths truncates each sentence to one word."""
        get_train_gen, train_size = self.data_manager.get_train_data_from_file(
            [self.TRAIN_FILE],
            max_lengths={"num_sentence_words": 1})
        train_gen = get_train_gen()
        assert train_size == 3
        inputs1, labels1 = train_gen.__next__()
        assert_allclose(inputs1[0], np.array([2]))
        assert_allclose(inputs1[1], np.array([3]))
        assert_allclose(labels1[0], np.array([1, 0]))
        inputs2, labels2 = train_gen.__next__()
        assert_allclose(inputs2[0], np.array([5]))
        assert_allclose(inputs2[1], np.array([6]))
        assert_allclose(labels2[0], np.array([0, 1]))
        inputs3, labels3 = train_gen.__next__()
        assert_allclose(inputs3[0], np.array([7]))
        assert_allclose(inputs3[1], np.array([8]))
        assert_allclose(labels3[0], np.array([1, 0]))
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            train_gen.__next__()

    def test_get_train_data_with_max_instances(self):
        """max_instances=2 truncates the dataset to the first two instances."""
        get_train_gen, train_size = self.data_manager.get_train_data_from_file(
            [self.TRAIN_FILE],
            max_instances=2)
        train_gen = get_train_gen()
        assert train_size == 2
        inputs1, labels1 = train_gen.__next__()
        assert_allclose(inputs1[0], np.array([2, 0]))
        assert_allclose(inputs1[1], np.array([3, 4]))
        assert_allclose(labels1[0], np.array([1, 0]))
        inputs2, labels2 = train_gen.__next__()
        assert_allclose(inputs2[0], np.array([5, 0]))
        assert_allclose(inputs2[1], np.array([6, 0]))
        assert_allclose(labels2[0], np.array([0, 1]))
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            train_gen.__next__()

    def test_get_train_data_errors(self):
        """Invalid pad/max_lengths combinations raise ValueError."""
        with self.assertRaises(ValueError):
            self.data_manager.get_train_data_from_file(
                [self.TRAIN_FILE],
                max_lengths={"num_sentence_words": 1},
                pad=False)
        with self.assertRaises(ValueError):
            self.data_manager.get_train_data_from_file(
                [self.TRAIN_FILE],
                max_lengths={"some wrong key": 1})

    def test_get_train_data_no_pad(self):
        """pad=False yields ragged (unpadded) index arrays."""
        get_train_gen, train_size = self.data_manager.get_train_data_from_file(
            [self.TRAIN_FILE],
            pad=False)
        train_gen = get_train_gen()
        assert train_size == 3
        inputs1, labels1 = train_gen.__next__()
        assert_allclose(inputs1[0], np.array([2]))
        assert_allclose(inputs1[1], np.array([3, 4]))
        assert_allclose(labels1[0], np.array([1, 0]))
        inputs2, labels2 = train_gen.__next__()
        assert_allclose(inputs2[0], np.array([5]))
        assert_allclose(inputs2[1], np.array([6]))
        assert_allclose(labels2[0], np.array([0, 1]))
        inputs3, labels3 = train_gen.__next__()
        assert_allclose(inputs3[0], np.array([7]))
        assert_allclose(inputs3[1], np.array([8]))
        assert_allclose(labels3[0], np.array([1, 0]))
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            train_gen.__next__()

    def test_generate_train_batches(self):
        """Batch generator groups instances in pairs; two independent batch
        generators over the same data produce identical batches."""
        get_train_gen, train_size = self.data_manager.get_train_data_from_file(
            [self.TRAIN_FILE])
        batch_gen = DataManager.get_batch_generator(get_train_gen, 2)
        new_batch_gen = DataManager.get_batch_generator(get_train_gen, 2)
        # Assert that the new generator is a different object
        # than the old generator.
        assert new_batch_gen != batch_gen
        assert train_size == 3
        first_batch = batch_gen.__next__()
        new_first_batch = new_batch_gen.__next__()
        inputs, labels = first_batch
        new_inputs, new_labels = new_first_batch
        assert len(inputs) == len(new_inputs) == 2
        assert len(labels) == len(new_labels) == 1
        # Ensure output matches ground truth
        assert_allclose(inputs[0], np.array([[2, 0], [5, 0]]))
        assert_allclose(inputs[1], np.array([[3, 4], [6, 0]]))
        assert_allclose(labels[0], np.array([[1, 0], [0, 1]]))
        # Ensure both generators produce same results.
        assert_allclose(inputs[0], new_inputs[0])
        assert_allclose(inputs[1], new_inputs[1])
        # BUG FIX: the original compared labels[0] against itself, which
        # always passes; compare against the second generator's labels.
        assert_allclose(labels[0], new_labels[0])
        second_batch = batch_gen.__next__()
        new_second_batch = new_batch_gen.__next__()
        inputs, labels = second_batch
        new_inputs, new_labels = new_second_batch
        assert len(inputs) == len(new_inputs) == 2
        assert len(labels) == len(new_labels) == 1
        # Ensure output matches ground truth
        assert_allclose(inputs[0], np.array([[7, 0]]))
        assert_allclose(inputs[1], np.array([[8, 0]]))
        assert_allclose(labels[0], np.array([[1, 0]]))
        # Ensure both generators produce same results.
        assert_allclose(inputs[0], new_inputs[0])
        assert_allclose(inputs[1], new_inputs[1])
        # BUG FIX: same self-comparison as above.
        assert_allclose(labels[0], new_labels[0])
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            batch_gen.__next__()
            new_batch_gen.__next__()
class TestDataManagerValidation(DuplicateTestCase):
    @overrides
    def setUp(self):
        """Write train and validation fixture files and load the train data.

        The train file is read first — presumably so the vocabulary/index
        used for the validation data is built from the training set; confirm
        against DataManager.get_validation_data_from_file.
        """
        super(TestDataManagerValidation, self).setUp()
        self.write_duplicate_questions_train_file()
        self.write_duplicate_questions_validation_file()
        self.data_manager = DataManager(STSInstance)
        self.data_manager.get_train_data_from_file([self.TRAIN_FILE])
    def test_get_validation_data_default(self):
        """Default (word) mode yields three padded validation instances,
        then StopIteration; a fresh generator replays the same sequence."""
        get_val_gen, val_size = self.data_manager.get_validation_data_from_file(
            [self.VALIDATION_FILE])
        assert val_size == 3
        val_gen = get_val_gen()
        inputs1, labels1 = val_gen.__next__()
        assert_allclose(inputs1[0], np.array([2, 0]))
        assert_allclose(inputs1[1], np.array([3, 1]))
        assert_allclose(labels1[0], np.array([1, 0]))
        inputs2, labels2 = val_gen.__next__()
        assert_allclose(inputs2[0], np.array([1, 0]))
        assert_allclose(inputs2[1], np.array([1, 0]))
        assert_allclose(labels2[0], np.array([0, 1]))
        inputs3, labels3 = val_gen.__next__()
        assert_allclose(inputs3[0], np.array([7, 0]))
        assert_allclose(inputs3[1], np.array([8, 1]))
        assert_allclose(labels3[0], np.array([1, 0]))
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            val_gen.__next__()
        # Test that we can make a new val generator
        new_val_gen = get_val_gen()
        # Verify that the new and old generator are not the same object
        assert new_val_gen != val_gen
        new_inputs1, new_labels1 = new_val_gen.__next__()
        assert_allclose(new_inputs1, inputs1)
        assert_allclose(new_labels1, labels1)
        new_inputs2, new_labels2 = new_val_gen.__next__()
        assert_allclose(new_inputs2, inputs2)
        assert_allclose(new_labels2, labels2)
        new_inputs3, new_labels3 = new_val_gen.__next__()
        assert_allclose(new_inputs3, inputs3)
        assert_allclose(new_labels3, labels3)
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            new_val_gen.__next__()
    def test_get_validation_data_default_character(self):
        """Character mode yields 2-D per-word character index arrays;
        out-of-train-vocabulary characters map to index 1 (unknown)."""
        get_val_gen, val_size = self.data_manager.get_validation_data_from_file(
            [self.VALIDATION_FILE], mode="character")
        assert val_size == 3
        val_gen = get_val_gen()
        inputs1, labels1 = val_gen.__next__()
        assert_allclose(inputs1[0], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 10, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs1[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 11, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 1, 0, 0, 0]]))
        assert_allclose(labels1[0], np.array([1, 0]))
        inputs2, labels2 = val_gen.__next__()
        assert_allclose(inputs2[0], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 1, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs2[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 10, 1, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(labels2[0], np.array([0, 1]))
        inputs3, labels3 = val_gen.__next__()
        assert_allclose(inputs3[0], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 15, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs3[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 16, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 10, 10, 0, 0]]))
        assert_allclose(labels3[0], np.array([1, 0]))
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            val_gen.__next__()
    def test_get_validation_data_default_word_and_character(self):
        """word+character mode interleaves word arrays with character arrays
        for each sentence of the validation instances."""
        get_val_gen, val_size = self.data_manager.get_validation_data_from_file(
            [self.VALIDATION_FILE], mode="word+character")
        val_gen = get_val_gen()
        assert val_size == 3
        inputs1, labels1 = val_gen.__next__()
        assert_allclose(inputs1[0], np.array([2, 0]))
        assert_allclose(inputs1[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 10, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs1[2], np.array([3, 1]))
        assert_allclose(inputs1[3], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 11, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 1, 0, 0, 0]]))
        assert_allclose(labels1[0], np.array([1, 0]))
        inputs2, labels2 = val_gen.__next__()
        assert_allclose(inputs2[0], np.array([1, 0]))
        assert_allclose(inputs2[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 1, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs2[2], np.array([1, 0]))
        assert_allclose(inputs2[3], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 10, 1, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(labels2[0], np.array([0, 1]))
        inputs3, labels3 = val_gen.__next__()
        assert_allclose(inputs3[0], np.array([7, 0]))
        assert_allclose(inputs3[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 15, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs3[2], np.array([8, 1]))
        assert_allclose(inputs3[3], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 16, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 10, 10, 0, 0]]))
        assert_allclose(labels3[0], np.array([1, 0]))
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            val_gen.__next__()
def test_get_validation_data_pad_with_max_lens(self):
    """A num_sentence_words limit of 1 truncates each sentence input to
    a single word id."""
    get_val_gen, val_size = self.data_manager.get_validation_data_from_file(
        [self.VALIDATION_FILE],
        max_lengths={"num_sentence_words": 1})
    val_gen = get_val_gen()
    assert val_size == 3
    # ((first sentence, second sentence), label) per instance.
    expected = [(([2], [3]), [1, 0]),
                (([1], [1]), [0, 1]),
                (([7], [8]), [1, 0])]
    for (first_sentence, second_sentence), label in expected:
        inputs, labels = next(val_gen)
        assert_allclose(inputs[0], np.array(first_sentence))
        assert_allclose(inputs[1], np.array(second_sentence))
        assert_allclose(labels[0], np.array(label))
    # No instances remain once all three have been yielded.
    with self.assertRaises(StopIteration):
        next(val_gen)
def test_get_validation_data_with_max_instances(self):
    """max_instances=2 truncates the validation set to its first two
    instances.

    Bug fix: the original wrote ``val_size == 2`` as a bare comparison
    whose result was discarded, so the reported size was never actually
    checked; it now has an ``assert``.
    """
    get_val_gen, val_size = self.data_manager.get_validation_data_from_file(
        [self.VALIDATION_FILE],
        max_instances=2)
    # Fixed: previously `val_size == 2` was a no-op expression statement.
    assert val_size == 2
    val_gen = get_val_gen()
    inputs1, labels1 = next(val_gen)
    assert_allclose(inputs1[0], np.array([2, 0]))
    assert_allclose(inputs1[1], np.array([3, 1]))
    assert_allclose(labels1[0], np.array([1, 0]))
    inputs2, labels2 = next(val_gen)
    assert_allclose(inputs2[0], np.array([1, 0]))
    assert_allclose(inputs2[1], np.array([1, 0]))
    assert_allclose(labels2[0], np.array([0, 1]))
    # Only two instances were read, so a third next() must raise.
    with self.assertRaises(StopIteration):
        next(val_gen)
def test_get_validation_data_errors(self):
    """Passing max_lengths together with pad=False, or an unknown
    padding key, raises ValueError."""
    invalid_kwargs = (
        {"max_lengths": {"num_sentence_words": 1}, "pad": False},
        {"max_lengths": {"some wrong key": 1}},
    )
    for kwargs in invalid_kwargs:
        with self.assertRaises(ValueError):
            self.data_manager.get_validation_data_from_file(
                [self.VALIDATION_FILE], **kwargs)
def test_get_validation_data_no_pad(self):
    """With pad=False each sentence keeps its natural (ragged) length."""
    get_val_gen, val_size = self.data_manager.get_validation_data_from_file(
        [self.VALIDATION_FILE],
        pad=False)
    assert val_size == 3
    val_gen = get_val_gen()
    # ((first sentence, second sentence), label) per instance; note the
    # unequal sentence lengths since no padding is applied.
    expected = [(([2], [3, 1]), [1, 0]),
                (([1], [1]), [0, 1]),
                (([7], [8, 1, 1]), [1, 0])]
    for (first_sentence, second_sentence), label in expected:
        inputs, labels = next(val_gen)
        assert_allclose(inputs[0], np.array(first_sentence))
        assert_allclose(inputs[1], np.array(second_sentence))
        assert_allclose(labels[0], np.array(label))
    # Exactly three instances, then exhaustion.
    with self.assertRaises(StopIteration):
        next(val_gen)
def test_generate_validation_batches(self):
    """Batching the validation generator yields identical results from
    two independently-created batch generators.

    Bug fixes over the original test:
    * ``assert_allclose(labels[0], labels[0])`` compared an array with
      itself (twice); it now compares against ``new_labels[0]``.
    * the final exhaustion check ran ``new_batch_gen.__next__()`` after
      ``batch_gen.__next__()`` had already raised inside the same
      ``assertRaises`` block, so it never executed; each generator now
      gets its own ``assertRaises``.
    """
    get_val_gen, val_size = self.data_manager.get_validation_data_from_file(
        [self.VALIDATION_FILE])
    batch_gen = self.data_manager.get_batch_generator(get_val_gen, 2)
    new_batch_gen = DataManager.get_batch_generator(get_val_gen, 2)
    assert val_size == 3
    # Assert that the new generator is a different object
    # than the old generator.
    assert new_batch_gen is not batch_gen
    first_batch = next(batch_gen)
    new_first_batch = next(new_batch_gen)
    inputs, labels = first_batch
    new_inputs, new_labels = new_first_batch
    assert len(inputs) == len(new_inputs) == 2
    assert len(labels) == len(new_labels) == 1
    # Ensure output matches ground truth.
    assert_allclose(inputs[0], np.array([[2, 0], [1, 0]]))
    assert_allclose(inputs[1], np.array([[3, 1], [1, 0]]))
    assert_allclose(labels[0], np.array([[1, 0], [0, 1]]))
    # Ensure both generators produce same results.
    assert_allclose(inputs[0], new_inputs[0])
    assert_allclose(inputs[1], new_inputs[1])
    # Fixed: the original compared labels[0] to itself.
    assert_allclose(labels[0], new_labels[0])
    second_batch = next(batch_gen)
    new_second_batch = next(new_batch_gen)
    inputs, labels = second_batch
    new_inputs, new_labels = new_second_batch
    assert len(inputs) == len(new_inputs) == 2
    assert len(labels) == len(new_labels) == 1
    # Ensure output matches ground truth.
    assert_allclose(inputs[0], np.array([[7, 0]]))
    assert_allclose(inputs[1], np.array([[8, 1]]))
    assert_allclose(labels[0], np.array([[1, 0]]))
    # Ensure both generators produce same results.
    assert_allclose(inputs[0], new_inputs[0])
    assert_allclose(inputs[1], new_inputs[1])
    # Fixed: the original compared labels[0] to itself.
    assert_allclose(labels[0], new_labels[0])
    # Fixed: exhaust each generator under its own assertRaises; the
    # original's second __next__ call was unreachable.
    with self.assertRaises(StopIteration):
        next(batch_gen)
    with self.assertRaises(StopIteration):
        next(new_batch_gen)
class TestDataManagerTest(DuplicateTestCase):
    """Tests for DataManager's test-data generators (unlabeled data).

    Bug fix in ``test_generate_test_batches``: the original final
    exhaustion check called ``new_batch_gen.__next__()`` inside the same
    ``assertRaises(StopIteration)`` block, after ``batch_gen.__next__()``
    had already raised, so the second generator's exhaustion was never
    actually verified; each generator now gets its own ``assertRaises``.
    """

    @overrides
    def setUp(self):
        super(TestDataManagerTest, self).setUp()
        self.write_duplicate_questions_train_file()
        self.write_duplicate_questions_test_file()
        self.data_manager = DataManager(STSInstance)
        # Indexing the train file builds the vocabulary used to index
        # the test data below.
        self.data_manager.get_train_data_from_file([self.TRAIN_FILE])

    def test_get_test_data_default(self):
        """Default (word) mode yields padded word-id arrays and empty labels."""
        get_test_gen, test_size = self.data_manager.get_test_data_from_file(
            [self.TEST_FILE])
        assert test_size == 3
        test_gen = get_test_gen()
        inputs1, labels1 = next(test_gen)
        assert_allclose(inputs1[0], np.array([2, 1]))
        assert_allclose(inputs1[1], np.array([1, 0]))
        inputs2, labels2 = next(test_gen)
        assert_allclose(inputs2[0], np.array([4, 0]))
        assert_allclose(inputs2[1], np.array([5, 1]))
        inputs3, labels3 = next(test_gen)
        assert_allclose(inputs3[0], np.array([6, 0]))
        assert_allclose(inputs3[1], np.array([7, 0]))
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            next(test_gen)
        # Test that we can make a new test generator
        new_test_gen = get_test_gen()
        # Verify that the new and old generator are not the same object
        assert new_test_gen is not test_gen
        new_inputs1, new_labels1 = next(new_test_gen)
        assert_allclose(new_inputs1, inputs1)
        assert_allclose(new_labels1, labels1)
        new_inputs2, new_labels2 = next(new_test_gen)
        assert_allclose(new_inputs2, inputs2)
        assert_allclose(new_labels2, labels2)
        new_inputs3, new_labels3 = next(new_test_gen)
        assert_allclose(new_inputs3, inputs3)
        assert_allclose(new_labels3, labels3)
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            next(new_test_gen)

    def test_get_test_data_default_character(self):
        """Character mode yields padded character-id matrices and no labels."""
        get_test_gen, test_size = self.data_manager.get_test_data_from_file(
            [self.TEST_FILE], mode="character")
        test_gen = get_test_gen()
        assert test_size == 3
        inputs1, labels = next(test_gen)
        assert_allclose(inputs1[0], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 10, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 9, 4, 1, 10]]))
        assert_allclose(inputs1[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 9, 4, 1, 11],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert len(labels) == 0
        inputs2, labels = next(test_gen)
        assert_allclose(inputs2[0], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 12, 19, 17, 18],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs2[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 13, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 9, 4, 1, 12]]))
        assert len(labels) == 0
        inputs3, labels = next(test_gen)
        assert_allclose(inputs3[0], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 14, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs3[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 15, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert len(labels) == 0
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            next(test_gen)

    def test_get_test_data_default_word_and_character(self):
        """word+character mode interleaves word ids with character matrices."""
        get_test_gen, test_size = self.data_manager.get_test_data_from_file(
            [self.TEST_FILE], mode="word+character")
        test_gen = get_test_gen()
        assert test_size == 3
        inputs1, labels = next(test_gen)
        assert_allclose(inputs1[0], np.array([2, 1]))
        assert_allclose(inputs1[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 10, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 9, 4, 1, 10]]))
        assert_allclose(inputs1[2], np.array([1, 0]))
        assert_allclose(inputs1[3], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 9, 4, 1, 11],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert len(labels) == 0
        inputs2, labels = next(test_gen)
        assert_allclose(inputs2[0], np.array([4, 0]))
        assert_allclose(inputs2[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 12, 19, 17, 18],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs2[2], np.array([5, 1]))
        assert_allclose(inputs2[3], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 13, 0, 0, 0],
                                              [6, 9, 2, 7, 8, 3, 5, 4, 9, 4, 1, 12]]))
        assert len(labels) == 0
        inputs3, labels = next(test_gen)
        assert_allclose(inputs3[0], np.array([6, 0]))
        assert_allclose(inputs3[1], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 14, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert_allclose(inputs3[2], np.array([7, 0]))
        assert_allclose(inputs3[3], np.array([[6, 9, 2, 7, 8, 3, 5, 4, 15, 0, 0, 0],
                                              [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]))
        assert len(labels) == 0
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            next(test_gen)

    def test_get_test_data_pad_with_max_lens(self):
        """A num_sentence_words limit of 1 truncates each sentence to one id."""
        get_test_gen, test_size = self.data_manager.get_test_data_from_file(
            [self.TEST_FILE],
            max_lengths={"num_sentence_words": 1})
        test_gen = get_test_gen()
        assert test_size == 3
        inputs, labels = next(test_gen)
        assert_allclose(inputs[0], np.array([2]))
        assert_allclose(inputs[1], np.array([1]))
        assert len(labels) == 0
        inputs, labels = next(test_gen)
        assert_allclose(inputs[0], np.array([4]))
        assert_allclose(inputs[1], np.array([5]))
        assert len(labels) == 0
        inputs, labels = next(test_gen)
        assert_allclose(inputs[0], np.array([6]))
        assert_allclose(inputs[1], np.array([7]))
        assert len(labels) == 0
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            next(test_gen)

    def test_get_test_data_with_max_instances(self):
        """max_instances=2 truncates the test set to its first two instances."""
        get_test_gen, test_size = self.data_manager.get_test_data_from_file(
            [self.TEST_FILE],
            max_instances=2)
        test_gen = get_test_gen()
        assert test_size == 2
        inputs, labels = next(test_gen)
        assert_allclose(inputs[0], np.array([2, 1]))
        assert_allclose(inputs[1], np.array([1, 0]))
        assert len(labels) == 0
        inputs, labels = next(test_gen)
        assert_allclose(inputs[0], np.array([4, 0]))
        assert_allclose(inputs[1], np.array([5, 1]))
        assert len(labels) == 0
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            next(test_gen)

    def test_get_test_data_errors(self):
        """max_lengths with pad=False, or an unknown padding key, raise ValueError."""
        with self.assertRaises(ValueError):
            self.data_manager.get_test_data_from_file(
                [self.TEST_FILE],
                max_lengths={"num_sentence_words": 1},
                pad=False)
        with self.assertRaises(ValueError):
            self.data_manager.get_test_data_from_file(
                [self.TEST_FILE],
                max_lengths={"some wrong key": 1})

    def test_get_test_data_no_pad(self):
        """With pad=False each sentence keeps its natural (ragged) length."""
        get_test_gen, test_size = self.data_manager.get_test_data_from_file(
            [self.TEST_FILE],
            pad=False)
        test_gen = get_test_gen()
        assert test_size == 3
        inputs, labels = next(test_gen)
        assert_allclose(inputs[0], np.array([2, 1, 2]))
        assert_allclose(inputs[1], np.array([1]))
        assert len(labels) == 0
        inputs, labels = next(test_gen)
        assert_allclose(inputs[0], np.array([4]))
        assert_allclose(inputs[1], np.array([5, 1]))
        assert len(labels) == 0
        inputs, labels = next(test_gen)
        assert_allclose(inputs[0], np.array([6]))
        assert_allclose(inputs[1], np.array([7]))
        assert len(labels) == 0
        # Should raise a StopIteration
        with self.assertRaises(StopIteration):
            next(test_gen)

    def test_generate_test_batches(self):
        """Two independently-created batch generators yield identical batches."""
        get_test_gen, test_size = self.data_manager.get_test_data_from_file(
            [self.TEST_FILE])
        batch_gen = self.data_manager.get_batch_generator(get_test_gen, 2)
        new_batch_gen = DataManager.get_batch_generator(get_test_gen, 2)
        # Assert that the new generator is a different object
        # than the old generator.
        assert new_batch_gen is not batch_gen
        assert test_size == 3
        first_batch = next(batch_gen)
        new_first_batch = next(new_batch_gen)
        inputs, labels = first_batch
        new_inputs, new_labels = new_first_batch
        assert len(inputs) == 2
        assert len(labels) == 0
        # Ensure output matches ground truth
        assert_allclose(inputs[0], np.array([[2, 1], [4, 0]]))
        assert_allclose(inputs[1], np.array([[1, 0], [5, 1]]))
        # Ensure both generators produce same results.
        assert_allclose(inputs[0], new_inputs[0])
        assert_allclose(inputs[1], new_inputs[1])
        second_batch = next(batch_gen)
        new_second_batch = next(new_batch_gen)
        inputs, labels = second_batch
        new_inputs, new_labels = new_second_batch
        assert len(inputs) == 2
        assert len(labels) == 0
        # Ensure output matches ground truth
        assert_allclose(inputs[0], np.array([[6, 0]]))
        assert_allclose(inputs[1], np.array([[7, 0]]))
        # Ensure both generators produce same results.
        assert_allclose(inputs[0], new_inputs[0])
        assert_allclose(inputs[1], new_inputs[1])
        # Fixed: exhaust each generator under its own assertRaises; the
        # original's new_batch_gen.__next__() was unreachable after
        # batch_gen raised inside the same block.
        with self.assertRaises(StopIteration):
            next(batch_gen)
        with self.assertRaises(StopIteration):
            next(new_batch_gen)
| 44.269906
| 89
| 0.579198
| 4,521
| 32,804
| 3.898031
| 0.028312
| 0.042898
| 0.057198
| 0.067639
| 0.960506
| 0.9512
| 0.930716
| 0.921069
| 0.910288
| 0.896158
| 0
| 0.073173
| 0.285941
| 32,804
| 740
| 90
| 44.32973
| 0.679175
| 0.052494
| 0
| 0.794872
| 0
| 0
| 0.007057
| 0
| 0
| 0
| 0
| 0
| 0.490598
| 1
| 0.046154
| false
| 0
| 0.010256
| 0
| 0.061538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c8d123449c9c2430e58ec8090f193afb71eedcf9
| 225
|
py
|
Python
|
src/HarvestBalanceCalculator/__init__.py
|
jrochette/harvest-balance-calculator
|
fb56c0709b76cb1f8eaba4dd479776a3d2376335
|
[
"WTFPL"
] | 14
|
2017-11-27T02:32:34.000Z
|
2019-01-14T14:15:29.000Z
|
src/HarvestBalanceCalculator/__init__.py
|
jrochette/harvest-balance-calculator
|
fb56c0709b76cb1f8eaba4dd479776a3d2376335
|
[
"WTFPL"
] | 30
|
2017-11-26T19:15:31.000Z
|
2021-05-07T01:42:14.000Z
|
src/HarvestBalanceCalculator/__init__.py
|
jrochette/harvest-balance-calculator
|
fb56c0709b76cb1f8eaba4dd479776a3d2376335
|
[
"WTFPL"
] | 6
|
2017-11-26T18:56:40.000Z
|
2018-09-04T12:45:20.000Z
|
from HarvestBalanceCalculator.HarvestTimeEntries import HarvestTimeEntries
from HarvestBalanceCalculator.WorkingPreference import WorkingPreference
from HarvestBalanceCalculator.WorkingTimeInterval import WorkingTimeInterval
| 56.25
| 76
| 0.933333
| 15
| 225
| 14
| 0.4
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053333
| 225
| 3
| 77
| 75
| 0.985915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cde709456cda4af9f17446c839fcf0d7196c079c
| 29,766
|
py
|
Python
|
lib/score_visualization.py
|
fosfrancesco/music-score-diff
|
cd513da922efd7cf4f69eb87262369dc0889bf1a
|
[
"MIT"
] | 4
|
2019-11-26T13:25:58.000Z
|
2022-03-11T10:48:35.000Z
|
lib/score_visualization.py
|
fosfrancesco/music-score-diff
|
cd513da922efd7cf4f69eb87262369dc0889bf1a
|
[
"MIT"
] | null | null | null |
lib/score_visualization.py
|
fosfrancesco/music-score-diff
|
cd513da922efd7cf4f69eb87262369dc0889bf1a
|
[
"MIT"
] | 1
|
2021-05-01T18:29:39.000Z
|
2021-05-01T18:29:39.000Z
|
import re
import music21 as m21
import operator
import copy
import math
from pathlib import Path
import os
from collections.abc import Iterable
import lib.NotationLinear as nlin
# Annotation colors applied to inserted / deleted / substituted score
# elements.  All three are currently red; the inline comments record
# alternative per-operation colors.
INS_COLOR = "red" # "green"
DEL_COLOR = "red"
SUB_COLOR = "red" # "blue"
def annotate_differences(score1, score2, operations):
for op in operations:
# bar
if op[0] == "insbar":
assert type(op[2]) == nlin.Bar
# color all the notes in the inserted score2 measure using INS_COLOR
measure2 = score2.recurse().getElementById(op[2].measure)
textExp = m21.expressions.TextExpression("inserted measure")
textExp.style.color = INS_COLOR
measure2.insert(0, textExp)
measure2.style.color = INS_COLOR # this apparently does nothing
for el in measure2.recurse().notesAndRests:
el.style.color = INS_COLOR
elif op[0] == "delbar":
assert type(op[1]) == nlin.Bar
# color all the notes in the deleted score1 measure using DEL_COLOR
measure1 = score1.recurse().getElementById(op[1].measure)
textExp = m21.expressions.TextExpression("deleted measure")
textExp.style.color = DEL_COLOR
measure1.insert(0, textExp)
measure1.style.color = DEL_COLOR # this apparently does nothing
for el in measure1.recurse().notesAndRests:
el.style.color = DEL_COLOR
# voices
elif op[0] == "voiceins":
assert type(op[2]) == nlin.Voice
# color all the notes in the inserted score2 voice using INS_COLOR
voice2 = score2.recurse().getElementById(op[2].voice)
textExp = m21.expressions.TextExpression("inserted voice")
textExp.style.color = INS_COLOR
voice2.insert(0, textExp)
voice2.style.color = INS_COLOR # this apparently does nothing
for el in voice2.recurse().notesAndRests:
el.style.color = INS_COLOR
elif op[0] == "voicedel":
assert type(op[1]) == nlin.Voice
# color all the notes in the deleted score1 voice using DEL_COLOR
voice1 = score1.recurse().getElementById(op[1].voice)
textExp = m21.expressions.TextExpression("deleted voice")
textExp.style.color = DEL_COLOR
voice1.insert(0, textExp)
voice1.style.color = DEL_COLOR # this apparently does nothing
for el in voice1.recurse().notesAndRests:
el.style.color = DEL_COLOR
# note
elif op[0] == "noteins":
assert type(op[2]) == nlin.AnnotatedNote
# color the inserted score2 general note (note, chord, or rest) using INS_COLOR
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = INS_COLOR
if "Rest" in note2.classes:
textExp = m21.expressions.TextExpression("inserted rest")
elif "Chord" in note2.classes:
textExp = m21.expressions.TextExpression("inserted chord")
else:
textExp = m21.expressions.TextExpression("inserted note")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "notedel":
assert type(op[1]) == nlin.AnnotatedNote
# color the deleted score1 general note (note, chord, or rest) using DEL_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = DEL_COLOR
if "Rest" in note1.classes:
textExp = m21.expressions.TextExpression("deleted rest")
elif "Chord" in note1.classes:
textExp = m21.expressions.TextExpression("deleted chord")
else:
textExp = m21.expressions.TextExpression("deleted note")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
# pitch
elif op[0] == "pitchnameedit":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
assert len(op) == 5 # the indices must be there
# color the changed note (in both scores) using SUB_COLOR
chord1 = score1.recurse().getElementById(op[1].general_note)
note1 = chord1
if "Chord" in note1.classes:
# color just the indexed note in the chord
idx = op[4][0]
note1 = note1.notes[idx]
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed pitch")
textExp.style.color = SUB_COLOR
if note1.activeSite is not None:
note1.activeSite.insert(note1.offset, textExp)
else:
chord1.activeSite.insert(chord1.offset, textExp)
chord2 = score2.recurse().getElementById(op[2].general_note)
note2 = chord2
if "Chord" in note2.classes:
# color just the indexed note in the chord
idx = op[4][1]
note2 = note2.notes[idx]
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed pitch")
textExp.style.color = SUB_COLOR
if note2.activeSite is not None:
note2.activeSite.insert(note2.offset, textExp)
else:
chord2.activeSite.insert(chord2.offset, textExp)
elif op[0] == "inspitch":
assert type(op[2]) == nlin.AnnotatedNote
assert len(op) == 5 # the indices must be there
# color the inserted note in score2 using INS_COLOR
chord2 = score2.recurse().getElementById(op[2].general_note)
note2 = chord2
if "Chord" in note2.classes:
# color just the indexed note in the chord
idx = op[4][1]
note2 = note2.notes[idx]
note2.style.color = INS_COLOR
if "Rest" in note2.classes:
textExp = m21.expressions.TextExpression("inserted rest")
else:
textExp = m21.expressions.TextExpression("inserted note")
textExp.style.color = INS_COLOR
if note2.activeSite is not None:
note2.activeSite.insert(note2.offset, textExp)
else:
chord2.activeSite.insert(chord2.offset, textExp)
elif op[0] == "delpitch":
assert type(op[1]) == nlin.AnnotatedNote
assert len(op) == 5 # the indices must be there
# color the deleted note in score1 using DEL_COLOR
chord1 = score1.recurse().getElementById(op[1].general_note)
note1 = chord1
if "Chord" in note1.classes:
# color just the indexed note in the chord
idx = op[4][0]
note1 = note1.notes[idx]
note1.style.color = DEL_COLOR
if "Rest" in note1.classes:
textExp = m21.expressions.TextExpression("deleted rest")
else:
textExp = m21.expressions.TextExpression("deleted note")
textExp.style.color = DEL_COLOR
if note1.activeSite is not None:
note1.activeSite.insert(note1.offset, textExp)
else:
chord1.activeSite.insert(chord1.offset, textExp)
elif op[0] == "headedit":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the changed note/rest/chord (in both scores) using SUB_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed note head")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed note head")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
# beam
elif op[0] == "insbeam":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the modified note in both scores using INS_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = INS_COLOR
if hasattr(note1, 'beams'):
for beam in note1.beams:
beam.style.color = INS_COLOR # this apparently does nothing
textExp = m21.expressions.TextExpression("increased flags")
textExp.style.color = INS_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = INS_COLOR
if hasattr(note1, 'beams'):
for beam in note2.beams:
beam.style.color = INS_COLOR # this apparently does nothing
textExp = m21.expressions.TextExpression("increased flags")
textExp.style.color = INS_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "delbeam":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the modified note in both scores using DEL_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = DEL_COLOR
if hasattr(note1, 'beams'):
for beam in note1.beams:
beam.style.color = DEL_COLOR # this apparently does nothing
textExp = m21.expressions.TextExpression("decreased flags")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = DEL_COLOR
if hasattr(note1, 'beams'):
for beam in note2.beams:
beam.style.color = DEL_COLOR # this apparently does nothing
textExp = m21.expressions.TextExpression("decreased flags")
textExp.style.color = DEL_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "editbeam":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the changed beam (in both scores) using SUB_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = SUB_COLOR
if hasattr(note1, 'beams'):
for beam in note1.beams:
beam.style.color = SUB_COLOR # this apparently does nothing
textExp = m21.expressions.TextExpression("changed flags")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = SUB_COLOR
if hasattr(note1, 'beams'):
for beam in note2.beams:
beam.style.color = SUB_COLOR # this apparently does nothing
textExp = m21.expressions.TextExpression("changed flags")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
# accident
elif op[0] == "accidentins":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
assert len(op) == 5 # the indices must be there
# color the modified note in both scores using INS_COLOR
chord1 = score1.recurse().getElementById(op[1].general_note)
note1 = chord1
if "Chord" in note1.classes:
# color only the indexed note's accidental in the chord
idx = op[4][0]
note1 = note1.notes[idx]
if note1.pitch.accidental:
note1.pitch.accidental.style.color = INS_COLOR
note1.style.color = INS_COLOR
textExp = m21.expressions.TextExpression("inserted accidental")
textExp.style.color = INS_COLOR
if note1.activeSite is not None:
note1.activeSite.insert(note1.offset, textExp)
else:
chord1.activeSite.insert(chord1.offset, textExp)
chord2 = score2.recurse().getElementById(op[2].general_note)
note2 = chord2
if "Chord" in note2.classes:
# color only the indexed note's accidental in the chord
idx = op[4][1]
note2 = note2.notes[idx]
if note2.pitch.accidental:
note2.pitch.accidental.style.color = INS_COLOR
note2.style.color = INS_COLOR
textExp = m21.expressions.TextExpression("inserted accidental")
textExp.style.color = INS_COLOR
if note2.activeSite is not None:
note2.activeSite.insert(note2.offset, textExp)
else:
chord2.activeSite.insert(chord2.offset, textExp)
elif op[0] == "accidentdel":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
assert len(op) == 5 # the indices must be there
# color the modified note in both scores using DEL_COLOR
chord1 = score1.recurse().getElementById(op[1].general_note)
note1 = chord1
if "Chord" in note1.classes:
# color only the indexed note's accidental in the chord
idx = op[4][0]
note1 = note1.notes[idx]
if note1.pitch.accidental:
note1.pitch.accidental.style.color = DEL_COLOR
note1.style.color = DEL_COLOR
textExp = m21.expressions.TextExpression("deleted accidental")
textExp.style.color = DEL_COLOR
if note1.activeSite is not None:
note1.activeSite.insert(note1.offset, textExp)
else:
chord1.activeSite.insert(chord1.offset, textExp)
chord2 = score2.recurse().getElementById(op[2].general_note)
note2 = chord2
if "Chord" in note2.classes:
# color only the indexed note's accidental in the chord
idx = op[4][1]
note2 = note2.notes[idx]
if note2.pitch.accidental:
note2.pitch.accidental.style.color = DEL_COLOR
note2.style.color = DEL_COLOR
textExp = m21.expressions.TextExpression("deleted accidental")
textExp.style.color = DEL_COLOR
if note2.activeSite is not None:
note2.activeSite.insert(note2.offset, textExp)
else:
chord2.activeSite.insert(chord2.offset, textExp)
elif op[0] == "accidentedit":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
assert len(op) == 5 # the indices must be there
# color the changed accidental (in both scores) using SUB_COLOR
chord1 = score1.recurse().getElementById(op[1].general_note)
note1 = chord1
if "Chord" in note1.classes:
# color just the indexed note in the chord
idx = op[4][0]
note1 = note1.notes[idx]
if note1.pitch.accidental:
note1.pitch.accidental.style.color = SUB_COLOR
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed accidental")
textExp.style.color = SUB_COLOR
if note1.activeSite is not None:
note1.activeSite.insert(note1.offset, textExp)
else:
chord1.activeSite.insert(chord1.offset, textExp)
chord2 = score2.recurse().getElementById(op[2].general_note)
note2 = chord2
if "Chord" in note2.classes:
# color just the indexed note in the chord
idx = op[4][1]
note2 = note2.notes[idx]
if note2.pitch.accidental:
note2.pitch.accidental.style.color = SUB_COLOR
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed accidental")
textExp.style.color = SUB_COLOR
if note2.activeSite is not None:
note2.activeSite.insert(note2.offset, textExp)
else:
chord2.activeSite.insert(chord2.offset, textExp)
elif op[0] == "dotins":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# In music21, the dots are not separately colorable from the note,
# so we will just color the modified note here in both scores, using SUB_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("inserted dot")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("inserted dot")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "dotdel":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# In music21, the dots are not separately colorable from the note,
# so we will just color the modified note here in both scores, using SUB_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("deleted dot")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("deleted dot")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
# tuplets
elif op[0] == "instuplet":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("inserted tuplet")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("inserted tuplet")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "deltuplet":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("deleted tuplet")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("deleted tuplet")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "edittuplet":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed tuplet")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed tuplet")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
# ties
elif op[0] == "tieins":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
assert len(op) == 5 # the indices must be there
# Color the modified note here in both scores, using INS_COLOR
chord1 = score1.recurse().getElementById(op[1].general_note)
note1 = chord1
if "Chord" in note1.classes:
# color just the indexed note in the chord
idx = op[4][0]
note1 = note1.notes[idx]
note1.style.color = INS_COLOR
textExp = m21.expressions.TextExpression("inserted tie")
textExp.style.color = INS_COLOR
if note1.activeSite is not None:
note1.activeSite.insert(note1.offset, textExp)
else:
chord1.activeSite.insert(chord1.offset, textExp)
chord2 = score2.recurse().getElementById(op[2].general_note)
note2 = chord2
if "Chord" in note2.classes:
# color just the indexed note in the chord
idx = op[4][1]
note2 = note2.notes[idx]
note2.style.color = INS_COLOR
textExp = m21.expressions.TextExpression("inserted tie")
textExp.style.color = INS_COLOR
if note2.activeSite is not None:
note2.activeSite.insert(note2.offset, textExp)
else:
chord2.activeSite.insert(chord2.offset, textExp)
elif op[0] == "tiedel":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
assert len(op) == 5 # the indices must be there
# Color the modified note in both scores, using DEL_COLOR
chord1 = score1.recurse().getElementById(op[1].general_note)
note1 = chord1
if "Chord" in note1.classes:
# color just the indexed note in the chord
idx = op[4][0]
note1 = note1.notes[idx]
note1.style.color = DEL_COLOR
textExp = m21.expressions.TextExpression("deleted tie")
textExp.style.color = DEL_COLOR
if note1.activeSite is not None:
note1.activeSite.insert(note1.offset, textExp)
else:
chord1.activeSite.insert(chord1.offset, textExp)
chord2 = score2.recurse().getElementById(op[2].general_note)
note2 = chord2
if "Chord" in note2.classes:
# color just the indexed note in the chord
idx = op[4][1]
note2 = note2.notes[idx]
note2.style.color = DEL_COLOR
textExp = m21.expressions.TextExpression("deleted tie")
textExp.style.color = DEL_COLOR
if note2.activeSite is not None:
note2.activeSite.insert(note2.offset, textExp)
else:
chord2.activeSite.insert(chord2.offset, textExp)
# expressions
elif op[0] == "insexpression":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the note in both scores using INS_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = INS_COLOR
textExp = m21.expressions.TextExpression("inserted expression")
textExp.style.color = INS_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = INS_COLOR
textExp = m21.expressions.TextExpression("inserted expression")
textExp.style.color = INS_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "delexpression":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the deleted expression in score1 using DEL_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = DEL_COLOR
textExp = m21.expressions.TextExpression("deleted expression")
textExp.style.color = DEL_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = DEL_COLOR
textExp = m21.expressions.TextExpression("deleted expression")
textExp.style.color = DEL_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "editexpression":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the changed beam (in both scores) using SUB_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed expression")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed expression")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
# articulations
elif op[0] == "insarticulation":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the modified note in both scores using INS_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = INS_COLOR
textExp = m21.expressions.TextExpression("inserted articulation")
textExp.style.color = INS_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = INS_COLOR
textExp = m21.expressions.TextExpression("inserted articulation")
textExp.style.color = INS_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "delarticulation":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the modified note in both scores using DEL_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = DEL_COLOR
textExp = m21.expressions.TextExpression("deleted articulation")
textExp.style.color = DEL_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = DEL_COLOR
textExp = m21.expressions.TextExpression("deleted articulation")
textExp.style.color = DEL_COLOR
note2.activeSite.insert(note2.offset, textExp)
elif op[0] == "editarticulation":
assert type(op[1]) == nlin.AnnotatedNote
assert type(op[2]) == nlin.AnnotatedNote
# color the modified note (in both scores) using SUB_COLOR
note1 = score1.recurse().getElementById(op[1].general_note)
note1.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed articulation")
textExp.style.color = SUB_COLOR
note1.activeSite.insert(note1.offset, textExp)
note2 = score2.recurse().getElementById(op[2].general_note)
note2.style.color = SUB_COLOR
textExp = m21.expressions.TextExpression("changed articulation")
textExp.style.color = SUB_COLOR
note2.activeSite.insert(note2.offset, textExp)
else:
print(
"Annotation type {} not yet supported for visualization".format(op[0])
)
def show_differences(score1: m21.stream.Score, score2: m21.stream.Score):
    """Display the two (annotated) scores.

    Prefixes each score's composer metadata with "score1" / "score2" so the
    rendered output identifies which score is which, creating empty Metadata
    objects first if either score has none, then renders both scores with
    music21's 'musicxml.pdf' output format (makeNotation=False, so the
    annotations added by the diff-visualization code are shown as-is).

    NOTE(review): the composer fields are modified in place and not restored
    afterwards, despite the originals being saved — presumably intentional,
    but verify against callers.
    """
    # Annotations fixed below: these locals may legitimately hold None.
    originalComposer1: "str | None" = None
    originalComposer2: "str | None" = None
    if score1.metadata is None:
        score1.metadata = m21.metadata.Metadata()
    if score2.metadata is None:
        score2.metadata = m21.metadata.Metadata()
    originalComposer1 = score1.metadata.composer
    if originalComposer1 is None:
        score1.metadata.composer = "score1"
    else:
        score1.metadata.composer = "score1 " + originalComposer1
    originalComposer2 = score2.metadata.composer
    if originalComposer2 is None:
        score2.metadata.composer = "score2"
    else:
        score2.metadata.composer = "score2 " + originalComposer2
    score1.show('musicxml.pdf', makeNotation=False)
    score2.show('musicxml.pdf', makeNotation=False)
| 46.292379
| 91
| 0.595109
| 3,260
| 29,766
| 5.37454
| 0.054294
| 0.066206
| 0.067119
| 0.111866
| 0.905999
| 0.873181
| 0.868158
| 0.85914
| 0.84082
| 0.840591
| 0
| 0.035465
| 0.311328
| 29,766
| 642
| 92
| 46.364486
| 0.819259
| 0.099106
| 0
| 0.815589
| 0
| 0
| 0.051642
| 0
| 0
| 0
| 0
| 0
| 0.110266
| 1
| 0.003802
| false
| 0
| 0.01711
| 0
| 0.020913
| 0.001901
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b55490bd1df8a800a6e312417255076162a95990
| 25,141
|
py
|
Python
|
py2gcode/cnc_pocket.py
|
iorodeo/py2gcode
|
aa1785a4b65510ee12c22f2e73dbf567ab037192
|
[
"Apache-2.0"
] | 1
|
2020-07-23T19:03:34.000Z
|
2020-07-23T19:03:34.000Z
|
py2gcode/cnc_pocket.py
|
iorodeo/py2gcode
|
aa1785a4b65510ee12c22f2e73dbf567ab037192
|
[
"Apache-2.0"
] | null | null | null |
py2gcode/cnc_pocket.py
|
iorodeo/py2gcode
|
aa1785a4b65510ee12c22f2e73dbf567ab037192
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2013 IO Rodeo Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import math
import gcode_cmd
import cnc_path
import cnc_routine
FLOAT_TOLERANCE = 1.0e-12
class RectPocketXY(cnc_routine.SafeZRoutine):
    # Generates the toolpath for a simple rectangular pocket cut in the
    # xy plane: per z-pass, a helical rectangular lead-in followed by a
    # filled (concentric) rectangle spiral.

    def __init__(self,param):
        """
        Generates toolpath for cutting a simple rectangular pocket.

        param dict

        keys          values
        --------------------------------------------------------------
        centerX      = center position x-coord
        centerY      = center position y-coord
        width        = pocket width
        height       = pocket height
        depth        = pocket depth
        startZ       = height at which to start cutting
        safeZ        = safe tool height
        overlap      = tool path overlap (fractional value)
        overlapFinish = tool path overlap for bottom layer (optional)
        maxCutDepth  = maximum per pass cutting depth
        toolDiam     = diameter of tool
        cornerCut    = add corner cutout (True/False)
        direction    = cut direction cw or ccw
        startDwell   = dwell duration before start (optional)
        cornerMargin = margin for corner cuts (optional) default = 0.0
        coolingPause = dwell duration at safe z between passes (optional)
        """
        super(RectPocketXY,self).__init__(param)

    def makeListOfCmds(self):
        # Retrieve numerical parameters and convert to float
        cx = float(self.param['centerX'])
        cy = float(self.param['centerY'])
        width = abs(float(self.param['width']))
        height = abs(float(self.param['height']))
        depth = abs(float(self.param['depth']))
        startZ = float(self.param['startZ'])
        overlap = float(self.param['overlap'])
        try:
            overlapFinish = self.param['overlapFinish']
        except KeyError:
            # Optional: fall back to the regular overlap for the bottom layer
            overlapFinish = overlap
        overlapFinish = float(overlapFinish)
        maxCutDepth = float(self.param['maxCutDepth'])
        toolDiam = abs(float(self.param['toolDiam']))
        try:
            startDwell = self.param['startDwell']
        except KeyError:
            startDwell = 0.0
        startDwell = abs(float(startDwell))
        try:
            cornerMargin = self.param['cornerMargin']
        except KeyError:
            cornerMargin = 0.0
        cornerMargin = float(cornerMargin)
        try:
            coolingPause = self.param['coolingPause']
        except KeyError:
            coolingPause = None
        # Check params
        checkRectPocketOverlap(overlap)
        checkRectPocketOverlap(overlapFinish)
        # Get rectangular path parameters. sgnDir flips the y extents so the
        # rectangle is traversed cw vs ccw.
        if self.param['direction'] == 'cw':
            sgnDir = 1.0
        else:
            sgnDir = -1.0
        # Tool-center rectangle: inset from the pocket walls by half the
        # tool diameter on every side.
        x0 = cx + 0.5*(-width + toolDiam)
        x1 = cx + 0.5*( width - toolDiam)
        y0 = cy + 0.5*sgnDir*(-height + toolDiam)
        y1 = cy + 0.5*sgnDir*( height - toolDiam)
        point0 = x0,y0
        point1 = x1,y1
        # Move to safe height, then to start x,y and then to start z
        self.addStartComment()
        self.addRapidMoveToSafeZ()
        self.addRapidMoveToPos(x=x0,y=y0,comment='start x,y')
        self.addDwell(startDwell)
        self.addMoveToStartZ()
        # Get z cutting parameters
        stopZ = startZ - depth
        prevZ = startZ
        currZ = max([startZ - maxCutDepth, stopZ])
        done = False
        passCnt = 0
        while not done:
            passCnt+=1
            # Lead-in to cut depth: helical rectangle from prevZ down to currZ
            self.addComment('pass {0} lead-in'.format(passCnt))
            leadInRect = cnc_path.RectPath(
                    point0,
                    point1,
                    plane='xy',
                    helix=(prevZ,currZ)
                    )
            self.listOfCmds.extend(leadInRect.listOfCmds)
            # Cut filled rectangular path
            self.addComment('pass {0} filled rectangle'.format(passCnt))
            if currZ == stopZ:
                # Bottom (finish) layer may use a different overlap
                passOverlap = overlapFinish
            else:
                passOverlap = overlap
            # Radial step between concentric rectangles, clamped to the
            # rectangle's dimensions
            stepSize = toolDiam - passOverlap*toolDiam
            stepSize = min([stepSize, abs(x1-x0), abs(y1-y0)])
            numStepX = int(math.ceil(0.5*width/stepSize))
            numStepY = int(math.ceil(0.5*height/stepSize))
            numStep = min([numStepX, numStepY])
            if not self.param['cornerCut']:
                rectPath = cnc_path.FilledRectPath(
                        point0,
                        point1,
                        stepSize,
                        numStep,
                        plane='xy'
                        )
            else:
                # Extra diagonal cut length so square corners are cleared
                # by a round tool
                cutLen = 0.5*toolDiam*(math.sqrt(2.0) - 1.0) + cornerMargin
                rectPath = cnc_path.FilledRectWithCornerCutPath(
                        point0,
                        point1,
                        stepSize,
                        numStep,
                        cutLen,
                        plane='xy'
                        )
            self.listOfCmds.extend(rectPath.listOfCmds)
            # Get next z position
            if currZ <= stopZ:
                done = True
            else:
                if coolingPause is not None:
                    # Retract, pause to let the tool cool, then feed back down
                    self.addRapidMoveToSafeZ()
                    self.listOfCmds.append(gcode_cmd.Dwell(coolingPause))
                    self.listOfCmds.append(gcode_cmd.LinearFeed(z=currZ))
                prevZ = currZ
                currZ = max([currZ - maxCutDepth, stopZ])
        self.addRapidMoveToSafeZ()
        self.addEndComment()
class RectAnnulusPocketXY(cnc_routine.SafeZRoutine):
    # Generates the toolpath for a rectangular annulus (frame-shaped) pocket:
    # per z-pass, a helical lead-in on the outer rectangle, a filled spiral
    # inward across the annulus thickness, and (when needed) an extra pass
    # on the inner rectangle boundary.

    def __init__(self,param):
        """
        Generates toolpath for cutting a rectangular annulus pocket.

        params dict

        keys          values
        --------------------------------------------------------------
        centerX      = center position x-coord
        centerY      = center position y-coord
        width        = pocket outer width
        height       = pocket inner width
        thickness    = thickness
        depth        = pocket depth
        startZ       = height at which to start cutting
        safeZ        = safe tool height
        overlap      = tool path overlap (fractional value)
        overlapFinish = tool path overlap for bottom layer (optional)
        maxCutDepth  = maximum per pass cutting depth
        toolDiam     = diameter of tool
        cornerCut    = add corner cutout (True/False)
        direction    = cut direction cw or ccw
        startDwell   = dwell duration before start (optional)
        cornerMargin = margin for corner cuts (optional) default = 0.0
        """
        super(RectAnnulusPocketXY,self).__init__(param)

    def makeListOfCmds(self):
        # Retrieve numerical parameters, convert to float, and check
        cx = float(self.param['centerX'])
        cy = float(self.param['centerY'])
        width = abs(float(self.param['width']))
        height = abs(float(self.param['height']))
        thickness = abs(float(self.param['thickness']))
        depth = abs(float(self.param['depth']))
        startZ = float(self.param['startZ'])
        overlap = float(self.param['overlap'])
        try:
            overlapFinish = self.param['overlapFinish']
        except KeyError:
            overlapFinish = overlap
        overlapFinish = float(overlapFinish)
        maxCutDepth = float(self.param['maxCutDepth'])
        toolDiam = abs(float(self.param['toolDiam']))
        try:
            startDwell = self.param['startDwell']
        except KeyError:
            startDwell = 0.0
        startDwell = abs(float(startDwell))
        try:
            cornerMargin = self.param['cornerMargin']
        except KeyError:
            cornerMargin = 0.0
        cornerMargin = float(cornerMargin)
        # NOTE(review): resets any commands accumulated so far — the other
        # pocket classes in this file do not do this; confirm intent.
        self.listOfCmds = []
        # Check params
        assert toolDiam <= thickness, 'toolDiam too large for annulus thickness'
        checkRectPocketOverlap(overlap)
        checkRectPocketOverlap(overlapFinish)
        # Get sign for rectangular toolpaths based on direction
        if self.param['direction'] == 'cw':
            sgnDir = 1.0
        else:
            sgnDir = -1.0
        # Outer toolpath rectangle: inset half a tool diameter from the
        # outer walls
        outerX0 = cx + 0.5*(-width + toolDiam)
        outerX1 = cx + 0.5*( width - toolDiam)
        outerY0 = cy + 0.5*sgnDir*(-height + toolDiam)
        outerY1 = cy + 0.5*sgnDir*( height - toolDiam)
        outerPoint0 = outerX0, outerY0
        outerPoint1 = outerX1, outerY1
        # Inner toolpath rectangle: outset half a tool diameter from the
        # inner walls
        innerX0 = cx + 0.5*(-width - toolDiam) + thickness
        innerX1 = cx + 0.5*( width + toolDiam) - thickness
        innerY0 = cy + sgnDir*(0.5*(-height - toolDiam) + thickness)
        innerY1 = cy + sgnDir*(0.5*( height + toolDiam) - thickness)
        innerPoint0 = innerX0, innerY0
        innerPoint1 = innerX1, innerY1
        # Move to safe height, then to start x,y and then to start z
        self.addStartComment()
        self.addRapidMoveToSafeZ()
        self.addRapidMoveToPos(x=outerX0,y=outerY0,comment='start x,y')
        self.addDwell(startDwell)
        self.addMoveToStartZ()
        # Get z cutting parameters
        stopZ = startZ - depth
        prevZ = startZ
        currZ = max([startZ - maxCutDepth, stopZ])
        done = False
        passCnt = 0
        while not done:
            passCnt+=1
            # Lead-in to cut depth
            self.addComment('pass {0} lead-in'.format(passCnt))
            leadInRect = cnc_path.RectPath(
                    outerPoint0,
                    outerPoint1,
                    plane='xy',
                    helix=(prevZ,currZ)
                    )
            self.listOfCmds.extend(leadInRect.listOfCmds)
            # Cut filled rectangular path
            self.addComment('pass {0} filled rectangle'.format(passCnt))
            if currZ == stopZ:
                passOverlap = overlapFinish
            else:
                passOverlap = overlap
            if abs(toolDiam - thickness) <= FLOAT_TOLERANCE:
                # Tool exactly fills the annulus — a single perimeter
                # pass suffices
                numStep = 0
                stepSize = 0.0
            else:
                # Distribute the radial span (thickness - toolDiam) evenly
                # over the smallest number of steps not exceeding the
                # overlap-derived step size
                stepSizePrelim = toolDiam - passOverlap*toolDiam
                numStep = int(math.floor((thickness - toolDiam)/stepSizePrelim)) + 1
                stepSize = (thickness -toolDiam)/float(numStep)
            if not self.param['cornerCut']:
                rectPath = cnc_path.FilledRectPath(
                        outerPoint0,
                        outerPoint1,
                        stepSize,
                        numStep,
                        plane='xy'
                        )
            else:
                # Extra diagonal cut so a round tool clears square corners
                cutLen = 0.5*toolDiam*(math.sqrt(2.0) - 1.0) + cornerMargin
                rectPath = cnc_path.FilledRectWithCornerCutPath(
                        outerPoint0,
                        outerPoint1,
                        stepSize,
                        numStep,
                        cutLen,
                        plane='xy'
                        )
            self.listOfCmds.extend(rectPath.listOfCmds)
            # Add an explicit inner-boundary pass when the spiral did not
            # already reach the inner rectangle
            test0 = abs(outerX0 - innerX0) > FLOAT_TOLERANCE
            test1 = abs(outerX0 - innerX0) > (thickness - ((numStep-1)*stepSize + toolDiam))
            if test0 and test1:
                rectPath = cnc_path.RectPath(innerPoint0, innerPoint1)
                self.listOfCmds.extend(rectPath.listOfCmds)
            # Get next z position (descent itself happens via the next
            # pass's helical lead-in)
            if currZ <= stopZ:
                done = True
            prevZ = currZ
            currZ = max([currZ - maxCutDepth, stopZ])
        # Move to safe z and add end comment
        self.addRapidMoveToSafeZ()
        self.addEndComment()
class CircPocketXY(cnc_routine.SafeZRoutine):
    # Generates the toolpath for a circular pocket in the xy plane: per
    # z-pass, a helical circular lead-in followed by a filled (concentric)
    # circle spiral, finishing each pass at the pocket center.

    def __init__(self,param):
        """
        Generates toolpath for cutting a circular pocket.

        param dict:

        keys          values
        --------------------------------------------------------------
        centerX      = center x-coordinate
        centerY      = center y-coordinate
        radius       = radius
        depth        = pocket depth
        startZ       = height at which to start cutting
        safeZ        = safe tool height
        overlap      = tool path overlap (fractional value)
        overlapFinish = tool path overlap for bottom layer (optional)
        maxCutDepth  = maximum per pass cutting depth
        toolDiam     = diameter of tool
        direction    = cut direction cw or ccw
        startDwell   = dwell duration before start (optional)
        coolingPause = dwell duration at safe z between passes (optional)
        """
        super(CircPocketXY,self).__init__(param)

    def makeListOfCmds(self):
        # Retrieve numerical parameters and convert to float
        cx = float(self.param['centerX'])
        cy = float(self.param['centerY'])
        radius = abs(float(self.param['radius']))
        depth = abs(float(self.param['depth']))
        startZ = float(self.param['startZ'])
        overlap = float(self.param['overlap'])
        try:
            overlapFinish = self.param['overlapFinish']
        except KeyError:
            # Optional: fall back to the regular overlap for the bottom layer
            overlapFinish = overlap
        overlapFinish = float(overlapFinish)
        maxCutDepth = float(self.param['maxCutDepth'])
        toolDiam = abs(float(self.param['toolDiam']))
        try:
            startDwell = self.param['startDwell']
        except KeyError:
            startDwell = 0.0
        startDwell = abs(float(startDwell))
        try:
            coolingPause = self.param['coolingPause']
        except KeyError:
            coolingPause = None
        # Check params
        if overlap < 0.0 or overlap >= 1.0:
            raise ValueError('overlap must >=0 and < 1')
        if 2*radius <= toolDiam:
            raise ValueError('circle diameter must be > tool diameter')
        # Get circle cutting parameters - assumes startAngle=0.
        # adjustedRadius is the tool-center radius (inset half a tool
        # diameter from the pocket wall).
        adjustedRadius = radius - 0.5*toolDiam
        x0 = cx + adjustedRadius
        y0 = cy
        # Move to safe height, then to start x,y and then to start z
        self.addStartComment()
        self.addRapidMoveToSafeZ()
        self.addRapidMoveToPos(x=x0,y=y0,comment='start x,y')
        self.addDwell(startDwell)
        self.addMoveToStartZ()
        # Get z cutting parameters
        stopZ = startZ - depth
        prevZ = startZ
        currZ = max([startZ - maxCutDepth, stopZ])
        done = False
        passCnt = 0
        while not done:
            passCnt+=1
            # Add lead-in: helical circle from prevZ down to currZ
            self.addComment('pass {0} lead-in'.format(passCnt))
            moveToStartCmd = gcode_cmd.LinearFeed(x=x0,y=y0)
            self.listOfCmds.append(moveToStartCmd)
            leadInPath = cnc_path.CircPath(
                    (cx,cy),
                    adjustedRadius,
                    startAng=0,
                    plane='xy',
                    direction=self.param['direction'],
                    turns=1,
                    helix=(prevZ,currZ)
                    )
            self.listOfCmds.extend(leadInPath.listOfCmds)
            # Add filled circle
            self.addComment('pass {0} filled circle'.format(passCnt))
            if currZ == stopZ:
                # Bottom (finish) layer may use a different overlap
                passOverlap = overlapFinish
            else:
                passOverlap = overlap
            stepSize = toolDiam - passOverlap*toolDiam
            numStep = int(math.ceil(adjustedRadius/stepSize))
            circPath = cnc_path.FilledCircPath(
                    (cx,cy),
                    adjustedRadius,
                    stepSize,
                    numStep,
                    startAng=0,
                    plane='xy',
                    direction=self.param['direction'],
                    turns=1
                    )
            self.listOfCmds.extend(circPath.listOfCmds)
            # Finish the pass at the pocket center
            centerMoveCmd = gcode_cmd.LinearFeed(x=cx,y=cy)
            self.listOfCmds.append(centerMoveCmd)
            # Get next z position
            if currZ <= stopZ:
                done = True
            else:
                if coolingPause is not None:
                    # Retract, pause to let the tool cool, then feed back down
                    self.addRapidMoveToSafeZ()
                    self.listOfCmds.append(gcode_cmd.Dwell(coolingPause))
                    self.listOfCmds.append(gcode_cmd.LinearFeed(z=currZ))
                prevZ = currZ
                currZ = max([currZ - maxCutDepth, stopZ])
        # Move to safe z and add end comment
        self.addRapidMoveToSafeZ()
        self.addEndComment()
class CircAnnulusPocketXY(cnc_routine.SafeZRoutine):
    # Generates the toolpath for a circular annulus (ring-shaped) pocket:
    # per z-pass, a helical lead-in on the outer circle followed by a
    # filled circle spiral spanning the annulus thickness.

    def __init__(self,param):
        """
        Generates toolpath for cutting a circular annulus pocket.

        param dict:

        keys          values
        --------------------------------------------------------------
        centerX      = center x-coordinate
        centerY      = center y-coordinate
        radius       = radius
        thickness    = thickness
        depth        = pocket depth
        startZ       = height at which to start cutting
        safeZ        = safe tool height
        overlap      = tool path overlap (fractional value)
        overlapFinish = tool path overlap for bottom layer (optional)
        maxCutDepth  = maximum per pass cutting depth
        toolDiam     = diameter of tool
        direction    = cut direction cw or ccw
        startDwell   = dwell duration before start (optional)
        """
        super(CircAnnulusPocketXY,self).__init__(param)

    def makeListOfCmds(self):
        # Retrieve numerical parameters and convert to float
        cx = float(self.param['centerX'])
        cy = float(self.param['centerY'])
        radius = abs(float(self.param['radius']))
        thickness = abs(float(self.param['thickness']))
        depth = abs(float(self.param['depth']))
        startZ = float(self.param['startZ'])
        overlap = float(self.param['overlap'])
        try:
            overlapFinish = self.param['overlapFinish']
        except KeyError:
            overlapFinish = overlap
        overlapFinish = float(overlapFinish)
        maxCutDepth = float(self.param['maxCutDepth'])
        toolDiam = abs(float(self.param['toolDiam']))
        try:
            startDwell = self.param['startDwell']
        except KeyError:
            startDwell = 0.0
        startDwell = abs(float(startDwell))
        # Check params
        if overlap < 0.0 or overlap >= 1.0:
            raise ValueError('overlap must >=0 and < 1')
        if 2*radius <= toolDiam:
            raise ValueError('circle diameter must be > tool diameter')
        if thickness > radius:
            raise ValueError('thickness must be <= radius')
        if toolDiam > thickness:
            raise ValueError('toolDiam must be <= thickness')
        # Get circle cutting parameters - assumes startAngle=0.
        # adjustedRadius is the tool-center radius on the outer boundary.
        adjustedRadius = radius - 0.5*toolDiam
        x0 = cx + adjustedRadius
        y0 = cy
        # Move to safe height, then to start x,y and then to start z
        self.addStartComment()
        self.addRapidMoveToSafeZ()
        self.addRapidMoveToPos(x=x0,y=y0,comment='start x,y')
        self.addDwell(startDwell)
        self.addMoveToStartZ()
        # Get z cutting parameters
        stopZ = startZ - depth
        prevZ = startZ
        currZ = max([startZ - maxCutDepth, stopZ])
        done = False
        passCnt = 0
        while not done:
            passCnt+=1
            # Add lead-in: helical circle from prevZ down to currZ
            self.addComment('pass {0} lead-in'.format(passCnt))
            moveToStartCmd = gcode_cmd.LinearFeed(x=x0,y=y0)
            self.listOfCmds.append(moveToStartCmd)
            leadInPath = cnc_path.CircPath(
                    (cx,cy),
                    adjustedRadius,
                    startAng=0,
                    plane='xy',
                    direction=self.param['direction'],
                    turns=1,
                    helix=(prevZ,currZ)
                    )
            self.listOfCmds.extend(leadInPath.listOfCmds)
            # Add filled circle
            self.addComment('pass {0} filled circle'.format(passCnt))
            if currZ == stopZ:
                passOverlap = overlapFinish
            else:
                passOverlap = overlap
            if abs(toolDiam - thickness) <= FLOAT_TOLERANCE:
                # Tool exactly fills the annulus — a single circular
                # pass suffices
                numStep = 0
                stepSize = 0.0
            else:
                # Distribute the radial span (thickness - toolDiam) evenly
                # over the smallest number of steps not exceeding the
                # overlap-derived step size
                stepSizePrelim = toolDiam - passOverlap*toolDiam
                numStep = int(math.floor((thickness - toolDiam)/stepSizePrelim)) + 1
                stepSize = (thickness -toolDiam)/float(numStep)
            circPath = cnc_path.FilledCircPath(
                    (cx,cy),
                    adjustedRadius,
                    stepSize,
                    numStep,
                    startAng=0,
                    plane='xy',
                    direction=self.param['direction'],
                    turns=1
                    )
            self.listOfCmds.extend(circPath.listOfCmds)
            # Get next z position (descent itself happens via the next
            # pass's helical lead-in)
            if currZ <= stopZ:
                done = True
            prevZ = currZ
            currZ = max([currZ - maxCutDepth, stopZ])
        # Move to safe z and add end comment
        self.addRapidMoveToSafeZ()
        self.addEndComment()
# Utility functions
# --------------------------------------------------------------------------------------
def checkRectPocketOverlap(overlap):
    """Validate an overlap fraction for the rectangular pocket routines.

    The radial step between concentric rectangle passes is
    toolDiam*(1 - overlap); for the tool to cover the floor completely
    (including across the rectangle's diagonal) the overlap must be at
    least 1 - 1/sqrt(2) (~0.2929), and less than 1.0 so the step size
    stays positive.

    Raises:
        ValueError: if overlap < 1 - 1/sqrt(2) or overlap >= 1.0.
        (Previously an AssertionError, which `python -O` would strip;
        ValueError matches the validation style used by the circular
        pocket classes in this module.)
    """
    minOverlap = 1.0 - 1.0/math.sqrt(2.0)
    if not (minOverlap <= overlap < 1.0):
        raise ValueError(' overlap must be >= {0} and < 1.0'.format(minOverlap))
# ---------------------------------------------------------------------------------------
if __name__ == '__main__':
    # Demo / manual-test driver: builds a g-code program containing one of
    # the pocket routines above, prints it, and writes it to 'test.ngc'.
    prog = gcode_cmd.GCodeProg()
    prog.add(gcode_cmd.GenericStart())
    prog.add(gcode_cmd.Space())
    prog.add(gcode_cmd.FeedRate(100.0))
    # The `if 0:` / `if 1:` literals below are manual toggles selecting
    # which example pocket is generated; only the CircAnnulusPocketXY
    # example is currently enabled.
    if 0:
        param = {
                'centerX'       : 0.0,
                'centerY'       : 0.0,
                'width'         : 2.0,
                'height'        : 1.0,
                'depth'         : 2*0.04,
                'startZ'        : 0.0,
                'safeZ'         : 0.5,
                'overlap'       : 0.3,
                'overlapFinish' : 0.5,
                'maxCutDepth'   : 0.04,
                'toolDiam'      : 0.25,
                'cornerCut'     : False,
                'direction'     : 'ccw',
                'startDwell'    : 2.0,
                }
        pocket = RectPocketXY(param)
    if 0:
        param = {
                'centerX'       : 0.0,
                'centerY'       : 0.0,
                'width'         : 1.0,
                'height'        : 1.0,
                'thickness'     : 0.35,
                'depth'         : 0.3,
                'startZ'        : 0.0,
                'safeZ'         : 0.5,
                'overlap'       : 0.3,
                'overlapFinish' : 0.6,
                'maxCutDepth'   : 0.1,
                'toolDiam'      : 0.2,
                'cornerCut'     : False,
                'direction'     : 'ccw',
                'startDwell'    : 2.0,
                }
        pocket = RectAnnulusPocketXY(param)
    if 0:
        param = {
                'centerX'       : 0.0,
                'centerY'       : 0.0,
                'radius'        : 0.4,
                'depth'         : 0.4,
                'startZ'        : 0.0,
                'safeZ'         : 0.5,
                'overlap'       : 0.1,
                'overlapFinish' : 0.1,
                'maxCutDepth'   : 0.2,
                'toolDiam'      : 0.25,
                'direction'     : 'ccw',
                'startDwell'    : 2.0,
                }
        pocket = CircPocketXY(param)
    if 1:
        param = {
                'centerX'       : 0.0,
                'centerY'       : 0.0,
                'radius'        : 1.0,
                'thickness'     : 0.4,
                'depth'         : 0.4,
                'startZ'        : 0.0,
                'safeZ'         : 0.5,
                'overlap'       : 0.5,
                'overlapFinish' : 0.5,
                'maxCutDepth'   : 0.2,
                'toolDiam'      : 0.125,
                'direction'     : 'ccw',
                'startDwell'    : 2.0,
                }
        pocket = CircAnnulusPocketXY(param)
    prog.add(pocket)
    prog.add(gcode_cmd.Space())
    prog.add(gcode_cmd.End(),comment=True)
    print(prog)
    prog.write('test.ngc')
| 34.869626
| 94
| 0.505986
| 2,308
| 25,141
| 5.477036
| 0.12435
| 0.042718
| 0.03987
| 0.021517
| 0.829286
| 0.801994
| 0.783087
| 0.777866
| 0.768135
| 0.754371
| 0
| 0.021229
| 0.389205
| 25,141
| 720
| 95
| 34.918056
| 0.801967
| 0.199316
| 0
| 0.798755
| 0
| 0
| 0.071513
| 0
| 0
| 0
| 0
| 0
| 0.006224
| 1
| 0.018672
| false
| 0.058091
| 0.010373
| 0
| 0.037344
| 0.004149
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
b59ad60000def89d311dc823c0fe7f7888a489b5
| 13,096
|
py
|
Python
|
E/E 1331 Jordan Smiley.py
|
zielman/Codeforces-solutions
|
636f11a9eb10939d09d2e50ddc5ec53327d0b7ab
|
[
"MIT"
] | null | null | null |
E/E 1331 Jordan Smiley.py
|
zielman/Codeforces-solutions
|
636f11a9eb10939d09d2e50ddc5ec53327d0b7ab
|
[
"MIT"
] | 1
|
2021-05-05T17:05:03.000Z
|
2021-05-05T17:05:03.000Z
|
E/E 1331 Jordan Smiley.py
|
zielman/Codeforces-solutions
|
636f11a9eb10939d09d2e50ddc5ec53327d0b7ab
|
[
"MIT"
] | null | null | null |
# https://codeforces.com/problemset/problem/1331/E
'''
# download picture from the problem and open in paint, then fill the inside with black color, save as pic.png
from PIL import Image
img = Image.open('pic.png')
js = []
row = []
for x in range(64):
for y in range(64):
i = 9 + (15 * x)
j = 9 + (15 * y)
colors = img.getpixel((i,j))
if colors[0] == 0:
row.append(1) #black
else:
row.append(0) #white
js.append(row)
row = []
print(js)
'''
js = [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0,
1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1], [1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0,
0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1], [1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0,
0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1], [1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1,
1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1], [1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1,
0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1], [1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,
1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1], [1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1], [1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1], [0, 0, 1, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 0, 1, 0, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 0,
1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0,
1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0], [1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1], [1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1], [1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 
1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1], [1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], [1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1], [1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1], [1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1, 1, 1, 1], [1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
# Read the query point as "row col" and report whether it falls inside the
# filled (non-boundary) region of the 64x64 smiley bitmap above.
# NOTE(review): the grid is indexed js[col][row] — the two coordinates are
# deliberately swapped relative to the input order; presumably this matches
# the problem's picture orientation — confirm against the original statement.
row, col = map(int, input().split())
print("IN" if js[col][row] != 1 else "OUT")
| 161.679012
| 3,478
| 0.338424
| 4,192
| 13,096
| 1.057252
| 0.01312
| 0.911101
| 1.169675
| 1.348375
| 0.924188
| 0.924188
| 0.924188
| 0.924188
| 0.924188
| 0.924188
| 0
| 0.46991
| 0.331323
| 13,096
| 81
| 3,479
| 161.679012
| 0.0362
| 0.040623
| 0
| 0
| 0
| 0
| 0.000398
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.019608
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
b5a05c83fc761f4b7231000487a05c0a986d3490
| 83,483
|
py
|
Python
|
toolbox/TensorflowTools.py
|
gozian2811/slic_multilevel
|
af0b1132e055bb95512f11a28ee55ee51b2f3295
|
[
"Apache-2.0"
] | null | null | null |
toolbox/TensorflowTools.py
|
gozian2811/slic_multilevel
|
af0b1132e055bb95512f11a28ee55ee51b2f3295
|
[
"Apache-2.0"
] | null | null | null |
toolbox/TensorflowTools.py
|
gozian2811/slic_multilevel
|
af0b1132e055bb95512f11a28ee55ee51b2f3295
|
[
"Apache-2.0"
] | null | null | null |
import math
import tensorflow as tf
def mlc_archi_3(input, keep_prob):
    """Build the 'Archi-3' graph: three 3D-conv layers plus two FC layers
    and a final 2x2 softmax stage, with dropout after every learned layer.

    Args:
        input: 5-D tensor fed to ``tf.nn.conv3d`` — presumably
            [batch, depth, height, width, 1]; TODO confirm against callers
            (the size comments below are mutually inconsistent).
        keep_prob: scalar keep probability for ``tf.nn.dropout``.

    Returns:
        The softmax output tensor (2 classes).
    """
    with tf.name_scope("Archi-3"):
        # input size is batch_sizex20x20x6
        # 5x5x3 is the kernel size of conv1,1 is the input depth,64 is the number output channel
        w_conv1 = tf.Variable(tf.random_normal([3,5,5,1,64],stddev=0.001),dtype=tf.float32,name='w_conv1')
        b_conv1 = tf.Variable(tf.constant(0.01,shape=[64]),dtype=tf.float32,name='b_conv1')
        out_conv1 = tf.nn.relu(tf.add(tf.nn.conv3d(input,w_conv1,strides=[1,1,1,1,1],padding='VALID'),b_conv1))
        out_conv1 = tf.nn.dropout(out_conv1,keep_prob)
        # max pooling; ksize 2x2x2 with stride 1, so spatial size is preserved
        hidden_conv1 = tf.nn.max_pool3d(out_conv1,strides=[1,1,1,1,1],ksize=[1,2,2,2,1],padding='SAME')
        # after conv1 ,the output size is batch_sizex4x16x16x64([batch_size,in_deep,width,height,output_deep])
        w_conv2 = tf.Variable(tf.random_normal([3,5, 5, 64,64], stddev=0.001), dtype=tf.float32,name='w_conv2')
        b_conv2 = tf.Variable(tf.constant(0.01, shape=[64]), dtype=tf.float32, name='b_conv2')
        out_conv2 = tf.nn.relu(tf.add(tf.nn.conv3d(hidden_conv1, w_conv2, strides=[1, 1, 1,1, 1], padding='VALID'), b_conv2))
        out_conv2 = tf.nn.dropout(out_conv2, keep_prob)
        # after conv2 ,the output size is batch_sizex2x12x12x64([batch_size,in_deep,width,height,output_deep])
        w_conv3 = tf.Variable(tf.random_normal([3,5, 5, 64,64], stddev=0.001), dtype=tf.float32,
                              name='w_conv3')
        b_conv3 = tf.Variable(tf.constant(0.01, shape=[64]), dtype=tf.float32, name='b_conv3')
        out_conv3 = tf.nn.relu(tf.add(tf.nn.conv3d(out_conv2, w_conv3, strides=[1, 1, 1, 1,1], padding='VALID'),b_conv3))
        out_conv3 = tf.nn.dropout(out_conv3, keep_prob)
        out_conv3_shape = tf.shape(out_conv3)
        # Logged as a summary for runtime shape debugging (only dim 0).
        tf.summary.scalar('out_conv3_shape', out_conv3_shape[0])
        #print(out_conv3)
        # NOTE(review): the size comments above disagree with the flatten size
        # 64*28*28*20 used here — verify which input size this graph targets.
        # all feature map flatten to one dimension vector,this vector will be much long
        out_conv3 = tf.reshape(out_conv3,[-1,64*28*28*20])
        w_fc1 = tf.Variable(tf.random_normal([64*28*28*20,250],stddev=0.001),name='w_fc1')
        # NOTE(review): FC biases are tf.constant, not tf.Variable, so they
        # are NOT trainable — presumably unintended; confirm before training.
        out_fc1 = tf.nn.relu(tf.add(tf.matmul(out_conv3,w_fc1),tf.constant(0.001,shape=[250])))
        out_fc1 = tf.nn.dropout(out_fc1,keep_prob)
        out_fc1_shape = tf.shape(out_fc1)
        tf.summary.scalar('out_fc1_shape', out_fc1_shape[0])
        w_fc2 = tf.Variable(tf.random_normal([250, 2], stddev=0.001), name='w_fc2')
        out_fc2 = tf.nn.relu(tf.add(tf.matmul(out_fc1, w_fc2), tf.constant(0.001, shape=[2])))
        out_fc2 = tf.nn.dropout(out_fc2, keep_prob)
        # Extra learned 2x2 mapping before the softmax.
        w_sm = tf.Variable(tf.random_normal([2, 2], stddev=0.001), name='w_sm')
        b_sm = tf.constant(0.001, shape=[2])
        out_sm = tf.nn.softmax(tf.add(tf.matmul(out_fc2, w_sm), b_sm))
        return out_sm
def volume_net_l5_56(input, dropout_rate=0.3):
    """Five-layer volumetric binary classifier (3 conv3d stages + 2 FC).

    Args:
        input: 5-D tensor fed to ``tf.nn.conv3d`` — presumably
            [batch, 56, 56, 56, 1] given the function name; TODO confirm.
        dropout_rate: fraction of units to drop; keep_prob = 1 - dropout_rate.

    Returns:
        (out_fc2, out_sm): pre-softmax logits and their softmax probabilities.
    """
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)
    # conv1: 3x3x3, 1 -> 16 channels; relu -> dropout -> 2x2x2 max-pool.
    w_conv1 = tf.Variable(tf.random_normal([3,3,3,1,16],stddev=0.1),dtype=tf.float32,name='w_conv1')
    b_conv1 = tf.Variable(tf.random_normal(shape=[16], stddev=0.1),dtype=tf.float32,name='b_conv1')
    out_conv1 = tf.nn.relu(tf.add(tf.nn.conv3d(input,w_conv1,strides=[1,1,1,1,1],padding='VALID'),b_conv1))
    dropout_conv1 = tf.nn.dropout(out_conv1,keep_prob)
    hidden_conv1 = tf.nn.max_pool3d(dropout_conv1,strides=[1,2,2,2,1],ksize=[1,2,2,2,1],padding='SAME')
    # after conv1 ,the output size is batch_sizex27x27x27x16([batch_size,in_deep,width,height,output_deep])
    w_conv2 = tf.Variable(tf.random_normal([4,4,4,16,32], stddev=0.1), dtype=tf.float32,name='w_conv2')
    b_conv2 = tf.Variable(tf.random_normal(shape=[32], stddev=0.1), dtype=tf.float32, name='b_conv2')
    out_conv2 = tf.nn.relu(tf.add(tf.nn.conv3d(hidden_conv1, w_conv2, strides=[1,1,1,1,1], padding='VALID'), b_conv2))
    dropout_conv2 = tf.nn.dropout(out_conv2, keep_prob)
    hidden_conv2 = tf.nn.max_pool3d(dropout_conv2,strides=[1,2,2,2,1],ksize=[1,2,2,2,1],padding='SAME')
    # after conv2 ,the output size is batch_sizex12x12x12x32([batch_size,in_deep,width,height,output_deep])
    w_conv3 = tf.Variable(tf.random_normal([5,5,5,32,64], stddev=0.1), dtype=tf.float32, name='w_conv3')
    b_conv3 = tf.Variable(tf.random_normal(shape=[64], stddev=0.1), dtype=tf.float32, name='b_conv3')
    out_conv3 = tf.nn.relu(tf.add(tf.nn.conv3d(hidden_conv2, w_conv3, strides=[1,1,1,1,1], padding='VALID'), b_conv3))
    dropout_conv3 = tf.nn.dropout(out_conv3, keep_prob)
    hidden_conv3 = tf.nn.max_pool3d(dropout_conv3,strides=[1,2,2,2,1],ksize=[1,2,2,2,1],padding='SAME')
    # after conv3 ,the output size is batch_sizex4x4x4x64([batch_size,in_deep,width,height,output_deep])
    # all feature maps flattened to one long vector for the FC layers
    flattened_conv3 = tf.reshape(hidden_conv3,[-1,64*4*4*4])
    w_fc1 = tf.Variable(tf.random_normal([64*4*4*4,128],stddev=0.1),name='w_fc1')
    b_fc1 = tf.Variable(tf.random_normal(shape=[128], stddev=0.1), name='b_fc1')
    out_fc1 = tf.nn.relu(tf.add(tf.matmul(flattened_conv3,w_fc1),b_fc1))
    dropout_fc1 = tf.nn.dropout(out_fc1,keep_prob)
    w_fc2 = tf.Variable(tf.random_normal([128, 2], stddev=0.1), name='w_fc2')
    b_fc2 = tf.Variable(tf.random_normal(shape=[2], stddev=0.1), name='b_fc2')
    # BUG FIX: the original fed out_fc1 into fc2, leaving dropout_fc1 computed
    # but unused — the FC-layer dropout was silently a no-op. Feed the
    # dropout-regularized activations as the dead variable clearly intended.
    out_fc2 = tf.add(tf.matmul(dropout_fc1, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    #return hidden_conv1, hidden_conv2, hidden_conv3, out_fc1, dropout_fc1, w_fc2, b_fc2, out_fc2, out_sm
    return out_fc2, out_sm
def volume_net2_l5_56(input, dropout_rate=0.3):
    """Volumetric binary classifier: four conv3d stages and one FC layer.

    Args:
        input: 5-D tensor fed to ``tf.nn.conv3d`` — presumably
            [batch, 56, 56, 56, 1] given the function name; TODO confirm.
        dropout_rate: fraction of units to drop; keep_prob = 1 - dropout_rate.

    Returns:
        (out_fc1, out_sm): the relu-activated 2-unit FC output and its softmax.
    """
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)

    def _conv_stage(x, kernel_shape, tag, pool):
        # One conv3d (VALID, stride 1) -> relu -> dropout stage, with an
        # optional 2x2x2 max-pool; variable names match the original graph.
        weights = tf.Variable(tf.random_normal(kernel_shape, stddev=0.1),
                              dtype=tf.float32, name='w_' + tag)
        biases = tf.Variable(tf.random_normal(shape=[kernel_shape[-1]], stddev=0.1),
                             dtype=tf.float32, name='b_' + tag)
        activated = tf.nn.relu(tf.add(
            tf.nn.conv3d(x, weights, strides=[1,1,1,1,1], padding='VALID'),
            biases))
        dropped = tf.nn.dropout(activated, keep_prob)
        if pool:
            dropped = tf.nn.max_pool3d(dropped, strides=[1,2,2,2,1],
                                       ksize=[1,2,2,2,1], padding='SAME')
        return dropped

    # Stage sizes: 1->16->32->64->128 channels; the last stage has no pooling.
    net = _conv_stage(input, [3,3,3,1,16], 'conv1', pool=True)
    net = _conv_stage(net, [4,4,4,16,32], 'conv2', pool=True)
    net = _conv_stage(net, [5,5,5,32,64], 'conv3', pool=True)
    net = _conv_stage(net, [4,4,4,64,128], 'conv4', pool=False)
    # Flatten to 128 features and map straight to the two output classes.
    flattened = tf.reshape(net, [-1,128])
    w_fc1 = tf.Variable(tf.random_normal([128,2], stddev=0.1), name='w_fc1')
    b_fc1 = tf.Variable(tf.random_normal(shape=[2], stddev=0.1), name='b_fc1')
    out_fc1 = tf.nn.relu(tf.add(tf.matmul(flattened, w_fc1), b_fc1))
    out_sm = tf.nn.softmax(out_fc1)
    return out_fc1, out_sm
def volume_net_l6_56(input, dropout_rate=0.3):
    """Six-layer volumetric binary classifier: four conv3d stages + two FC.

    Args:
        input: 5-D tensor fed to ``tf.nn.conv3d`` — presumably
            [batch, 56, 56, 56, 1] given the function name; TODO confirm.
        dropout_rate: fraction of units to drop; keep_prob = 1 - dropout_rate.

    Returns:
        (out_fc2, out_sm): pre-softmax logits and their softmax probabilities.
    """
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)

    def _conv_stage(x, kernel_shape, tag, pool):
        # conv3d (VALID, stride 1) -> relu -> dropout, optionally max-pooled.
        weights = tf.Variable(tf.random_normal(kernel_shape, stddev=0.1),
                              dtype=tf.float32, name='w_' + tag)
        biases = tf.Variable(tf.random_normal(shape=[kernel_shape[-1]], stddev=0.1),
                             dtype=tf.float32, name='b_' + tag)
        activated = tf.nn.relu(tf.add(
            tf.nn.conv3d(x, weights, strides=[1,1,1,1,1], padding='VALID'),
            biases))
        dropped = tf.nn.dropout(activated, keep_prob)
        if pool:
            dropped = tf.nn.max_pool3d(dropped, strides=[1,2,2,2,1],
                                       ksize=[1,2,2,2,1], padding='SAME')
        return dropped

    # Channel progression 1->16->32->64->128; final stage has no pooling.
    net = _conv_stage(input, [3,3,3,1,16], 'conv1', pool=True)
    net = _conv_stage(net, [4,4,4,16,32], 'conv2', pool=True)
    net = _conv_stage(net, [5,5,5,32,64], 'conv3', pool=True)
    net = _conv_stage(net, [4,4,4,64,128], 'conv4', pool=False)
    # Flatten to 128 features, then a 128-unit hidden FC layer and a 2-way head.
    flattened = tf.reshape(net, [-1,128])
    w_fc1 = tf.Variable(tf.random_normal([128,128], stddev=0.1), name='w_fc1')
    b_fc1 = tf.Variable(tf.random_normal(shape=[128], stddev=0.1), name='b_fc1')
    hidden_fc1 = tf.nn.relu(tf.add(tf.matmul(flattened, w_fc1), b_fc1))
    w_fc2 = tf.Variable(tf.random_normal([128, 2], stddev=0.1), name='w_fc2')
    b_fc2 = tf.Variable(tf.random_normal(shape=[2], stddev=0.1), name='b_fc2')
    out_fc2 = tf.add(tf.matmul(hidden_fc1, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_net2_l6_56(input, dropout_rate=0.3):
    """Six-layer volumetric binary classifier, variant 2: smaller conv3
    kernel (3x3x3) and larger conv4 kernel (5x5x5) than volume_net_l6_56.

    Args:
        input: 5-D tensor fed to ``tf.nn.conv3d`` — presumably
            [batch, 56, 56, 56, 1] given the function name; TODO confirm.
        dropout_rate: fraction of units to drop; keep_prob = 1 - dropout_rate.

    Returns:
        (out_fc2, out_sm): pre-softmax logits and their softmax probabilities.
    """
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)

    def _conv_stage(x, kernel_shape, tag, pool):
        # conv3d (VALID, stride 1) -> relu -> dropout, optionally max-pooled.
        weights = tf.Variable(tf.random_normal(kernel_shape, stddev=0.1),
                              dtype=tf.float32, name='w_' + tag)
        biases = tf.Variable(tf.random_normal(shape=[kernel_shape[-1]], stddev=0.1),
                             dtype=tf.float32, name='b_' + tag)
        activated = tf.nn.relu(tf.add(
            tf.nn.conv3d(x, weights, strides=[1,1,1,1,1], padding='VALID'),
            biases))
        dropped = tf.nn.dropout(activated, keep_prob)
        if pool:
            dropped = tf.nn.max_pool3d(dropped, strides=[1,2,2,2,1],
                                       ksize=[1,2,2,2,1], padding='SAME')
        return dropped

    # Channel progression 1->16->32->64->128; final stage has no pooling.
    net = _conv_stage(input, [3,3,3,1,16], 'conv1', pool=True)
    net = _conv_stage(net, [4,4,4,16,32], 'conv2', pool=True)
    net = _conv_stage(net, [3,3,3,32,64], 'conv3', pool=True)
    net = _conv_stage(net, [5,5,5,64,128], 'conv4', pool=False)
    # Flatten to 128 features, then a 128-unit hidden FC layer and a 2-way head.
    flattened = tf.reshape(net, [-1,128])
    w_fc1 = tf.Variable(tf.random_normal([128,128], stddev=0.1), name='w_fc1')
    b_fc1 = tf.Variable(tf.random_normal(shape=[128], stddev=0.1), name='b_fc1')
    hidden_fc1 = tf.nn.relu(tf.add(tf.matmul(flattened, w_fc1), b_fc1))
    w_fc2 = tf.Variable(tf.random_normal([128, 2], stddev=0.1), name='w_fc2')
    b_fc2 = tf.Variable(tf.random_normal(shape=[2], stddev=0.1), name='b_fc2')
    out_fc2 = tf.add(tf.matmul(hidden_fc1, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_net3_l6_56(input, dropout_rate=0.3):
    """Six-layer volumetric binary classifier, wide variant: channel
    progression 1->64->128->256->512 with a 512-unit hidden FC layer.

    Args:
        input: 5-D tensor fed to ``tf.nn.conv3d`` — presumably
            [batch, 56, 56, 56, 1] given the function name; TODO confirm.
        dropout_rate: fraction of units to drop; keep_prob = 1 - dropout_rate.

    Returns:
        (out_fc2, out_sm): pre-softmax logits and their softmax probabilities.
    """
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)
    # conv1: 3x3x3, 1 -> 64 channels; relu -> dropout -> 2x2x2 max-pool.
    w_conv1 = tf.Variable(tf.random_normal([3,3,3,1,64],stddev=0.1),dtype=tf.float32,name='w_conv1')
    b_conv1 = tf.Variable(tf.random_normal(shape=[64], stddev=0.1),dtype=tf.float32,name='b_conv1')
    out_conv1 = tf.nn.relu(tf.add(tf.nn.conv3d(input,w_conv1,strides=[1,1,1,1,1],padding='VALID'),b_conv1))
    dropout_conv1 = tf.nn.dropout(out_conv1,keep_prob)
    hidden_conv1 = tf.nn.max_pool3d(dropout_conv1,strides=[1,2,2,2,1],ksize=[1,2,2,2,1],padding='SAME')
    # conv2: 4x4x4, 64 -> 128 channels.
    w_conv2 = tf.Variable(tf.random_normal([4,4,4,64,128], stddev=0.1), dtype=tf.float32,name='w_conv2')
    b_conv2 = tf.Variable(tf.random_normal(shape=[128], stddev=0.1), dtype=tf.float32, name='b_conv2')
    out_conv2 = tf.nn.relu(tf.add(tf.nn.conv3d(hidden_conv1, w_conv2, strides=[1,1,1,1,1], padding='VALID'), b_conv2))
    dropout_conv2 = tf.nn.dropout(out_conv2, keep_prob)
    hidden_conv2 = tf.nn.max_pool3d(dropout_conv2,strides=[1,2,2,2,1],ksize=[1,2,2,2,1],padding='SAME')
    # conv3: 3x3x3, 128 -> 256 channels.
    w_conv3 = tf.Variable(tf.random_normal([3,3,3,128,256], stddev=0.1), dtype=tf.float32, name='w_conv3')
    b_conv3 = tf.Variable(tf.random_normal(shape=[256], stddev=0.1), dtype=tf.float32, name='b_conv3')
    out_conv3 = tf.nn.relu(tf.add(tf.nn.conv3d(hidden_conv2, w_conv3, strides=[1,1,1,1,1], padding='VALID'), b_conv3))
    dropout_conv3 = tf.nn.dropout(out_conv3, keep_prob)
    hidden_conv3 = tf.nn.max_pool3d(dropout_conv3,strides=[1,2,2,2,1],ksize=[1,2,2,2,1],padding='SAME')
    # conv4: 5x5x5, 256 -> 512 channels; no pooling after this stage.
    w_conv4 = tf.Variable(tf.random_normal([5,5,5,256,512], stddev=0.1), dtype=tf.float32, name='w_conv4')
    b_conv4 = tf.Variable(tf.random_normal(shape=[512], stddev=0.1), dtype=tf.float32, name='b_conv4')
    out_conv4 = tf.nn.relu(
        tf.add(tf.nn.conv3d(hidden_conv3, w_conv4, strides=[1, 1, 1, 1, 1], padding='VALID'), b_conv4))
    dropout_conv4 = tf.nn.dropout(out_conv4, keep_prob)
    # all feature maps flattened to one 512-wide vector for the FC layers
    flattened_conv4 = tf.reshape(dropout_conv4,[-1,512])
    w_fc1 = tf.Variable(tf.random_normal([512,512],stddev=0.1),name='w_fc1')
    b_fc1 = tf.Variable(tf.random_normal(shape=[512], stddev=0.1), name='b_fc1')
    out_fc1 = tf.nn.relu(tf.add(tf.matmul(flattened_conv4,w_fc1),b_fc1))
    dropout_fc1 = tf.nn.dropout(out_fc1,keep_prob)
    w_fc2 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc2')
    b_fc2 = tf.Variable(tf.random_normal(shape=[2], stddev=0.1), name='b_fc2')
    # BUG FIX: the original fed out_fc1 into fc2, leaving dropout_fc1 computed
    # but unused — the FC-layer dropout was silently a no-op. Feed the
    # dropout-regularized activations as the dead variable clearly intended.
    out_fc2 = tf.add(tf.matmul(dropout_fc1, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_net4_l6_56(input, dropout_rate=0.3):
    """Build a 6-layer 3D conv classifier (conv 64/128/256/512 + 2 FC layers).

    Four VALID 3D convolutions with bias + ReLU + dropout (the first three
    followed by 2x2x2 max pooling), then a 512-unit FC layer and a 2-way
    classifier head.

    Fix: the classifier head previously consumed ``out_fc1`` directly, so the
    ``dropout_fc1`` tensor was computed but never used and ``dropout_rate``
    had no effect on the FC layer; the head now consumes ``dropout_fc1``.

    Args:
        input: 5-D tensor [batch, depth, height, width, 1] fed to conv1.
            (Spatial size is presumably 56^3 per the function name — not
            verifiable from this block.)
        dropout_rate: fraction of activations dropped; keep prob = 1 - rate.

    Returns:
        (out_fc2, out_sm): raw 2-class logits and their softmax.
    """
    keep_prob = tf.constant(1 - dropout_rate, dtype=tf.float32)
    w_conv1 = tf.Variable(tf.random_normal([3, 3, 3, 1, 64], stddev=0.1), dtype=tf.float32, name='w_conv1')
    b_conv1 = tf.Variable(tf.random_normal(shape=[64], stddev=0.1), dtype=tf.float32, name='b_conv1')
    out_conv1 = tf.nn.relu(tf.add(tf.nn.conv3d(input, w_conv1, strides=[1, 1, 1, 1, 1], padding='VALID'), b_conv1))
    dropout_conv1 = tf.nn.dropout(out_conv1, keep_prob)
    hidden_conv1 = tf.nn.max_pool3d(dropout_conv1, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    w_conv2 = tf.Variable(tf.random_normal([4, 4, 4, 64, 128], stddev=0.1), dtype=tf.float32, name='w_conv2')
    b_conv2 = tf.Variable(tf.random_normal(shape=[128], stddev=0.1), dtype=tf.float32, name='b_conv2')
    out_conv2 = tf.nn.relu(tf.add(tf.nn.conv3d(hidden_conv1, w_conv2, strides=[1, 1, 1, 1, 1], padding='VALID'), b_conv2))
    dropout_conv2 = tf.nn.dropout(out_conv2, keep_prob)
    hidden_conv2 = tf.nn.max_pool3d(dropout_conv2, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    w_conv3 = tf.Variable(tf.random_normal([5, 5, 5, 128, 256], stddev=0.1), dtype=tf.float32, name='w_conv3')
    b_conv3 = tf.Variable(tf.random_normal(shape=[256], stddev=0.1), dtype=tf.float32, name='b_conv3')
    out_conv3 = tf.nn.relu(tf.add(tf.nn.conv3d(hidden_conv2, w_conv3, strides=[1, 1, 1, 1, 1], padding='VALID'), b_conv3))
    dropout_conv3 = tf.nn.dropout(out_conv3, keep_prob)
    hidden_conv3 = tf.nn.max_pool3d(dropout_conv3, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # conv4 is not pooled; its output feeds the flatten directly.
    w_conv4 = tf.Variable(tf.random_normal([4, 4, 4, 256, 512], stddev=0.1), dtype=tf.float32, name='w_conv4')
    b_conv4 = tf.Variable(tf.random_normal(shape=[512], stddev=0.1), dtype=tf.float32, name='b_conv4')
    out_conv4 = tf.nn.relu(
        tf.add(tf.nn.conv3d(hidden_conv3, w_conv4, strides=[1, 1, 1, 1, 1], padding='VALID'), b_conv4))
    dropout_conv4 = tf.nn.dropout(out_conv4, keep_prob)
    # Flatten; assumes the remaining spatial extent is 1x1x1 so each row is
    # one example's 512 features — TODO confirm for the intended input size.
    flattened_conv4 = tf.reshape(dropout_conv4, [-1, 512])
    w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1')
    b_fc1 = tf.Variable(tf.random_normal(shape=[512], stddev=0.1), name='b_fc1')
    out_fc1 = tf.nn.relu(tf.add(tf.matmul(flattened_conv4, w_fc1), b_fc1))
    dropout_fc1 = tf.nn.dropout(out_fc1, keep_prob)
    w_fc2 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc2')
    b_fc2 = tf.Variable(tf.random_normal(shape=[2], stddev=0.1), name='b_fc2')
    # BUG FIX: feed the dropped-out activations (was out_fc1, which skipped dropout).
    out_fc2 = tf.add(tf.matmul(dropout_fc1, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_bnnet_l6_56(input):
    """Build a 6-layer 3D conv net with batch normalization (16/32/64/128).

    Four VALID 3D convolutions (the first three followed by 2x2x2 max
    pooling), a 128-unit fully connected layer with batch norm, and a
    2-way classifier head.  Batch statistics are computed with
    tf.nn.moments over every axis of each activation.

    Args:
        input: 5-D tensor [batch, depth, height, width, 1].
            (Spatial size is presumably 56^3 per the name — not verifiable
            from this block.)

    Returns:
        (out_fc2, out_sm): raw 2-class logits and their softmax.
    """
    eps = tf.constant(1e-13, dtype=tf.float32)

    def _conv_bn_stage(x, kernel, idx, pool):
        # One conv -> batch-norm -> ReLU stage; optional 2x2x2 max pool.
        w = tf.Variable(tf.random_normal(kernel, stddev=0.1), dtype=tf.float32,
                        name='w_conv%d' % idx)
        conv = tf.nn.conv3d(x, w, strides=[1, 1, 1, 1, 1], padding='VALID')
        # Moments taken over all axes (batch, spatial and channel together).
        mean, var = tf.nn.moments(conv, list(range(len(conv.shape))))
        scale = tf.Variable([1], dtype=tf.float32, name='r_bn%d' % idx)
        offset = tf.Variable(tf.random_normal([kernel[-1]], stddev=0.1),
                             dtype=tf.float32, name='b_bn%d' % idx)
        act = tf.nn.relu(tf.nn.batch_normalization(conv, mean, var, offset, scale, eps))
        if pool:
            act = tf.nn.max_pool3d(act, strides=[1, 2, 2, 2, 1],
                                   ksize=[1, 2, 2, 2, 1], padding='SAME')
        return act

    net = _conv_bn_stage(input, [3, 3, 3, 1, 16], 1, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 16, 32], 2, pool=True)
    net = _conv_bn_stage(net, [5, 5, 5, 32, 64], 3, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 64, 128], 4, pool=False)
    # Flatten to rows of 128 features (presumably 1x1x1 spatial remains —
    # TODO confirm for the intended input size).
    flat = tf.reshape(net, [-1, 128])
    w_fc1 = tf.Variable(tf.random_normal([128, 128], stddev=0.1), name='w_fc1')
    fc1 = tf.matmul(flat, w_fc1)
    # The FC batch norm's offset plays the role of the bias; no b_fc1.
    r_bn5 = tf.Variable([1], dtype=tf.float32, name='r_bn5')
    b_bn5 = tf.Variable(tf.random_normal([128], stddev=0.1), dtype=tf.float32, name='b_bn5')
    mean5, var5 = tf.nn.moments(fc1, list(range(len(fc1.shape))))
    hidden = tf.nn.relu(tf.nn.batch_normalization(fc1, mean5, var5, b_bn5, r_bn5, eps))
    w_fc2 = tf.Variable(tf.random_normal([128, 2], stddev=0.1), name='w_fc2')
    b_fc2 = tf.Variable(tf.random_normal(shape=[2], stddev=0.1), name='b_fc2')
    out_fc2 = tf.add(tf.matmul(hidden, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_bnnet2_l6_56(input):
    """Build a 6-layer 3D conv net with batch normalization (64/128/256/512).

    Same layout as volume_bnnet_l6_56 but with 4x wider channels: four
    VALID 3D convolutions (first three max-pooled 2x2x2), a 512-unit FC
    layer with batch norm, and a 2-way classifier head.

    Args:
        input: 5-D tensor [batch, depth, height, width, 1].

    Returns:
        (out_fc2, out_sm): raw 2-class logits and their softmax.
    """
    eps = tf.constant(1e-13, dtype=tf.float32)

    def _conv_bn_stage(x, kernel, idx, pool):
        # One conv -> batch-norm -> ReLU stage; optional 2x2x2 max pool.
        w = tf.Variable(tf.random_normal(kernel, stddev=0.1), dtype=tf.float32,
                        name='w_conv%d' % idx)
        conv = tf.nn.conv3d(x, w, strides=[1, 1, 1, 1, 1], padding='VALID')
        # Moments taken over all axes (batch, spatial and channel together).
        mean, var = tf.nn.moments(conv, list(range(len(conv.shape))))
        scale = tf.Variable([1], dtype=tf.float32, name='r_bn%d' % idx)
        offset = tf.Variable(tf.random_normal([kernel[-1]], stddev=0.1),
                             dtype=tf.float32, name='b_bn%d' % idx)
        act = tf.nn.relu(tf.nn.batch_normalization(conv, mean, var, offset, scale, eps))
        if pool:
            act = tf.nn.max_pool3d(act, strides=[1, 2, 2, 2, 1],
                                   ksize=[1, 2, 2, 2, 1], padding='SAME')
        return act

    net = _conv_bn_stage(input, [3, 3, 3, 1, 64], 1, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 64, 128], 2, pool=True)
    net = _conv_bn_stage(net, [5, 5, 5, 128, 256], 3, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 256, 512], 4, pool=False)
    # Flatten to rows of 512 features (presumably 1x1x1 spatial remains —
    # TODO confirm for the intended input size).
    flat = tf.reshape(net, [-1, 512])
    w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1')
    fc1 = tf.matmul(flat, w_fc1)
    # The FC batch norm's offset plays the role of the bias; no b_fc1.
    r_bn5 = tf.Variable([1], dtype=tf.float32, name='r_bn5')
    b_bn5 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn5')
    mean5, var5 = tf.nn.moments(fc1, list(range(len(fc1.shape))))
    hidden = tf.nn.relu(tf.nn.batch_normalization(fc1, mean5, var5, b_bn5, r_bn5, eps))
    w_fc2 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc2')
    b_fc2 = tf.Variable(tf.random_normal(shape=[2], stddev=0.1), name='b_fc2')
    out_fc2 = tf.add(tf.matmul(hidden, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_bnnet3_l6_56(input):
    """Build a 6-layer 3D conv net with offset-only batch normalization.

    Identical layout to volume_bnnet2_l6_56 (64/128/256/512 conv channels,
    512-unit FC, 2-way head) except that batch_normalization is called with
    scale=None — only a learned per-channel offset is applied, no gamma.

    Args:
        input: 5-D tensor [batch, depth, height, width, 1].

    Returns:
        (out_fc2, out_sm): raw 2-class logits and their softmax.
    """
    eps = tf.constant(1e-13, dtype=tf.float32)

    def _conv_bn_stage(x, kernel, idx, pool):
        # One conv -> offset-only batch-norm -> ReLU stage; optional pool.
        w = tf.Variable(tf.random_normal(kernel, stddev=0.1), dtype=tf.float32,
                        name='w_conv%d' % idx)
        conv = tf.nn.conv3d(x, w, strides=[1, 1, 1, 1, 1], padding='VALID')
        # Moments taken over all axes (batch, spatial and channel together).
        mean, var = tf.nn.moments(conv, list(range(len(conv.shape))))
        offset = tf.Variable(tf.random_normal([kernel[-1]], stddev=0.1),
                             dtype=tf.float32, name='b_bn%d' % idx)
        # scale=None: no gamma variable in this variant.
        act = tf.nn.relu(tf.nn.batch_normalization(conv, mean, var, offset, None, eps))
        if pool:
            act = tf.nn.max_pool3d(act, strides=[1, 2, 2, 2, 1],
                                   ksize=[1, 2, 2, 2, 1], padding='SAME')
        return act

    net = _conv_bn_stage(input, [3, 3, 3, 1, 64], 1, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 64, 128], 2, pool=True)
    net = _conv_bn_stage(net, [5, 5, 5, 128, 256], 3, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 256, 512], 4, pool=False)
    # Flatten to rows of 512 features (presumably 1x1x1 spatial remains —
    # TODO confirm for the intended input size).
    flat = tf.reshape(net, [-1, 512])
    w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1')
    fc1 = tf.matmul(flat, w_fc1)
    # The FC batch norm's offset plays the role of the bias; no b_fc1.
    b_bn5 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn5')
    mean5, var5 = tf.nn.moments(fc1, list(range(len(fc1.shape))))
    hidden = tf.nn.relu(tf.nn.batch_normalization(fc1, mean5, var5, b_bn5, None, eps))
    w_fc2 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc2')
    b_fc2 = tf.Variable(tf.random_normal(shape=[2], stddev=0.1), name='b_fc2')
    out_fc2 = tf.add(tf.matmul(hidden, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_bnnet_zerobias_l6_56(input):
    """Build a 6-layer 3D batch-norm conv net with zero-initialized biases.

    Identical layout to volume_bnnet2_l6_56 (64/128/256/512 conv channels,
    512-unit FC, 2-way head) except every batch-norm offset and the final
    classifier bias start at zero instead of random normal.

    Args:
        input: 5-D tensor [batch, depth, height, width, 1].

    Returns:
        (out_fc2, out_sm): raw 2-class logits and their softmax.
    """
    eps = tf.constant(1e-13, dtype=tf.float32)

    def _conv_bn_stage(x, kernel, idx, pool):
        # One conv -> batch-norm -> ReLU stage with zero-initialized offset.
        w = tf.Variable(tf.random_normal(kernel, stddev=0.1), dtype=tf.float32,
                        name='w_conv%d' % idx)
        conv = tf.nn.conv3d(x, w, strides=[1, 1, 1, 1, 1], padding='VALID')
        # Moments taken over all axes (batch, spatial and channel together).
        mean, var = tf.nn.moments(conv, list(range(len(conv.shape))))
        scale = tf.Variable([1], dtype=tf.float32, name='r_bn%d' % idx)
        offset = tf.Variable(tf.zeros([kernel[-1]]), dtype=tf.float32,
                             name='b_bn%d' % idx)
        act = tf.nn.relu(tf.nn.batch_normalization(conv, mean, var, offset, scale, eps))
        if pool:
            act = tf.nn.max_pool3d(act, strides=[1, 2, 2, 2, 1],
                                   ksize=[1, 2, 2, 2, 1], padding='SAME')
        return act

    net = _conv_bn_stage(input, [3, 3, 3, 1, 64], 1, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 64, 128], 2, pool=True)
    net = _conv_bn_stage(net, [5, 5, 5, 128, 256], 3, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 256, 512], 4, pool=False)
    # Flatten to rows of 512 features (presumably 1x1x1 spatial remains —
    # TODO confirm for the intended input size).
    flat = tf.reshape(net, [-1, 512])
    w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1')
    fc1 = tf.matmul(flat, w_fc1)
    # The FC batch norm's offset plays the role of the bias; no b_fc1.
    r_bn5 = tf.Variable([1], dtype=tf.float32, name='r_bn5')
    b_bn5 = tf.Variable(tf.zeros([512]), dtype=tf.float32, name='b_bn5')
    mean5, var5 = tf.nn.moments(fc1, list(range(len(fc1.shape))))
    hidden = tf.nn.relu(tf.nn.batch_normalization(fc1, mean5, var5, b_bn5, r_bn5, eps))
    w_fc2 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc2')
    b_fc2 = tf.Variable(tf.zeros([2]), name='b_fc2')
    out_fc2 = tf.add(tf.matmul(hidden, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_bnnet_flbias_l6_56(input, positive_confidence=0.01):
    """Build a 6-layer 3D batch-norm conv net with a focal-loss prior bias.

    Identical layout to volume_bnnet2_l6_56, but the final classifier bias
    is initialized to +/- log((1-p)/p) with p = positive_confidence, so the
    softmax initially predicts the positive class with probability p — the
    focal-loss prior-bias trick.

    Args:
        input: 5-D tensor [batch, depth, height, width, 1].
        positive_confidence: initial softmax probability of the positive
            class (second logit).

    Returns:
        (out_fc2, out_sm): raw 2-class logits and their softmax.
    """
    eps = tf.constant(1e-13, dtype=tf.float32)

    def _conv_bn_stage(x, kernel, idx, pool):
        # One conv -> batch-norm -> ReLU stage; optional 2x2x2 max pool.
        w = tf.Variable(tf.random_normal(kernel, stddev=0.1), dtype=tf.float32,
                        name='w_conv%d' % idx)
        conv = tf.nn.conv3d(x, w, strides=[1, 1, 1, 1, 1], padding='VALID')
        # Moments taken over all axes (batch, spatial and channel together).
        mean, var = tf.nn.moments(conv, list(range(len(conv.shape))))
        scale = tf.Variable([1], dtype=tf.float32, name='r_bn%d' % idx)
        offset = tf.Variable(tf.random_normal([kernel[-1]], stddev=0.1),
                             dtype=tf.float32, name='b_bn%d' % idx)
        act = tf.nn.relu(tf.nn.batch_normalization(conv, mean, var, offset, scale, eps))
        if pool:
            act = tf.nn.max_pool3d(act, strides=[1, 2, 2, 2, 1],
                                   ksize=[1, 2, 2, 2, 1], padding='SAME')
        return act

    net = _conv_bn_stage(input, [3, 3, 3, 1, 64], 1, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 64, 128], 2, pool=True)
    net = _conv_bn_stage(net, [5, 5, 5, 128, 256], 3, pool=True)
    net = _conv_bn_stage(net, [4, 4, 4, 256, 512], 4, pool=False)
    # Flatten to rows of 512 features (presumably 1x1x1 spatial remains —
    # TODO confirm for the intended input size).
    flat = tf.reshape(net, [-1, 512])
    w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1')
    fc1 = tf.matmul(flat, w_fc1)
    # The FC batch norm's offset plays the role of the bias; no b_fc1.
    r_bn5 = tf.Variable([1], dtype=tf.float32, name='r_bn5')
    b_bn5 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn5')
    mean5, var5 = tf.nn.moments(fc1, list(range(len(fc1.shape))))
    hidden = tf.nn.relu(tf.nn.batch_normalization(fc1, mean5, var5, b_bn5, r_bn5, eps))
    w_fc2 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc2')
    # Prior bias: logit gap log((1-p)/p) biases the head toward the negative class.
    prior_logit = math.log((1 - positive_confidence) / positive_confidence)
    b_fc2 = tf.Variable([-prior_logit, prior_logit], name='b_fc2')
    out_fc2 = tf.add(tf.matmul(hidden, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_bndo_flbias_l6_56(input, positive_confidence=0.01, dropout_rate=0.3):
    """Build a 6-layer 3D batch-norm + dropout net with focal-loss prior bias.

    Four VALID 3D convolutions (64/128/256/512), each followed by batch
    norm, ReLU, dropout and — in THIS variant all four layers — 2x2x2 max
    pooling; then a 512-unit FC layer with batch norm and dropout, and a
    2-way head whose bias encodes the positive_confidence prior.

    Args:
        input: 5-D tensor [batch, depth, height, width, 1].
        positive_confidence: initial softmax probability of the positive
            class (second logit).
        dropout_rate: fraction of activations dropped; keep prob = 1 - rate.

    Returns:
        (out_fc2, out_sm): raw 2-class logits and their softmax.
    """
    eps = tf.constant(1e-13, dtype=tf.float32)
    keep_prob = tf.constant(1 - dropout_rate, dtype=tf.float32)

    def _conv_bn_do_stage(x, kernel, idx):
        # One conv -> batch-norm -> ReLU -> dropout -> 2x2x2 max pool stage.
        w = tf.Variable(tf.random_normal(kernel, stddev=0.1), dtype=tf.float32,
                        name='w_conv%d' % idx)
        conv = tf.nn.conv3d(x, w, strides=[1, 1, 1, 1, 1], padding='VALID')
        # Moments taken over all axes (batch, spatial and channel together).
        mean, var = tf.nn.moments(conv, list(range(len(conv.shape))))
        scale = tf.Variable([1], dtype=tf.float32, name='r_bn%d' % idx)
        offset = tf.Variable(tf.random_normal([kernel[-1]], stddev=0.1),
                             dtype=tf.float32, name='b_bn%d' % idx)
        normed = tf.nn.batch_normalization(conv, mean, var, offset, scale, eps)
        dropped = tf.nn.dropout(tf.nn.relu(normed), keep_prob)
        return tf.nn.max_pool3d(dropped, strides=[1, 2, 2, 2, 1],
                                ksize=[1, 2, 2, 2, 1], padding='SAME')

    net = _conv_bn_do_stage(input, [3, 3, 3, 1, 64], 1)
    net = _conv_bn_do_stage(net, [4, 4, 4, 64, 128], 2)
    net = _conv_bn_do_stage(net, [5, 5, 5, 128, 256], 3)
    # NOTE: unlike the non-dropout variants, conv4 here IS max-pooled.
    net = _conv_bn_do_stage(net, [4, 4, 4, 256, 512], 4)
    # Flatten to rows of 512 features (presumably 1x1x1 spatial remains —
    # TODO confirm for the intended input size).
    flat = tf.reshape(net, [-1, 512])
    w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1')
    fc1 = tf.matmul(flat, w_fc1)
    # The FC batch norm's offset plays the role of the bias; no b_fc1.
    r_bn5 = tf.Variable([1], dtype=tf.float32, name='r_bn5')
    b_bn5 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn5')
    mean5, var5 = tf.nn.moments(fc1, list(range(len(fc1.shape))))
    bn5 = tf.nn.batch_normalization(fc1, mean5, var5, b_bn5, r_bn5, eps)
    hidden = tf.nn.dropout(tf.nn.relu(bn5), keep_prob)
    w_fc2 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc2')
    # Prior bias: logit gap log((1-p)/p) biases the head toward the negative class.
    prior_logit = math.log((1 - positive_confidence) / positive_confidence)
    b_fc2 = tf.Variable([-prior_logit, prior_logit], name='b_fc2')
    out_fc2 = tf.add(tf.matmul(hidden, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    return out_fc2, out_sm
def volume_bndo_flbias_l5_42(input, positive_confidence=0.5, dropout_rate=0.3):
    """Build a 5-layer 3D batch-norm + dropout net with focal-loss prior bias.

    Four VALID 3D convolutions (64/128/256/1024; the first three followed
    by 2x2x2 max pooling, conv4 unpooled), each with batch norm, ReLU and
    dropout, then a single 1024->2 classifier whose bias encodes the
    positive_confidence prior.  No intermediate FC layer in this variant.

    Args:
        input: 5-D tensor [batch, depth, height, width, 1].
            (Spatial size is presumably 42^3 per the name — not verifiable
            from this block.)
        positive_confidence: initial softmax probability of the positive
            class (second logit).
        dropout_rate: fraction of activations dropped; keep prob = 1 - rate.

    Returns:
        (out_fc1, out_sm): raw 2-class logits and their softmax.
    """
    eps = tf.constant(1e-13, dtype=tf.float32)
    keep_prob = tf.constant(1 - dropout_rate, dtype=tf.float32)

    def _conv_bn_do_stage(x, kernel, idx, pool):
        # One conv -> batch-norm -> ReLU -> dropout stage; optional pool.
        w = tf.Variable(tf.random_normal(kernel, stddev=0.1), dtype=tf.float32,
                        name='w_conv%d' % idx)
        conv = tf.nn.conv3d(x, w, strides=[1, 1, 1, 1, 1], padding='VALID')
        # Moments taken over all axes (batch, spatial and channel together).
        mean, var = tf.nn.moments(conv, list(range(len(conv.shape))))
        scale = tf.Variable([1], dtype=tf.float32, name='r_bn%d' % idx)
        offset = tf.Variable(tf.random_normal([kernel[-1]], stddev=0.1),
                             dtype=tf.float32, name='b_bn%d' % idx)
        normed = tf.nn.batch_normalization(conv, mean, var, offset, scale, eps)
        act = tf.nn.dropout(tf.nn.relu(normed), keep_prob)
        if pool:
            act = tf.nn.max_pool3d(act, strides=[1, 2, 2, 2, 1],
                                   ksize=[1, 2, 2, 2, 1], padding='SAME')
        return act

    net = _conv_bn_do_stage(input, [5, 5, 5, 1, 64], 1, pool=True)
    net = _conv_bn_do_stage(net, [4, 4, 4, 64, 128], 2, pool=True)
    net = _conv_bn_do_stage(net, [3, 3, 3, 128, 256], 3, pool=True)
    net = _conv_bn_do_stage(net, [3, 3, 3, 256, 1024], 4, pool=False)
    # Flatten to rows of 1024 features (presumably 1x1x1 spatial remains —
    # TODO confirm for the intended input size).
    flat = tf.reshape(net, [-1, 1024])
    w_fc1 = tf.Variable(tf.random_normal([1024, 2], stddev=0.1), name='w_fc1')
    # Prior bias: logit gap log((1-p)/p) biases the head toward the negative class.
    prior_logit = math.log((1 - positive_confidence) / positive_confidence)
    b_fc1 = tf.Variable([-prior_logit, prior_logit], name='b_fc1')
    out_fc1 = tf.add(tf.matmul(flat, w_fc1), b_fc1)
    out_sm = tf.nn.softmax(out_fc1)
    return out_fc1, out_sm
def volume_bndo_flbias_l6_40(input, training=True, positive_confidence=0.5, dropout_rate=0.3, batch_normalization_statistic=True, bn_params=None):
    """Build a 4-conv / 2-fc 3-D CNN graph (TF1 style) for binary classification.

    Batch-normalized network intended for focal-loss training: the final fc
    bias is initialized to [-log((1-p)/p), log((1-p)/p)] so the initial
    softmax positive-class output equals `positive_confidence`.
    The layer comments suggest a 40x40x40 input volume — TODO confirm.

    Args:
        input: 5-D tensor [batch, depth, height, width, 1] — presumably;
            only the single input channel is established by w_conv1's shape.
        training: forwarded as `trainable` to every tf.Variable.
        positive_confidence: desired initial positive-class probability.
        dropout_rate: dropout probability (keep_prob = 1 - dropout_rate).
        batch_normalization_statistic: if True, also return the per-layer
            BN mean/variance tensors as `bn_pars`.
        bn_params: optional list of [mean, var] pairs (one per BN layer);
            when given, they are frozen into constants instead of using
            batch moments.

    Returns:
        (outputs, variables, bn_pars): dict of named layer outputs, dict of
        named tf.Variables, and a list of [mean, var] pairs (or None).
    """
    bn_variance_epsilon = tf.constant(0.0000000000001, dtype=tf.float32)
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)
    # conv1: 5x5x5, 1 -> 64 channels, VALID padding
    w_conv1 = tf.Variable(tf.random_normal([5, 5, 5, 1, 64], stddev=0.1), dtype=tf.float32, name='w_conv1', trainable=training)
    out_conv1 = tf.nn.conv3d(input, w_conv1, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        # NOTE(review): moments are taken over ALL axes (including channels),
        # so BN uses one scalar mean/var rather than per-channel statistics —
        # confirm this is intended.
        bn_mean1, bn_var1 = tf.nn.moments(out_conv1, [i for i in range(len(out_conv1.shape))])
    else:
        bn_mean1 = tf.constant(bn_params[0][0], dtype=tf.float32, name='bn_mean1')
        bn_var1 = tf.constant(bn_params[0][1], dtype=tf.float32, name='bn_var1')
    # r_* is the BN scale (gamma), b_* the BN offset (beta)
    r_bn1 = tf.Variable([1], dtype=tf.float32, name='r_bn1', trainable=training)
    b_bn1 = tf.Variable(tf.random_normal([64], stddev=0.1), dtype=tf.float32, name='b_bn1', trainable=training)
    out_bn1 = tf.nn.batch_normalization(out_conv1, bn_mean1, bn_var1, b_bn1, r_bn1, bn_variance_epsilon)
    out_dropout1 = tf.nn.dropout(tf.nn.relu(out_bn1), keep_prob)
    hidden_conv1 = tf.nn.max_pool3d(out_dropout1, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv1 + pool, the volume size is 18x18x18 ([batch, depth, height, width, channels])
    # conv2: 5x5x5, 64 -> 128 channels
    w_conv2 = tf.Variable(tf.random_normal([5, 5, 5, 64, 128], stddev=0.1), dtype=tf.float32, name='w_conv2', trainable=training)
    out_conv2 = tf.nn.conv3d(hidden_conv1, w_conv2, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean2, bn_var2 = tf.nn.moments(out_conv2, [i for i in range(len(out_conv2.shape))])
    else:
        bn_mean2 = tf.constant(bn_params[1][0], dtype=tf.float32, name='bn_mean2')
        bn_var2 = tf.constant(bn_params[1][1], dtype=tf.float32, name='bn_var2')
    r_bn2 = tf.Variable([1], dtype=tf.float32, name='r_bn2', trainable=training)
    b_bn2 = tf.Variable(tf.random_normal([128], stddev=0.1), dtype=tf.float32, name='b_bn2', trainable=training)
    out_bn2 = tf.nn.batch_normalization(out_conv2, bn_mean2, bn_var2, b_bn2, r_bn2, bn_variance_epsilon)
    out_dropout2 = tf.nn.dropout(tf.nn.relu(out_bn2), keep_prob)
    hidden_conv2 = tf.nn.max_pool3d(out_dropout2, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv2 + pool, the volume size is 7x7x7
    # conv3: 2x2x2, 128 -> 256 channels
    w_conv3 = tf.Variable(tf.random_normal([2, 2, 2, 128, 256], stddev=0.1), dtype=tf.float32, name='w_conv3', trainable=training)
    out_conv3 = tf.nn.conv3d(hidden_conv2, w_conv3, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean3, bn_var3 = tf.nn.moments(out_conv3, [i for i in range(len(out_conv3.shape))])
    else:
        bn_mean3 = tf.constant(bn_params[2][0], dtype=tf.float32, name='bn_mean3')
        bn_var3 = tf.constant(bn_params[2][1], dtype=tf.float32, name='bn_var3')
    r_bn3 = tf.Variable([1], dtype=tf.float32, name='r_bn3', trainable=training)
    b_bn3 = tf.Variable(tf.random_normal([256], stddev=0.1), dtype=tf.float32, name='b_bn3', trainable=training)
    out_bn3 = tf.nn.batch_normalization(out_conv3, bn_mean3, bn_var3, b_bn3, r_bn3, bn_variance_epsilon)
    out_dropout3 = tf.nn.dropout(tf.nn.relu(out_bn3), keep_prob)
    hidden_conv3 = tf.nn.max_pool3d(out_dropout3, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv3 + pool, the volume size is 3x3x3
    # conv4: 3x3x3, 256 -> 512 channels; with VALID padding this collapses the
    # spatial dims to 1x1x1
    w_conv4 = tf.Variable(tf.random_normal([3, 3, 3, 256, 512], stddev=0.1), dtype=tf.float32, name='w_conv4', trainable=training)
    out_conv4 = tf.nn.conv3d(hidden_conv3, w_conv4, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean4, bn_var4 = tf.nn.moments(out_conv4, [i for i in range(len(out_conv4.shape))])
    else:
        bn_mean4 = tf.constant(bn_params[3][0], dtype=tf.float32, name='bn_mean4')
        bn_var4 = tf.constant(bn_params[3][1], dtype=tf.float32, name='bn_var4')
    r_bn4 = tf.Variable([1], dtype=tf.float32, name='r_bn4', trainable=training)
    b_bn4 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn4', trainable=training)
    out_bn4 = tf.nn.batch_normalization(out_conv4, bn_mean4, bn_var4, b_bn4, r_bn4, bn_variance_epsilon)
    out_dropout4 = tf.nn.dropout(tf.nn.relu(out_bn4), keep_prob)
    # after conv4 the output is [batch, 1, 1, 1, 512]; flatten to [batch, 512]
    flattened_conv4 = tf.reshape(out_dropout4, [-1, 512])
    # fc1: 512 -> 512, bias-free (BN offset b_bn5 acts as the bias)
    w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1', trainable=training)
    out_fc1 = tf.matmul(flattened_conv4, w_fc1)
    r_bn5 = tf.Variable([1], dtype=tf.float32, name='r_bn5', trainable=training)
    b_bn5 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn5', trainable=training)
    if bn_params is None:
        bn_mean5, bn_var5 = tf.nn.moments(out_fc1, [i for i in range(len(out_fc1.shape))])
    else:
        bn_mean5 = tf.constant(bn_params[4][0], dtype=tf.float32, name='bn_mean5')
        bn_var5 = tf.constant(bn_params[4][1], dtype=tf.float32, name='bn_var5')
    out_bn5 = tf.nn.batch_normalization(out_fc1, bn_mean5, bn_var5, b_bn5, r_bn5, bn_variance_epsilon)
    out_dropout5 = tf.nn.dropout(tf.nn.relu(out_bn5), keep_prob)
    # fc2: 512 -> 2 logits, with the focal-loss bias initialization
    w_fc2 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc2', trainable=training)
    #b_fc2 = tf.Variable(tf.random_normal(shape=[2], mean=0, stddev=0.1) + tf.constant([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)]), name='b_fc2', trainable=training)
    b_fc2 = tf.Variable([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)], name='b_fc2', trainable=training)
    out_fc2 = tf.add(tf.matmul(out_dropout5, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    outputs = {'conv1_out':hidden_conv1, 'conv2_out':hidden_conv2, 'conv3_out':hidden_conv3, 'conv4_out':out_dropout4, 'flattened_out':flattened_conv4, 'fc1_out':out_dropout5, 'last_out':out_fc2, 'sm_out':out_sm}
    variables = {'w_conv1':w_conv1, 'r_bn1':r_bn1, 'b_bn1':b_bn1,
                 'w_conv2':w_conv2, 'r_bn2':r_bn2, 'b_bn2':b_bn2,
                 'w_conv3':w_conv3, 'r_bn3':r_bn3, 'b_bn3':b_bn3,
                 'w_conv4':w_conv4, 'r_bn4':r_bn4, 'b_bn4':b_bn4,
                 'w_fc1':w_fc1, 'r_bn5':r_bn5, 'b_bn5':b_bn5,
                 'w_fc2':w_fc2, 'b_fc2':b_fc2}
    if batch_normalization_statistic:
        # NOTE(review): these are the batch-moment tensors themselves, not
        # running averages; they are meaningful only when bn_params is None.
        bn_pars = []
        bn_pars.append([bn_mean1, bn_var1])
        bn_pars.append([bn_mean2, bn_var2])
        bn_pars.append([bn_mean3, bn_var3])
        bn_pars.append([bn_mean4, bn_var4])
        bn_pars.append([bn_mean5, bn_var5])
    else:
        bn_pars = None
    #variables = [w_conv1, r_bn1, b_bn1, w_conv2, r_bn2, b_bn2, w_conv3, r_bn3, b_bn3, w_conv4, r_bn4, b_bn4, w_fc1, r_bn5, b_bn5, w_fc2, b_fc2]
    return outputs, variables, bn_pars
def volume_bndo_flbias_l6_40_v2(input, training=True, positive_confidence=0.5, dropout_rate=0.3, batch_normalization_statistic=True, bn_params=None):
    """Variant of volume_bndo_flbias_l6_40: conv3 has no pooling and conv4 is 5x5x5.

    Batch-normalized 3-D CNN for focal-loss training; the final fc bias is
    initialized so the initial softmax positive-class output equals
    `positive_confidence`. See volume_bndo_flbias_l6_40 for the argument and
    return-value contract — this function returns the same
    (outputs, variables, bn_pars) triple (the 'conv3_out' entry here is the
    un-pooled dropout output).
    """
    bn_variance_epsilon = tf.constant(0.0000000000001, dtype=tf.float32)
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)
    # conv1: 5x5x5, 1 -> 64 channels, VALID padding
    w_conv1 = tf.Variable(tf.random_normal([5, 5, 5, 1, 64], stddev=0.1), dtype=tf.float32, name='w_conv1', trainable=training)
    out_conv1 = tf.nn.conv3d(input, w_conv1, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        # NOTE(review): moments over ALL axes -> a single scalar mean/var,
        # not per-channel BN statistics — confirm intended.
        bn_mean1, bn_var1 = tf.nn.moments(out_conv1, [i for i in range(len(out_conv1.shape))])
    else:
        bn_mean1 = tf.constant(bn_params[0][0], dtype=tf.float32, name='bn_mean1')
        bn_var1 = tf.constant(bn_params[0][1], dtype=tf.float32, name='bn_var1')
    r_bn1 = tf.Variable([1], dtype=tf.float32, name='r_bn1', trainable=training)
    b_bn1 = tf.Variable(tf.random_normal([64], stddev=0.1), dtype=tf.float32, name='b_bn1', trainable=training)
    out_bn1 = tf.nn.batch_normalization(out_conv1, bn_mean1, bn_var1, b_bn1, r_bn1, bn_variance_epsilon)
    out_dropout1 = tf.nn.dropout(tf.nn.relu(out_bn1), keep_prob)
    hidden_conv1 = tf.nn.max_pool3d(out_dropout1, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv1 + pool, the volume size is 18x18x18 ([batch, depth, height, width, channels])
    # conv2: 5x5x5, 64 -> 128 channels
    w_conv2 = tf.Variable(tf.random_normal([5, 5, 5, 64, 128], stddev=0.1), dtype=tf.float32, name='w_conv2', trainable=training)
    out_conv2 = tf.nn.conv3d(hidden_conv1, w_conv2, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean2, bn_var2 = tf.nn.moments(out_conv2, [i for i in range(len(out_conv2.shape))])
    else:
        bn_mean2 = tf.constant(bn_params[1][0], dtype=tf.float32, name='bn_mean2')
        bn_var2 = tf.constant(bn_params[1][1], dtype=tf.float32, name='bn_var2')
    r_bn2 = tf.Variable([1], dtype=tf.float32, name='r_bn2', trainable=training)
    b_bn2 = tf.Variable(tf.random_normal([128], stddev=0.1), dtype=tf.float32, name='b_bn2', trainable=training)
    out_bn2 = tf.nn.batch_normalization(out_conv2, bn_mean2, bn_var2, b_bn2, r_bn2, bn_variance_epsilon)
    out_dropout2 = tf.nn.dropout(tf.nn.relu(out_bn2), keep_prob)
    hidden_conv2 = tf.nn.max_pool3d(out_dropout2, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv2 + pool, the volume size is 7x7x7
    # conv3: 3x3x3, 128 -> 256 channels; no pooling in this variant
    w_conv3 = tf.Variable(tf.random_normal([3, 3, 3, 128, 256], stddev=0.1), dtype=tf.float32, name='w_conv3', trainable=training)
    out_conv3 = tf.nn.conv3d(hidden_conv2, w_conv3, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean3, bn_var3 = tf.nn.moments(out_conv3, [i for i in range(len(out_conv3.shape))])
    else:
        bn_mean3 = tf.constant(bn_params[2][0], dtype=tf.float32, name='bn_mean3')
        bn_var3 = tf.constant(bn_params[2][1], dtype=tf.float32, name='bn_var3')
    r_bn3 = tf.Variable([1], dtype=tf.float32, name='r_bn3', trainable=training)
    b_bn3 = tf.Variable(tf.random_normal([256], stddev=0.1), dtype=tf.float32, name='b_bn3', trainable=training)
    out_bn3 = tf.nn.batch_normalization(out_conv3, bn_mean3, bn_var3, b_bn3, r_bn3, bn_variance_epsilon)
    out_dropout3 = tf.nn.dropout(tf.nn.relu(out_bn3), keep_prob)
    # after conv3, the volume size is 5x5x5
    # conv4: 5x5x5, 256 -> 512 channels; VALID padding collapses space to 1x1x1
    w_conv4 = tf.Variable(tf.random_normal([5, 5, 5, 256, 512], stddev=0.1), dtype=tf.float32, name='w_conv4', trainable=training)
    out_conv4 = tf.nn.conv3d(out_dropout3, w_conv4, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean4, bn_var4 = tf.nn.moments(out_conv4, [i for i in range(len(out_conv4.shape))])
    else:
        bn_mean4 = tf.constant(bn_params[3][0], dtype=tf.float32, name='bn_mean4')
        bn_var4 = tf.constant(bn_params[3][1], dtype=tf.float32, name='bn_var4')
    r_bn4 = tf.Variable([1], dtype=tf.float32, name='r_bn4', trainable=training)
    b_bn4 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn4', trainable=training)
    out_bn4 = tf.nn.batch_normalization(out_conv4, bn_mean4, bn_var4, b_bn4, r_bn4, bn_variance_epsilon)
    out_dropout4 = tf.nn.dropout(tf.nn.relu(out_bn4), keep_prob)
    # after conv4 the output is [batch, 1, 1, 1, 512]; flatten to [batch, 512]
    flattened_conv4 = tf.reshape(out_dropout4, [-1, 512])
    # fc1: 512 -> 512, bias-free (BN offset b_bn5 acts as the bias)
    w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1', trainable=training)
    out_fc1 = tf.matmul(flattened_conv4, w_fc1)
    r_bn5 = tf.Variable([1], dtype=tf.float32, name='r_bn5', trainable=training)
    b_bn5 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn5', trainable=training)
    if bn_params is None:
        bn_mean5, bn_var5 = tf.nn.moments(out_fc1, [i for i in range(len(out_fc1.shape))])
    else:
        bn_mean5 = tf.constant(bn_params[4][0], dtype=tf.float32, name='bn_mean5')
        bn_var5 = tf.constant(bn_params[4][1], dtype=tf.float32, name='bn_var5')
    out_bn5 = tf.nn.batch_normalization(out_fc1, bn_mean5, bn_var5, b_bn5, r_bn5, bn_variance_epsilon)
    out_dropout5 = tf.nn.dropout(tf.nn.relu(out_bn5), keep_prob)
    # fc2: 512 -> 2 logits, with the focal-loss bias initialization
    w_fc2 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc2', trainable=training)
    #b_fc2 = tf.Variable(tf.random_normal(shape=[2], mean=0, stddev=0.1) + tf.constant([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)]), name='b_fc2', trainable=training)
    b_fc2 = tf.Variable([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)], name='b_fc2', trainable=training)
    out_fc2 = tf.add(tf.matmul(out_dropout5, w_fc2), b_fc2)
    out_sm = tf.nn.softmax(out_fc2)
    outputs = {'conv1_out':hidden_conv1, 'conv2_out':hidden_conv2, 'conv3_out':out_dropout3, 'conv4_out':out_dropout4, 'flattened_out':flattened_conv4, 'fc1_out':out_dropout5, 'last_out':out_fc2, 'sm_out':out_sm}
    variables = {'w_conv1':w_conv1, 'r_bn1':r_bn1, 'b_bn1':b_bn1,
                 'w_conv2':w_conv2, 'r_bn2':r_bn2, 'b_bn2':b_bn2,
                 'w_conv3':w_conv3, 'r_bn3':r_bn3, 'b_bn3':b_bn3,
                 'w_conv4':w_conv4, 'r_bn4':r_bn4, 'b_bn4':b_bn4,
                 'w_fc1':w_fc1, 'r_bn5':r_bn5, 'b_bn5':b_bn5,
                 'w_fc2':w_fc2, 'b_fc2':b_fc2}
    if batch_normalization_statistic:
        # batch-moment tensors, meaningful only when bn_params is None
        bn_pars = []
        bn_pars.append([bn_mean1, bn_var1])
        bn_pars.append([bn_mean2, bn_var2])
        bn_pars.append([bn_mean3, bn_var3])
        bn_pars.append([bn_mean4, bn_var4])
        bn_pars.append([bn_mean5, bn_var5])
    else:
        bn_pars = None
    return outputs, variables, bn_pars
def volume_bndo_flbias_l5_30(input, training=True, positive_confidence=0.5, dropout_rate=0.3, batch_normalization_statistic=True, bn_params=None):
    """Build a 4-conv / 1-fc 3-D CNN graph (TF1 style) for binary classification.

    Batch-normalized network for focal-loss training; the single fc bias is
    initialized to [-log((1-p)/p), log((1-p)/p)] so the initial softmax
    positive-class output equals `positive_confidence`. The layer comments
    suggest a 30x30x30 input volume — TODO confirm.

    Args mirror volume_bndo_flbias_l6_40. Returns (outputs, variables,
    bn_pars); bn_pars has FOUR [mean, var] pairs here (no fc BN layer), or
    None when batch_normalization_statistic is False.
    """
    bn_variance_epsilon = tf.constant(0.0000000000001, dtype=tf.float32)
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)
    # conv1: 5x5x5, 1 -> 64 channels, VALID padding
    w_conv1 = tf.Variable(tf.random_normal([5, 5, 5, 1, 64], stddev=0.1), dtype=tf.float32, name='w_conv1', trainable=training)
    out_conv1 = tf.nn.conv3d(input, w_conv1, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        # NOTE(review): moments over ALL axes -> scalar mean/var shared by all
        # channels, not per-channel BN statistics — confirm intended.
        bn_mean1, bn_var1 = tf.nn.moments(out_conv1, [i for i in range(len(out_conv1.shape))])
    else:
        bn_mean1 = tf.constant(bn_params[0][0], dtype=tf.float32, name='bn_mean1')
        bn_var1 = tf.constant(bn_params[0][1], dtype=tf.float32, name='bn_var1')
    r_bn1 = tf.Variable([1], dtype=tf.float32, name='r_bn1', trainable=training)
    b_bn1 = tf.Variable(tf.random_normal([64], stddev=0.1), dtype=tf.float32, name='b_bn1', trainable=training)
    out_bn1 = tf.nn.batch_normalization(out_conv1, bn_mean1, bn_var1, b_bn1, r_bn1, bn_variance_epsilon)
    out_dropout1 = tf.nn.dropout(tf.nn.relu(out_bn1), keep_prob)
    hidden_conv1 = tf.nn.max_pool3d(out_dropout1, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv1 + pool, the volume size is 13x13x13 ([batch, depth, height, width, channels])
    # conv2: 2x2x2, 64 -> 128 channels
    w_conv2 = tf.Variable(tf.random_normal([2, 2, 2, 64, 128], stddev=0.1), dtype=tf.float32, name='w_conv2', trainable=training)
    out_conv2 = tf.nn.conv3d(hidden_conv1, w_conv2, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean2, bn_var2 = tf.nn.moments(out_conv2, [i for i in range(len(out_conv2.shape))])
    else:
        bn_mean2 = tf.constant(bn_params[1][0], dtype=tf.float32, name='bn_mean2')
        bn_var2 = tf.constant(bn_params[1][1], dtype=tf.float32, name='bn_var2')
    r_bn2 = tf.Variable([1], dtype=tf.float32, name='r_bn2', trainable=training)
    b_bn2 = tf.Variable(tf.random_normal([128], stddev=0.1), dtype=tf.float32, name='b_bn2', trainable=training)
    out_bn2 = tf.nn.batch_normalization(out_conv2, bn_mean2, bn_var2, b_bn2, r_bn2, bn_variance_epsilon)
    out_dropout2 = tf.nn.dropout(tf.nn.relu(out_bn2), keep_prob)
    hidden_conv2 = tf.nn.max_pool3d(out_dropout2, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv2 + pool, the volume size is 6x6x6
    # conv3: 3x3x3, 128 -> 256 channels
    w_conv3 = tf.Variable(tf.random_normal([3, 3, 3, 128, 256], stddev=0.1), dtype=tf.float32, name='w_conv3', trainable=training)
    out_conv3 = tf.nn.conv3d(hidden_conv2, w_conv3, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean3, bn_var3 = tf.nn.moments(out_conv3, [i for i in range(len(out_conv3.shape))])
    else:
        bn_mean3 = tf.constant(bn_params[2][0], dtype=tf.float32, name='bn_mean3')
        bn_var3 = tf.constant(bn_params[2][1], dtype=tf.float32, name='bn_var3')
    r_bn3 = tf.Variable([1], dtype=tf.float32, name='r_bn3', trainable=training)
    b_bn3 = tf.Variable(tf.random_normal([256], stddev=0.1), dtype=tf.float32, name='b_bn3', trainable=training)
    out_bn3 = tf.nn.batch_normalization(out_conv3, bn_mean3, bn_var3, b_bn3, r_bn3, bn_variance_epsilon)
    out_dropout3 = tf.nn.dropout(tf.nn.relu(out_bn3), keep_prob)
    hidden_conv3 = tf.nn.max_pool3d(out_dropout3, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv3 + pool, the volume size is 2x2x2
    # conv4: 2x2x2, 256 -> 512 channels; VALID padding collapses space to 1x1x1
    w_conv4 = tf.Variable(tf.random_normal([2, 2, 2, 256, 512], stddev=0.1), dtype=tf.float32, name='w_conv4', trainable=training)
    out_conv4 = tf.nn.conv3d(hidden_conv3, w_conv4, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean4, bn_var4 = tf.nn.moments(out_conv4, [i for i in range(len(out_conv4.shape))])
    else:
        bn_mean4 = tf.constant(bn_params[3][0], dtype=tf.float32, name='bn_mean4')
        bn_var4 = tf.constant(bn_params[3][1], dtype=tf.float32, name='bn_var4')
    r_bn4 = tf.Variable([1], dtype=tf.float32, name='r_bn4', trainable=training)
    b_bn4 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn4', trainable=training)
    out_bn4 = tf.nn.batch_normalization(out_conv4, bn_mean4, bn_var4, b_bn4, r_bn4, bn_variance_epsilon)
    out_dropout4 = tf.nn.dropout(tf.nn.relu(out_bn4), keep_prob)
    # after conv4 the output is [batch, 1, 1, 1, 512]; flatten to [batch, 512]
    flattened_conv4 = tf.reshape(out_dropout4, [-1, 512])
    #w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1')
    #out_fc1 = tf.matmul(flattened_conv4, w_fc1)
    #r_bn5 = tf.Variable([1], dtype=tf.float32, name='r_bn5')
    #b_bn5 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn5')
    #bn_mean5, bn_var5 = tf.nn.moments(out_fc1, [i for i in range(len(out_fc1.shape))])
    #out_bn5 = tf.nn.batch_normalization(out_fc1, bn_mean5, bn_var5, b_bn5, r_bn5, bn_variance_epsilon)
    #out_dropout5 = tf.nn.dropout(tf.nn.relu(out_bn5), keep_prob)
    # single fc: 512 -> 2 logits, with the focal-loss bias initialization
    w_fc1 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc1', trainable=training)
    #b_fc2 = tf.Variable(tf.random_normal(shape=[2], mean=0, stddev=0.1) + tf.constant([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)]), name='b_fc2')
    b_fc1 = tf.Variable([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)], name='b_fc1', trainable=training)
    out_fc1 = tf.add(tf.matmul(flattened_conv4, w_fc1), b_fc1)
    out_sm = tf.nn.softmax(out_fc1)
    outputs = {'conv1_out':hidden_conv1, 'conv2_out':hidden_conv2, 'conv3_out':hidden_conv3, 'conv4_out':out_dropout4, 'flattened_out':flattened_conv4, 'last_out':out_fc1, 'sm_out':out_sm}
    variables = {'w_conv1':w_conv1, 'r_bn1':r_bn1, 'b_bn1':b_bn1,
                 'w_conv2':w_conv2, 'r_bn2':r_bn2, 'b_bn2':b_bn2,
                 'w_conv3':w_conv3, 'r_bn3':r_bn3, 'b_bn3':b_bn3,
                 'w_conv4':w_conv4, 'r_bn4':r_bn4, 'b_bn4':b_bn4,
                 'w_fc1':w_fc1, 'b_fc1':b_fc1}
    if batch_normalization_statistic:
        # batch-moment tensors, meaningful only when bn_params is None
        bn_pars = []
        bn_pars.append([bn_mean1, bn_var1])
        bn_pars.append([bn_mean2, bn_var2])
        bn_pars.append([bn_mean3, bn_var3])
        bn_pars.append([bn_mean4, bn_var4])
    else:
        bn_pars = None
    return outputs, variables, bn_pars
def volume_bndo_flbias_l5_30_v2(input, positive_confidence=0.5, dropout_rate=0.3):
    """Variant of volume_bndo_flbias_l5_30 without trainable/bn_params support.

    Conv2 is 4x4x4 here, conv3 has no pooling, and conv4 is 3x3x3. BN moments
    are always computed from the batch, all variables are trainable, and only
    the pair (out_fc1, out_sm) — logits and softmax — is returned.
    The layer comments suggest a 30x30x30 input volume — TODO confirm.
    """
    bn_variance_epsilon = tf.constant(0.0000000000001, dtype=tf.float32)
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)
    # conv1: 5x5x5, 1 -> 64 channels, VALID padding
    w_conv1 = tf.Variable(tf.random_normal([5, 5, 5, 1, 64], stddev=0.1), dtype=tf.float32, name='w_conv1')
    out_conv1 = tf.nn.conv3d(input, w_conv1, strides=[1, 1, 1, 1, 1], padding='VALID')
    # NOTE(review): moments over ALL axes -> scalar mean/var, not per-channel
    bn_mean1, bn_var1 = tf.nn.moments(out_conv1, [i for i in range(len(out_conv1.shape))])
    r_bn1 = tf.Variable([1], dtype=tf.float32, name='r_bn1')
    b_bn1 = tf.Variable(tf.random_normal([64], stddev=0.1), dtype=tf.float32, name='b_bn1')
    out_bn1 = tf.nn.batch_normalization(out_conv1, bn_mean1, bn_var1, b_bn1, r_bn1, bn_variance_epsilon)
    out_dropout1 = tf.nn.dropout(tf.nn.relu(out_bn1), keep_prob)
    hidden_conv1 = tf.nn.max_pool3d(out_dropout1, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv1 + pool, the volume size is 13x13x13 ([batch, depth, height, width, channels])
    # conv2: 4x4x4, 64 -> 128 channels
    w_conv2 = tf.Variable(tf.random_normal([4, 4, 4, 64, 128], stddev=0.1), dtype=tf.float32, name='w_conv2')
    out_conv2 = tf.nn.conv3d(hidden_conv1, w_conv2, strides=[1, 1, 1, 1, 1], padding='VALID')
    bn_mean2, bn_var2 = tf.nn.moments(out_conv2, [i for i in range(len(out_conv2.shape))])
    r_bn2 = tf.Variable([1], dtype=tf.float32, name='r_bn2')
    b_bn2 = tf.Variable(tf.random_normal([128], stddev=0.1), dtype=tf.float32, name='b_bn2')
    out_bn2 = tf.nn.batch_normalization(out_conv2, bn_mean2, bn_var2, b_bn2, r_bn2, bn_variance_epsilon)
    out_dropout2 = tf.nn.dropout(tf.nn.relu(out_bn2), keep_prob)
    hidden_conv2 = tf.nn.max_pool3d(out_dropout2, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv2 + pool, the volume size is 5x5x5
    # conv3: 3x3x3, 128 -> 256 channels; no pooling in this variant
    w_conv3 = tf.Variable(tf.random_normal([3, 3, 3, 128, 256], stddev=0.1), dtype=tf.float32, name='w_conv3')
    out_conv3 = tf.nn.conv3d(hidden_conv2, w_conv3, strides=[1, 1, 1, 1, 1], padding='VALID')
    bn_mean3, bn_var3 = tf.nn.moments(out_conv3, [i for i in range(len(out_conv3.shape))])
    r_bn3 = tf.Variable([1], dtype=tf.float32, name='r_bn3')
    b_bn3 = tf.Variable(tf.random_normal([256], stddev=0.1), dtype=tf.float32, name='b_bn3')
    out_bn3 = tf.nn.batch_normalization(out_conv3, bn_mean3, bn_var3, b_bn3, r_bn3, bn_variance_epsilon)
    out_dropout3 = tf.nn.dropout(tf.nn.relu(out_bn3), keep_prob)
    # after conv3, the volume size is 3x3x3
    # conv4: 3x3x3, 256 -> 512 channels; VALID padding collapses space to 1x1x1
    w_conv4 = tf.Variable(tf.random_normal([3, 3, 3, 256, 512], stddev=0.1), dtype=tf.float32, name='w_conv4')
    out_conv4 = tf.nn.conv3d(out_dropout3, w_conv4, strides=[1, 1, 1, 1, 1], padding='VALID')
    bn_mean4, bn_var4 = tf.nn.moments(out_conv4, [i for i in range(len(out_conv4.shape))])
    r_bn4 = tf.Variable([1], dtype=tf.float32, name='r_bn4')
    b_bn4 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn4')
    out_bn4 = tf.nn.batch_normalization(out_conv4, bn_mean4, bn_var4, b_bn4, r_bn4, bn_variance_epsilon)
    out_dropout4 = tf.nn.dropout(tf.nn.relu(out_bn4), keep_prob)
    # after conv4 the output is [batch, 1, 1, 1, 512]; flatten to [batch, 512]
    flattened_conv4 = tf.reshape(out_dropout4, [-1, 512])
    #w_fc1 = tf.Variable(tf.random_normal([512, 512], stddev=0.1), name='w_fc1')
    #out_fc1 = tf.matmul(flattened_conv4, w_fc1)
    #r_bn5 = tf.Variable([1], dtype=tf.float32, name='r_bn5')
    #b_bn5 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn5')
    #bn_mean5, bn_var5 = tf.nn.moments(out_fc1, [i for i in range(len(out_fc1.shape))])
    #out_bn5 = tf.nn.batch_normalization(out_fc1, bn_mean5, bn_var5, b_bn5, r_bn5, bn_variance_epsilon)
    #out_dropout5 = tf.nn.dropout(tf.nn.relu(out_bn5), keep_prob)
    # single fc: 512 -> 2 logits, bias initialized so the initial softmax
    # positive-class output equals positive_confidence (focal-loss trick)
    w_fc1 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc1')
    #b_fc2 = tf.Variable(tf.random_normal(shape=[2], mean=0, stddev=0.1) + tf.constant([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)]), name='b_fc2')
    b_fc1 = tf.Variable([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)], name='b_fc1')
    out_fc1 = tf.add(tf.matmul(flattened_conv4, w_fc1), b_fc1)
    out_sm = tf.nn.softmax(out_fc1)
    #return b_bn1, w_conv1, w_conv2, out_conv1, out_bn1, hidden_conv1, hidden_conv2, hidden_conv3, out_fc1, out_fc2, out_sm
    return out_fc1, out_sm
def volume_bndo_flbias_l5_20(input, training=True, positive_confidence=0.5, dropout_rate=0.3, batch_normalization_statistic=True, bn_params=None):
    """Build a 4-conv / 1-fc 3-D CNN graph (TF1 style) for binary classification.

    Smaller-input variant: only conv1 is followed by pooling; conv2-conv4
    shrink the volume via VALID convolutions alone. The fc bias is
    initialized to [-log((1-p)/p), log((1-p)/p)] so the initial softmax
    positive-class output equals `positive_confidence`. The layer comments
    suggest a 20x20x20 input volume — TODO confirm.

    Args mirror volume_bndo_flbias_l6_40. Returns (outputs, variables,
    bn_pars); bn_pars has four [mean, var] pairs or None.
    """
    bn_variance_epsilon = tf.constant(0.0000000000001, dtype=tf.float32)
    keep_prob = tf.constant(1-dropout_rate, dtype=tf.float32)
    # conv1: 5x5x5, 1 -> 64 channels, VALID padding
    w_conv1 = tf.Variable(tf.random_normal([5, 5, 5, 1, 64], stddev=0.1), dtype=tf.float32, name='w_conv1', trainable=training)
    out_conv1 = tf.nn.conv3d(input, w_conv1, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        # NOTE(review): moments over ALL axes -> scalar mean/var shared by all
        # channels, not per-channel BN statistics — confirm intended.
        bn_mean1, bn_var1 = tf.nn.moments(out_conv1, [i for i in range(len(out_conv1.shape))])
    else:
        bn_mean1 = tf.constant(bn_params[0][0], dtype=tf.float32, name='bn_mean1')
        bn_var1 = tf.constant(bn_params[0][1], dtype=tf.float32, name='bn_var1')
    r_bn1 = tf.Variable([1], dtype=tf.float32, name='r_bn1', trainable=training)
    b_bn1 = tf.Variable(tf.random_normal([64], stddev=0.1), dtype=tf.float32, name='b_bn1', trainable=training)
    out_bn1 = tf.nn.batch_normalization(out_conv1, bn_mean1, bn_var1, b_bn1, r_bn1, bn_variance_epsilon)
    out_dropout1 = tf.nn.dropout(tf.nn.relu(out_bn1), keep_prob)
    hidden_conv1 = tf.nn.max_pool3d(out_dropout1, strides=[1, 2, 2, 2, 1], ksize=[1, 2, 2, 2, 1], padding='SAME')
    # after conv1 + pool, the volume size is 8x8x8 ([batch, depth, height, width, channels])
    # conv2: 3x3x3, 64 -> 128 channels; no pooling after this layer
    w_conv2 = tf.Variable(tf.random_normal([3, 3, 3, 64, 128], stddev=0.1), dtype=tf.float32, name='w_conv2', trainable=training)
    out_conv2 = tf.nn.conv3d(hidden_conv1, w_conv2, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean2, bn_var2 = tf.nn.moments(out_conv2, [i for i in range(len(out_conv2.shape))])
    else:
        bn_mean2 = tf.constant(bn_params[1][0], dtype=tf.float32, name='bn_mean2')
        bn_var2 = tf.constant(bn_params[1][1], dtype=tf.float32, name='bn_var2')
    r_bn2 = tf.Variable([1], dtype=tf.float32, name='r_bn2', trainable=training)
    b_bn2 = tf.Variable(tf.random_normal([128], stddev=0.1), dtype=tf.float32, name='b_bn2', trainable=training)
    out_bn2 = tf.nn.batch_normalization(out_conv2, bn_mean2, bn_var2, b_bn2, r_bn2, bn_variance_epsilon)
    out_dropout2 = tf.nn.dropout(tf.nn.relu(out_bn2), keep_prob)
    # after conv2, the volume size is 6x6x6
    # conv3: 4x4x4, 128 -> 256 channels
    w_conv3 = tf.Variable(tf.random_normal([4, 4, 4, 128, 256], stddev=0.1), dtype=tf.float32, name='w_conv3', trainable=training)
    out_conv3 = tf.nn.conv3d(out_dropout2, w_conv3, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean3, bn_var3 = tf.nn.moments(out_conv3, [i for i in range(len(out_conv3.shape))])
    else:
        bn_mean3 = tf.constant(bn_params[2][0], dtype=tf.float32, name='bn_mean3')
        bn_var3 = tf.constant(bn_params[2][1], dtype=tf.float32, name='bn_var3')
    r_bn3 = tf.Variable([1], dtype=tf.float32, name='r_bn3', trainable=training)
    b_bn3 = tf.Variable(tf.random_normal([256], stddev=0.1), dtype=tf.float32, name='b_bn3', trainable=training)
    out_bn3 = tf.nn.batch_normalization(out_conv3, bn_mean3, bn_var3, b_bn3, r_bn3, bn_variance_epsilon)
    out_dropout3 = tf.nn.dropout(tf.nn.relu(out_bn3), keep_prob)
    # after conv3, the volume size is 3x3x3
    # conv4: 3x3x3, 256 -> 512 channels; VALID padding collapses space to 1x1x1
    w_conv4 = tf.Variable(tf.random_normal([3, 3, 3, 256, 512], stddev=0.1), dtype=tf.float32, name='w_conv4', trainable=training)
    out_conv4 = tf.nn.conv3d(out_dropout3, w_conv4, strides=[1, 1, 1, 1, 1], padding='VALID')
    if bn_params is None:
        bn_mean4, bn_var4 = tf.nn.moments(out_conv4, [i for i in range(len(out_conv4.shape))])
    else:
        bn_mean4 = tf.constant(bn_params[3][0], dtype=tf.float32, name='bn_mean4')
        bn_var4 = tf.constant(bn_params[3][1], dtype=tf.float32, name='bn_var4')
    r_bn4 = tf.Variable([1], dtype=tf.float32, name='r_bn4', trainable=training)
    b_bn4 = tf.Variable(tf.random_normal([512], stddev=0.1), dtype=tf.float32, name='b_bn4', trainable=training)
    out_bn4 = tf.nn.batch_normalization(out_conv4, bn_mean4, bn_var4, b_bn4, r_bn4, bn_variance_epsilon)
    out_dropout4 = tf.nn.dropout(tf.nn.relu(out_bn4), keep_prob)
    # after conv4 the output is [batch, 1, 1, 1, 512]; flatten to [batch, 512]
    flattened_conv4 = tf.reshape(out_dropout4, [-1, 512])
    # single fc: 512 -> 2 logits, with the focal-loss bias initialization
    w_fc1 = tf.Variable(tf.random_normal([512, 2], stddev=0.1), name='w_fc1', trainable=training)
    #b_fc2 = tf.Variable(tf.random_normal(shape=[2], mean=0, stddev=0.1) + tf.constant([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)]), name='b_fc2')
    b_fc1 = tf.Variable([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)], name='b_fc1', trainable=training)
    out_fc1 = tf.add(tf.matmul(flattened_conv4, w_fc1), b_fc1)
    out_sm = tf.nn.softmax(out_fc1)
    outputs = {'conv1_out':hidden_conv1, 'conv2_out':out_dropout2, 'conv3_out':out_dropout3, 'conv4_out':out_dropout4, 'flattened_out':flattened_conv4, 'last_out':out_fc1, 'sm_out':out_sm}
    variables = {'w_conv1':w_conv1, 'r_bn1':r_bn1, 'b_bn1':b_bn1,
                 'w_conv2':w_conv2, 'r_bn2':r_bn2, 'b_bn2':b_bn2,
                 'w_conv3':w_conv3, 'r_bn3':r_bn3, 'b_bn3':b_bn3,
                 'w_conv4':w_conv4, 'r_bn4':r_bn4, 'b_bn4':b_bn4,
                 'w_fc1':w_fc1, 'b_fc1':b_fc1}
    if batch_normalization_statistic:
        # batch-moment tensors, meaningful only when bn_params is None
        bn_pars = []
        bn_pars.append([bn_mean1, bn_var1])
        bn_pars.append([bn_mean2, bn_var2])
        bn_pars.append([bn_mean3, bn_var3])
        bn_pars.append([bn_mean4, bn_var4])
    else:
        bn_pars = None
    return outputs, variables, bn_pars
def vote_fusion(predictions):
    """Max-fuse the column-0 scores of three prediction tensors.

    Takes column 0 of each of the first three tensors in `predictions`,
    concatenates them side by side, and returns the per-row maximum.
    """
    columns = []
    for idx in range(3):
        columns.append(tf.reshape(predictions[idx][:, 0], [-1, 1]))
    stacked = tf.keras.backend.concatenate(columns, axis=1)
    return tf.reduce_max(stacked, axis=1)
def committe_fusion(predictions, weights=(0.3, 0.4, 0.3)):
    """Weighted (committee) fusion of prediction tensors.

    Returns sum_i(predictions[i] * weights[i]). If the number of weights
    does not match the number of predictions, prints a warning and falls
    back to the unweighted mean of `predictions` (preserved best-effort
    behavior).

    Args:
        predictions: sequence of same-shaped tensors.
        weights: per-model weights; default is the original (0.3, 0.4, 0.3).
            Changed from a list literal to a tuple to avoid the mutable
            default-argument pitfall; callers passing lists are unaffected.
    """
    if len(predictions) != len(weights):
        print('length incorrect')
        return tf.add_n(predictions)/tf.constant(len(predictions), dtype=tf.float32)
    # zip + comprehension replaces the original index loop; same op graph
    weighted_predictions = [pred * tf.constant(w) for pred, w in zip(predictions, weights)]
    return tf.add_n(weighted_predictions)
def late_fusion(features, training=True, positive_confidence=0.5):
    """Late fusion: concatenate per-stream feature vectors and apply one
    fully connected layer producing 2-class logits.

    Args:
        features: list of 2-D tensors to concatenate along axis 1
            (assumed (batch, feature_dim) -- TODO confirm with callers).
        training: whether the fusion weight and bias variables are trainable.
        positive_confidence: value used to seed the bias via
            +/- log((1-p)/p) -- presumably so the untrained classifier's
            output reflects this prior; verify against training code.

    Returns:
        Tuple of (logits, softmax probabilities, dict of layer variables).
    """
    concfeature = tf.keras.backend.concatenate(features, axis=1)
    # TF1-style shape construction: weight width is taken from the (static)
    # concatenated feature dimension, output width is fixed at 2 classes.
    w_fc_conc = tf.Variable(tf.random_normal(tf.TensorShape([concfeature.shape[1], tf.Dimension(2)]), stddev=0.1), trainable=training, dtype=tf.float32, name='w_fc_conc')
    # Bias initialized to the negative/positive log-odds of positive_confidence.
    b_fc_conc = tf.Variable([-math.log((1-positive_confidence)/positive_confidence), math.log((1-positive_confidence)/positive_confidence)], trainable=training, dtype=tf.float32, name='b_fc_conc')
    out_fc_conc = tf.add(tf.matmul(concfeature, w_fc_conc), b_fc_conc)
    out_sm = tf.nn.softmax(out_fc_conc)
    variables = {'w_fc_conc': w_fc_conc, 'b_fc_conc': b_fc_conc}
    return out_fc_conc, out_sm, variables
| 67.707218
| 231
| 0.733407
| 15,422
| 83,483
| 3.74063
| 0.014525
| 0.030093
| 0.068437
| 0.079878
| 0.974986
| 0.97145
| 0.966822
| 0.963199
| 0.956906
| 0.955814
| 0
| 0.086352
| 0.0982
| 83,483
| 1,233
| 232
| 67.707218
| 0.67991
| 0.192434
| 0
| 0.868871
| 0
| 0
| 0.045538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025086
| false
| 0
| 0.002281
| 0
| 0.053592
| 0.00114
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a92dfe16cbcddf8569b8b82e06d6e8b00e6fade7
| 108
|
py
|
Python
|
demos/instance_occlsegm/instance_occlsegm_lib/contrib/instance_occlsegm/functions/__init__.py
|
pazeshun/jsk_apc
|
0ff42000ad5992f8a31e719a5360a39cf4fa1fde
|
[
"BSD-3-Clause"
] | null | null | null |
demos/instance_occlsegm/instance_occlsegm_lib/contrib/instance_occlsegm/functions/__init__.py
|
pazeshun/jsk_apc
|
0ff42000ad5992f8a31e719a5360a39cf4fa1fde
|
[
"BSD-3-Clause"
] | 2
|
2019-04-11T05:36:23.000Z
|
2019-08-19T12:58:10.000Z
|
demos/instance_occlsegm/instance_occlsegm_lib/contrib/instance_occlsegm/functions/__init__.py
|
pazeshun/jsk_apc
|
0ff42000ad5992f8a31e719a5360a39cf4fa1fde
|
[
"BSD-3-Clause"
] | null | null | null |
# flake8: noqa
from .roi_unpooling_2d import ROIUnpooling2D
from .roi_unpooling_2d import roi_unpooling_2d
| 21.6
| 46
| 0.851852
| 16
| 108
| 5.375
| 0.5
| 0.418605
| 0.488372
| 0.418605
| 0.55814
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052083
| 0.111111
| 108
| 4
| 47
| 27
| 0.84375
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a980169d2a5f39c320e021e2943551a05b60e90b
| 5,312
|
py
|
Python
|
test.py
|
overfly83/spacy
|
f4fcc20541c9462f2708a60948ce2e35d918ee88
|
[
"MIT"
] | null | null | null |
test.py
|
overfly83/spacy
|
f4fcc20541c9462f2708a60948ce2e35d918ee88
|
[
"MIT"
] | null | null | null |
test.py
|
overfly83/spacy
|
f4fcc20541c9462f2708a60948ce2e35d918ee88
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf8
from spacyrest.rest.spacy.SacParse import SacParse
import json
def _demo_parse(sentence):
    """Parse `sentence` with SacParse and print a banner, the sentence, any
    recognized entities, and the verb/direct-object pairs."""
    print(u'=========================================')
    print(sentence)
    sp = SacParse()
    doc = sp.read_document(sentence)
    ents_json = sp.entity_recognition(doc)
    if ents_json:
        print(ents_json)
    # Original code called generate_verb_dobj twice per sentence and discarded
    # the first result; a single call produces identical output.
    print(sp.generate_verb_dobj(doc))


if __name__ == '__main__':
    sentences = [
        u'What if I increase milk rate by 10% in 2020.',
        u'let\'s try if increase the candy rate by 10% in 2020.',
        # NOTE(review): '\s' is not a valid escape (it stays as backslash-s);
        # presumably "Let\'s" was intended -- kept as-is to preserve behavior.
        u'Let\s go to planning.',
        # Previously exercised examples, kept for reference -- uncomment to re-enable:
        # u'Please help me quickly lock the cell.',
        # u'Autonomous cars shift insurance liability toward manufacturers',
        # u'Lock the actual data from year 2016 to year 2018.',
        # u'Lock the actual data in 2016',
        # u'Lock the cell from year 2016 to year 2018',
        # u'Lock the cell of actual data in 2018',
        # u'I don\'t lock that cell, I lock this cell',
        # u'go to nice model list page',
        # u'go to model list',
        # u'go to the model detail page',
        # u'naivgate to the model detail page',
        # u'Colleages located in shanghai office report to Sam',
    ]
    for sentence in sentences:
        _demo_parse(sentence)
| 35.891892
| 82
| 0.560806
| 639
| 5,312
| 4.456964
| 0.126761
| 0.126404
| 0.136938
| 0.132725
| 0.874649
| 0.874649
| 0.874649
| 0.874649
| 0.874649
| 0.874649
| 0
| 0.008858
| 0.213667
| 5,312
| 147
| 83
| 36.136054
| 0.672971
| 0.654367
| 0
| 0.818182
| 0
| 0
| 0.116077
| 0.071387
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.060606
| 0
| 0.060606
| 0.363636
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d119ffa506558f3064414fd972ed50e79238caa
| 9,762
|
py
|
Python
|
api_testing/testCase/testApiV0FileAdd.py
|
elastos/Elastos.Hive.HttpAPITests
|
2f14928992e4e2f5bf53e50bb93a0b7c4f6eaacd
|
[
"MIT"
] | 4
|
2019-01-06T05:19:05.000Z
|
2019-06-07T09:32:45.000Z
|
api_testing/testCase/testApiV0FileAdd.py
|
elastos/Elastos.Hive.HttpAPITests
|
2f14928992e4e2f5bf53e50bb93a0b7c4f6eaacd
|
[
"MIT"
] | null | null | null |
api_testing/testCase/testApiV0FileAdd.py
|
elastos/Elastos.Hive.HttpAPITests
|
2f14928992e4e2f5bf53e50bb93a0b7c4f6eaacd
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
__title__ = ''
__author__ = 'suxx'
__mtime__ = '2019/1/22'
"""
import unittest, sys, json, time, os
sys.path.append("../")
import read_conf
from function.func import *
from function.ela_log import MyLog
# Module-level logger shared by all test cases in this file.
log = MyLog.get_log()
logger = log.get_logger()

# Readers for the test configuration and test data files.
a = read_conf.ReadConfig()
b = read_conf.ReadData()

# Cluster endpoint and per-API fixtures pulled from configuration:
# base URL and port of the IPFS cluster master, the /add API path, and the
# JSON body a successful add is expected to return.
ipfs_master_api_baseurl = a.get_ipfs_cluster("ipfs_master_api_baseurl")
ipfs_master_api_port = a.get_ipfs_cluster("ipfs_master_api_endpoint_port")
api = b.get_api_v0_file_add("api")
normal_response_body = b.get_api_v0_file_add("normal_response_body")
class ApiV0FileAdd(unittest.TestCase):
    '''
    Add a file or directory to cluster.
    METHOD: GET/POST
    Argument    Type    Required    Description
    path        file    yes         The path to a file to be added to the cluster.
    recursive   bool    no          Add directory paths recursively. Default: "false". Required: no.
    hidden      bool    no          Include files that are hidden. Only takes effect on recursive add. Default: "false".
    pin         bool    no          Pin this object when adding. Default: "true".
    '''

    def __init__(self, methodName='runTest'):
        # ConfigHttp provides the curl/shell helpers used by every test below.
        self.f = ConfigHttp("ipfs_master_api_endpoint_port")
        unittest.TestCase.__init__(self, methodName)

    def _make_file(self):
        """Create a small file with a random name and return the name.

        (The original copies also called f.close() inside the `with` block,
        which is redundant -- the context manager already closes the file.)
        """
        fname = "%s" % self.f.random_str()
        logger.info(fname)
        with open(fname, "a") as f:
            f.write("This is file %s\n" % fname)
        return fname

    def _post_file(self, fname, query=""):
        """POST `fname` to the /add endpoint with an optional query string.

        Logs and returns (response headers, response body).
        NOTE(review): query strings containing '&' are interpolated unquoted
        into the shell command, as in the original -- verify run_cmd quotes
        its argument or relies on this deliberately.
        """
        a1, b1 = self.f.run_cmd("curl --connect-timeout 10 -m 10 -v -F file=@%s %s:%s%s%s"
                                % (fname, ipfs_master_api_baseurl, ipfs_master_api_port, api, query))
        logger.info(a1)
        logger.info(b1)
        return a1, b1

    def _assert_ok_with_body(self, a1, b1):
        """Assert a 200 OK response whose JSON body matches the expected schema."""
        self.assertIn("200 OK", a1)
        x = self.f.check_body(json.loads(b1), json.loads(normal_response_body))
        self.assertEqual(x, 0)

    def _check_bool_flag(self, flag):
        """Values 1 and 0 for `flag` succeed; a non-boolean value is rejected."""
        fname = self._make_file()
        for value in ("1", "0"):
            a1, b1 = self._post_file(fname, "?%s=%s" % (flag, value))
            self._assert_ok_with_body(a1, b1)
        a1, b1 = self._post_file(fname, "?%s=xxx" % flag)
        self.assertNotIn("200 OK", a1)
        os.remove(fname)

    @Wrappers.wrap_case(os.path.basename(__file__))
    def test_no_arg_get(self):
        """GET without any argument must fail with HTTP 500."""
        a1, b1 = self.f.curl_get_code(ipfs_master_api_baseurl, ipfs_master_api_port, api)
        logger.info(b1)
        self.assertEqual(b1, "500")

    @Wrappers.wrap_case(os.path.basename(__file__))
    def test_with_correct_arg_get(self):
        """Adding an existing file succeeds and returns the expected body."""
        fname = self._make_file()
        a1, b1 = self._post_file(fname)
        os.remove(fname)
        self._assert_ok_with_body(a1, b1)

    @Wrappers.wrap_case(os.path.basename(__file__))
    def test_with_unexist_file_get(self):
        """Adding a file that does not exist locally must not return 200 OK."""
        fname = "%s" % self.f.random_str()
        a1, b1 = self._post_file(fname)
        self.assertNotIn("200 OK", a1)

    @Wrappers.wrap_case(os.path.basename(__file__))
    def test_with_recursive_get(self):
        """recursive=1/0 succeed; recursive=xxx is rejected."""
        self._check_bool_flag("recursive")

    @Wrappers.wrap_case(os.path.basename(__file__))
    def test_with_hidden_get(self):
        """hidden=1/0 succeed; hidden=xxx is rejected."""
        self._check_bool_flag("hidden")

    @Wrappers.wrap_case(os.path.basename(__file__))
    def test_with_pin_get(self):
        """pin=1/0 succeed; pin=xxx is rejected."""
        self._check_bool_flag("pin")

    @Wrappers.wrap_case(os.path.basename(__file__))
    def test_with_err_arg_string_get(self):
        """Unknown argument names are ignored: the add still returns 200 OK."""
        fname = self._make_file()
        for query in ("?hidde=1", "?resur=0", "?pinn=0"):
            a1, b1 = self._post_file(fname, query)
            self.assertIn("200 OK", a1)
        os.remove(fname)

    @Wrappers.wrap_case(os.path.basename(__file__))
    def test_with_joint_arg_get(self):
        """Multiple valid arguments can be combined in one request."""
        fname = self._make_file()
        for query in ("?hidden=1&recursive=1", "?hidden=1&recursive=1&pin=0"):
            a1, b1 = self._post_file(fname, query)
            self._assert_ok_with_body(a1, b1)
        os.remove(fname)
| 41.717949
| 144
| 0.546097
| 1,337
| 9,762
| 3.776365
| 0.10546
| 0.019014
| 0.100416
| 0.075262
| 0.83363
| 0.826302
| 0.816003
| 0.807487
| 0.800158
| 0.792038
| 0
| 0.036761
| 0.32565
| 9,762
| 233
| 145
| 41.896996
| 0.730214
| 0.070068
| 0
| 0.745665
| 0
| 0.092486
| 0.149334
| 0.018048
| 0
| 0
| 0
| 0
| 0.150289
| 1
| 0.052023
| false
| 0
| 0.023121
| 0
| 0.080925
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d6573004403552f3aa8090e24c58d7de9b87cc0
| 16,681
|
py
|
Python
|
tests/unit/test_airflow.py
|
masry707/sagemaker-python-sdk
|
5b18ce4223aee41be8ea5ca50c9bff853a65a7d0
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_airflow.py
|
masry707/sagemaker-python-sdk
|
5b18ce4223aee41be8ea5ca50c9bff853a65a7d0
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_airflow.py
|
masry707/sagemaker-python-sdk
|
5b18ce4223aee41be8ea5ca50c9bff853a65a7d0
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
import pytest
import mock
from sagemaker import estimator, tensorflow
from sagemaker.workflow import airflow
from sagemaker.amazon import amazon_estimator
from sagemaker.amazon import ntm
REGION = 'us-west-2'
BUCKET_NAME = 'output'
@pytest.fixture()
def sagemaker_session():
    """Build a mocked sagemaker session bound to a fake boto session.

    The mock reports region us-west-2, non-local mode, no extra config, and a
    default bucket named by BUCKET_NAME.
    """
    fake_boto = mock.Mock(name='boto_session', region_name=REGION)
    fake_session = mock.Mock(name='sagemaker_session',
                             boto_session=fake_boto,
                             boto_region_name=REGION,
                             config=None,
                             local_mode=False)
    fake_session.default_bucket = mock.Mock(name='default_bucket',
                                            return_value=BUCKET_NAME)
    fake_session._default_bucket = BUCKET_NAME
    return fake_session
def test_byo_training_config_required_args(sagemaker_session):
    """training_config for a bring-your-own-image Estimator with only required
    args: defaults (File mode, 86400 s limit, 30 GB volume, default bucket)
    are filled in and Airflow/Jinja placeholders pass through verbatim."""
    byo = estimator.Estimator(
        image_name="byo",
        role="{{ role }}",
        train_instance_count="{{ instance_count }}",
        train_instance_type="ml.c4.2xlarge",
        sagemaker_session=sagemaker_session)

    byo.set_hyperparameters(epochs=32,
                            feature_dim=1024,
                            mini_batch_size=256)

    data = {'train': "{{ training_data }}"}

    config = airflow.training_config(byo, data)
    # Expected CreateTrainingJob request; hyperparameters are stringified.
    expected_config = {
        'AlgorithmSpecification': {
            'TrainingImage': 'byo',
            'TrainingInputMode': 'File'
        },
        'OutputDataConfig': {
            'S3OutputPath': 's3://output/'
        },
        'TrainingJobName': "byo-{{ execution_date.strftime('%Y-%m-%d-%H-%M-%S') }}",
        'StoppingCondition': {
            'MaxRuntimeInSeconds': 86400
        },
        'ResourceConfig': {
            'InstanceCount': '{{ instance_count }}',
            'InstanceType': 'ml.c4.2xlarge',
            'VolumeSizeInGB': 30
        },
        'RoleArn': '{{ role }}',
        'InputDataConfig': [{
            'DataSource': {
                'S3DataSource': {
                    'S3DataDistributionType': 'FullyReplicated',
                    'S3DataType': 'S3Prefix',
                    'S3Uri': '{{ training_data }}'
                }
            }, 'ChannelName': 'train'
        }],
        'HyperParameters': {
            'epochs': '32',
            'feature_dim': '1024',
            'mini_batch_size': '256'}
    }

    assert config == expected_config
def test_byo_training_config_all_args(sagemaker_session):
    """training_config for a bring-your-own-image Estimator with every
    constructor argument supplied: KMS keys, VPC config, tags, and the extra
    model channel (model_uri/model_channel_name) all land in the request."""
    byo = estimator.Estimator(
        image_name="byo",
        role="{{ role }}",
        train_instance_count="{{ instance_count }}",
        train_instance_type="ml.c4.2xlarge",
        train_volume_size="{{ train_volume_size }}",
        train_volume_kms_key="{{ train_volume_kms_key }}",
        train_max_run="{{ train_max_run }}",
        input_mode='Pipe',
        output_path="{{ output_path }}",
        output_kms_key="{{ output_volume_kms_key }}",
        base_job_name="{{ base_job_name }}",
        tags=[{"{{ key }}": "{{ value }}"}],
        subnets=["{{ subnet }}"],
        security_group_ids=["{{ security_group_ids }}"],
        model_uri="{{ model_uri }}",
        model_channel_name="{{ model_chanel }}",
        sagemaker_session=sagemaker_session)

    byo.set_hyperparameters(epochs=32,
                            feature_dim=1024,
                            mini_batch_size=256)

    data = {'train': "{{ training_data }}"}

    config = airflow.training_config(byo, data)
    expected_config = {
        'AlgorithmSpecification': {
            'TrainingImage': 'byo',
            'TrainingInputMode': 'Pipe'
        },
        'OutputDataConfig': {
            'S3OutputPath': '{{ output_path }}',
            'KmsKeyId': '{{ output_volume_kms_key }}'
        },
        'TrainingJobName': "{{ base_job_name }}-{{ execution_date.strftime('%Y-%m-%d-%H-%M-%S') }}",
        'StoppingCondition': {
            'MaxRuntimeInSeconds': '{{ train_max_run }}'
        },
        'ResourceConfig': {
            'InstanceCount': '{{ instance_count }}',
            'InstanceType': 'ml.c4.2xlarge',
            'VolumeSizeInGB': '{{ train_volume_size }}',
            'VolumeKmsKeyId': '{{ train_volume_kms_key }}'
        },
        'RoleArn': '{{ role }}',
        # Two channels: the training data plus the model channel added by
        # model_uri/model_channel_name.
        'InputDataConfig': [
            {
                'DataSource': {
                    'S3DataSource': {
                        'S3DataDistributionType': 'FullyReplicated',
                        'S3DataType': 'S3Prefix',
                        'S3Uri': '{{ training_data }}'
                    }
                },
                'ChannelName': 'train'
            },
            {
                'DataSource': {
                    'S3DataSource': {
                        'S3DataDistributionType': 'FullyReplicated',
                        'S3DataType': 'S3Prefix',
                        'S3Uri': '{{ model_uri }}'
                    }
                },
                'ContentType': 'application/x-sagemaker-model',
                'InputMode': 'File',
                'ChannelName': '{{ model_chanel }}'
            }
        ],
        'VpcConfig': {
            'Subnets': ['{{ subnet }}'],
            'SecurityGroupIds': ['{{ security_group_ids }}']
        },
        'HyperParameters': {
            'epochs': '32',
            'feature_dim': '1024',
            'mini_batch_size': '256'},
        'Tags': [{'{{ key }}': '{{ value }}'}]
    }

    assert config == expected_config
def test_framework_training_config_required_args(sagemaker_session):
    """training_config for a TensorFlow framework estimator with required
    args only: framework hyperparameters (submit dir, program, region, ...)
    are generated, and an S3Operations upload entry is emitted for the
    entry-point source tarball."""
    tf = tensorflow.TensorFlow(
        entry_point="{{ entry_point }}",
        framework_version='1.10.0',
        training_steps=1000,
        evaluation_steps=100,
        role="{{ role }}",
        train_instance_count="{{ instance_count }}",
        train_instance_type="ml.c4.2xlarge",
        sagemaker_session=sagemaker_session)

    data = "{{ training_data }}"

    config = airflow.training_config(tf, data)
    expected_config = {
        'AlgorithmSpecification': {
            'TrainingImage': '520713654638.dkr.ecr.us-west-2.amazonaws.com/sagemaker-tensorflow:1.10.0-cpu-py2',
            'TrainingInputMode': 'File'
        },
        'OutputDataConfig': {
            'S3OutputPath': 's3://output/'
        },
        'TrainingJobName': "sagemaker-tensorflow-{{ execution_date.strftime('%Y-%m-%d-%H-%M-%S') }}",
        'StoppingCondition': {
            'MaxRuntimeInSeconds': 86400
        },
        'ResourceConfig': {
            'InstanceCount': '{{ instance_count }}',
            'InstanceType': 'ml.c4.2xlarge',
            'VolumeSizeInGB': 30
        },
        'RoleArn': '{{ role }}',
        'InputDataConfig': [{
            'DataSource': {
                'S3DataSource': {
                    'S3DataDistributionType': 'FullyReplicated',
                    'S3DataType': 'S3Prefix',
                    'S3Uri': '{{ training_data }}'
                }
            },
            'ChannelName': 'training'
        }],
        # Framework hyperparameters are JSON-serialized, hence the extra
        # embedded double quotes inside the string values.
        'HyperParameters': {
            'sagemaker_submit_directory': '"s3://output/sagemaker-tensorflow-'
                                          '{{ execution_date.strftime(\'%Y-%m-%d-%H-%M-%S\') }}'
                                          '/source/sourcedir.tar.gz"',
            'sagemaker_program': '"{{ entry_point }}"',
            'sagemaker_enable_cloudwatch_metrics': 'false',
            'sagemaker_container_log_level': '20',
            'sagemaker_job_name': '"sagemaker-tensorflow-{{ execution_date.strftime(\'%Y-%m-%d-%H-%M-%S\') }}"',
            'sagemaker_region': '"us-west-2"',
            'checkpoint_path': '"s3://output/sagemaker-tensorflow-{{ execution_date.strftime(\'%Y-%m-%d-%H-%M-%S\') }}'
                               '/checkpoints"',
            'training_steps': '1000',
            'evaluation_steps': '100',
            'sagemaker_requirements': '""'},
        'S3Operations': {
            'S3Upload': [{
                'Path': '{{ entry_point }}',
                'Bucket': 'output',
                'Key': "sagemaker-tensorflow-{{ execution_date.strftime('%Y-%m-%d-%H-%M-%S') }}"
                       "/source/sourcedir.tar.gz",
                'Tar': True}]
        }
    }

    assert config == expected_config
def test_framework_training_config_all_args(sagemaker_session):
    """training_config for a TensorFlow framework estimator with every
    constructor argument supplied: custom code_location/source_dir flow into
    the submit directory and S3Upload entry; KMS, VPC, and tags appear."""
    tf = tensorflow.TensorFlow(
        entry_point="{{ entry_point }}",
        source_dir="{{ source_dir }}",
        enable_cloudwatch_metrics=False,
        container_log_level="{{ log_level }}",
        code_location="{{ bucket_name }}",
        training_steps=1000,
        evaluation_steps=100,
        checkpoint_path="{{ checkpoint_path }}",
        py_version='py2',
        framework_version='1.10.0',
        requirements_file="",
        role="{{ role }}",
        train_instance_count="{{ instance_count }}",
        train_instance_type="ml.c4.2xlarge",
        train_volume_size="{{ train_volume_size }}",
        train_volume_kms_key="{{ train_volume_kms_key }}",
        train_max_run="{{ train_max_run }}",
        input_mode='Pipe',
        output_path="{{ output_path }}",
        output_kms_key="{{ output_volume_kms_key }}",
        base_job_name="{{ base_job_name }}",
        tags=[{"{{ key }}": "{{ value }}"}],
        subnets=["{{ subnet }}"],
        security_group_ids=["{{ security_group_ids }}"],
        sagemaker_session=sagemaker_session)

    data = "{{ training_data }}"

    config = airflow.training_config(tf, data)
    expected_config = {
        'AlgorithmSpecification': {
            'TrainingImage': '520713654638.dkr.ecr.us-west-2.amazonaws.com/sagemaker-tensorflow:1.10.0-cpu-py2',
            'TrainingInputMode': 'Pipe'
        },
        'OutputDataConfig': {
            'S3OutputPath': '{{ output_path }}',
            'KmsKeyId': '{{ output_volume_kms_key }}'
        },
        'TrainingJobName': "{{ base_job_name }}-{{ execution_date.strftime('%Y-%m-%d-%H-%M-%S') }}",
        'StoppingCondition': {
            'MaxRuntimeInSeconds': '{{ train_max_run }}'
        },
        'ResourceConfig': {
            'InstanceCount': '{{ instance_count }}',
            'InstanceType': 'ml.c4.2xlarge',
            'VolumeSizeInGB': '{{ train_volume_size }}',
            'VolumeKmsKeyId': '{{ train_volume_kms_key }}'
        },
        'RoleArn': '{{ role }}',
        'InputDataConfig': [{
            'DataSource': {
                'S3DataSource': {
                    'S3DataDistributionType': 'FullyReplicated',
                    'S3DataType': 'S3Prefix',
                    'S3Uri': '{{ training_data }}'
                }
            },
            'ChannelName': 'training'
        }],
        'VpcConfig': {
            'Subnets': ['{{ subnet }}'],
            'SecurityGroupIds': ['{{ security_group_ids }}']
        },
        # Framework hyperparameters are JSON-serialized string values.
        'HyperParameters': {
            'sagemaker_submit_directory': '"s3://{{ bucket_name }}/{{ base_job_name }}-'
                                          '{{ execution_date.strftime(\'%Y-%m-%d-%H-%M-%S\') }}'
                                          '/source/sourcedir.tar.gz"',
            'sagemaker_program': '"{{ entry_point }}"',
            'sagemaker_enable_cloudwatch_metrics': 'false',
            'sagemaker_container_log_level': '"{{ log_level }}"',
            'sagemaker_job_name': '"{{ base_job_name }}-{{ execution_date.strftime(\'%Y-%m-%d-%H-%M-%S\') }}"',
            'sagemaker_region': '"us-west-2"',
            'checkpoint_path': '"{{ checkpoint_path }}"',
            'training_steps': '1000',
            'evaluation_steps': '100',
            'sagemaker_requirements': '""'
        },
        'Tags': [{'{{ key }}': '{{ value }}'}],
        'S3Operations': {
            'S3Upload': [{
                'Path': '{{ source_dir }}',
                'Bucket': '{{ bucket_name }}',
                'Key': "{{ base_job_name }}-{{ execution_date.strftime('%Y-%m-%d-%H-%M-%S') }}"
                       "/source/sourcedir.tar.gz",
                'Tar': True}]
        }
    }

    assert config == expected_config
def test_amazon_alg_training_config_required_args(sagemaker_session):
    """training_config for a first-party (NTM) estimator with required args:
    RecordSet drives ShardedByS3Key distribution and the feature_dim
    hyperparameter; defaults fill in the rest."""
    ntm_estimator = ntm.NTM(
        role="{{ role }}",
        num_topics=10,
        train_instance_count="{{ instance_count }}",
        train_instance_type="ml.c4.2xlarge",
        sagemaker_session=sagemaker_session)

    ntm_estimator.epochs = 32
    ntm_estimator.mini_batch_size = 256

    # RecordSet(path, num_records, feature_dim, s3_data_type)
    record = amazon_estimator.RecordSet("{{ record }}", 10000, 100, 'S3Prefix')

    config = airflow.training_config(ntm_estimator, record)
    expected_config = {
        'AlgorithmSpecification': {
            'TrainingImage': '174872318107.dkr.ecr.us-west-2.amazonaws.com/ntm:1',
            'TrainingInputMode': 'File'
        },
        'OutputDataConfig': {
            'S3OutputPath': 's3://output/'
        },
        'TrainingJobName': "ntm-{{ execution_date.strftime('%Y-%m-%d-%H-%M-%S') }}",
        'StoppingCondition': {'MaxRuntimeInSeconds': 86400},
        'ResourceConfig': {
            'InstanceCount': '{{ instance_count }}',
            'InstanceType': 'ml.c4.2xlarge',
            'VolumeSizeInGB': 30
        },
        'RoleArn': '{{ role }}',
        'InputDataConfig': [{
            'DataSource': {
                'S3DataSource': {
                    'S3DataDistributionType': 'ShardedByS3Key',
                    'S3DataType': 'S3Prefix',
                    'S3Uri': '{{ record }}'
                }
            },
            'ChannelName': 'train'
        }],
        'HyperParameters': {
            'num_topics': '10',
            'epochs': '32',
            'mini_batch_size': '256',
            'feature_dim': '100'
        }
    }

    assert config == expected_config
def test_amazon_alg_training_config_all_args(sagemaker_session):
    """training_config for a first-party (NTM) estimator with every
    constructor argument supplied: KMS keys, VPC config, and tags are carried
    through alongside the RecordSet-derived channel."""
    ntm_estimator = ntm.NTM(
        role="{{ role }}",
        num_topics=10,
        train_instance_count="{{ instance_count }}",
        train_instance_type="ml.c4.2xlarge",
        train_volume_size="{{ train_volume_size }}",
        train_volume_kms_key="{{ train_volume_kms_key }}",
        train_max_run="{{ train_max_run }}",
        input_mode='Pipe',
        output_path="{{ output_path }}",
        output_kms_key="{{ output_volume_kms_key }}",
        base_job_name="{{ base_job_name }}",
        tags=[{"{{ key }}": "{{ value }}"}],
        subnets=["{{ subnet }}"],
        security_group_ids=["{{ security_group_ids }}"],
        sagemaker_session=sagemaker_session)

    ntm_estimator.epochs = 32
    ntm_estimator.mini_batch_size = 256

    # RecordSet(path, num_records, feature_dim, s3_data_type)
    record = amazon_estimator.RecordSet("{{ record }}", 10000, 100, 'S3Prefix')

    config = airflow.training_config(ntm_estimator, record)
    expected_config = {
        'AlgorithmSpecification': {
            'TrainingImage': '174872318107.dkr.ecr.us-west-2.amazonaws.com/ntm:1',
            'TrainingInputMode': 'Pipe'
        },
        'OutputDataConfig': {
            'S3OutputPath': '{{ output_path }}',
            'KmsKeyId': '{{ output_volume_kms_key }}'
        },
        'TrainingJobName': "{{ base_job_name }}-{{ execution_date.strftime('%Y-%m-%d-%H-%M-%S') }}",
        'StoppingCondition': {
            'MaxRuntimeInSeconds': '{{ train_max_run }}'
        },
        'ResourceConfig': {
            'InstanceCount': '{{ instance_count }}',
            'InstanceType': 'ml.c4.2xlarge',
            'VolumeSizeInGB': '{{ train_volume_size }}',
            'VolumeKmsKeyId': '{{ train_volume_kms_key }}'
        },
        'RoleArn': '{{ role }}',
        'InputDataConfig': [{
            'DataSource': {
                'S3DataSource': {
                    'S3DataDistributionType': 'ShardedByS3Key',
                    'S3DataType': 'S3Prefix',
                    'S3Uri': '{{ record }}'
                }
            },
            'ChannelName': 'train'
        }],
        'VpcConfig': {
            'Subnets': ['{{ subnet }}'],
            'SecurityGroupIds': ['{{ security_group_ids }}']
        },
        'HyperParameters': {
            'num_topics': '10',
            'epochs': '32',
            'mini_batch_size': '256',
            'feature_dim': '100'
        },
        'Tags': [{'{{ key }}': '{{ value }}'}]
    }

    assert config == expected_config
| 36.986696
| 119
| 0.525448
| 1,406
| 16,681
| 5.949502
| 0.162162
| 0.038255
| 0.021518
| 0.03419
| 0.837418
| 0.819008
| 0.799402
| 0.763897
| 0.729348
| 0.719904
| 0
| 0.026639
| 0.315868
| 16,681
| 450
| 120
| 37.068889
| 0.706362
| 0.032672
| 0
| 0.708955
| 0
| 0.00995
| 0.399988
| 0.101718
| 0
| 0
| 0
| 0
| 0.014925
| 1
| 0.017413
| false
| 0
| 0.017413
| 0
| 0.037313
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5739da1479c92343073c87c405fa2c31d554fafb
| 60
|
py
|
Python
|
Web Based Solution/healingpaws.py
|
LeifYaoYuXiang/seproject_healingpaws
|
bac63011a669cba522a066cfdf233428e37443fc
|
[
"MIT"
] | null | null | null |
Web Based Solution/healingpaws.py
|
LeifYaoYuXiang/seproject_healingpaws
|
bac63011a669cba522a066cfdf233428e37443fc
|
[
"MIT"
] | null | null | null |
Web Based Solution/healingpaws.py
|
LeifYaoYuXiang/seproject_healingpaws
|
bac63011a669cba522a066cfdf233428e37443fc
|
[
"MIT"
] | null | null | null |
from hospitalapp import app
from hospitalapp import routes
| 15
| 30
| 0.85
| 8
| 60
| 6.375
| 0.625
| 0.588235
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 60
| 3
| 31
| 20
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
93ba2c2bee6e61a5f6c0696c4ef265075d74783e
| 12,110
|
py
|
Python
|
lang/python/github/com/metaprov/modelaapi/services/postmortem/v1/postmortem_pb2_grpc.py
|
metaprov/modeldapi
|
ee05693832051dcd990ee4f061715d7ae0787340
|
[
"Apache-2.0"
] | 5
|
2022-02-18T03:40:10.000Z
|
2022-03-01T16:11:24.000Z
|
lang/python/github/com/metaprov/modelaapi/services/postmortem/v1/postmortem_pb2_grpc.py
|
metaprov/modeldapi
|
ee05693832051dcd990ee4f061715d7ae0787340
|
[
"Apache-2.0"
] | 1
|
2022-01-07T19:59:25.000Z
|
2022-02-04T01:21:14.000Z
|
lang/python/github/com/metaprov/modelaapi/services/postmortem/v1/postmortem_pb2_grpc.py
|
metaprov/modeldapi
|
ee05693832051dcd990ee4f061715d7ae0787340
|
[
"Apache-2.0"
] | 1
|
2022-03-25T10:21:43.000Z
|
2022-03-25T10:21:43.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from github.com.metaprov.modelaapi.services.postmortem.v1 import postmortem_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2
class PostMortemServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ListPostMortems = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/ListPostMortems',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.ListPostMortemsRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.ListPostMortemsResponse.FromString,
)
self.CreatePostMortem = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/CreatePostMortem',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.CreatePostMortemRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.CreatePostMortemResponse.FromString,
)
self.GetPostMortem = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/GetPostMortem',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.GetPostMortemRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.GetPostMortemResponse.FromString,
)
self.UpdatePostMortem = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/UpdatePostMortem',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.UpdatePostMortemRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.UpdatePostMortemResponse.FromString,
)
self.DeletePostMortem = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/DeletePostMortem',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.DeletePostMortemRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.DeletePostMortemResponse.FromString,
)
class PostMortemServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def ListPostMortems(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreatePostMortem(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPostMortem(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdatePostMortem(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeletePostMortem(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_PostMortemServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'ListPostMortems': grpc.unary_unary_rpc_method_handler(
servicer.ListPostMortems,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.ListPostMortemsRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.ListPostMortemsResponse.SerializeToString,
),
'CreatePostMortem': grpc.unary_unary_rpc_method_handler(
servicer.CreatePostMortem,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.CreatePostMortemRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.CreatePostMortemResponse.SerializeToString,
),
'GetPostMortem': grpc.unary_unary_rpc_method_handler(
servicer.GetPostMortem,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.GetPostMortemRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.GetPostMortemResponse.SerializeToString,
),
'UpdatePostMortem': grpc.unary_unary_rpc_method_handler(
servicer.UpdatePostMortem,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.UpdatePostMortemRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.UpdatePostMortemResponse.SerializeToString,
),
'DeletePostMortem': grpc.unary_unary_rpc_method_handler(
servicer.DeletePostMortem,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.DeletePostMortemRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.DeletePostMortemResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class PostMortemService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def ListPostMortems(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/ListPostMortems',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.ListPostMortemsRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.ListPostMortemsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreatePostMortem(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/CreatePostMortem',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.CreatePostMortemRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.CreatePostMortemResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetPostMortem(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/GetPostMortem',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.GetPostMortemRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.GetPostMortemResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdatePostMortem(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/UpdatePostMortem',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.UpdatePostMortemRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.UpdatePostMortemResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeletePostMortem(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.postmortem.v1.PostMortemService/DeletePostMortem',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.DeletePostMortemRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_postmortem_dot_v1_dot_postmortem__pb2.DeletePostMortemResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 60.854271
| 180
| 0.750867
| 1,233
| 12,110
| 6.875101
| 0.089213
| 0.095081
| 0.043883
| 0.054854
| 0.884157
| 0.884157
| 0.884157
| 0.856317
| 0.842515
| 0.811136
| 0
| 0.007644
| 0.189761
| 12,110
| 198
| 181
| 61.161616
| 0.856298
| 0.056647
| 0
| 0.493827
| 1
| 0
| 0.109985
| 0.083018
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.012346
| 0.030864
| 0.135802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
93c402e1dc780d58cf7fc5287ed856d948c80640
| 111
|
py
|
Python
|
lib/utils/__init__.py
|
Razerl/TRN.pytorch
|
f6b9054f0ed80693b45a61066f9ab9a20cf0884e
|
[
"MIT"
] | 63
|
2019-11-20T00:28:43.000Z
|
2022-03-23T03:45:13.000Z
|
lib/utils/__init__.py
|
yuminko/TRN.pytorch
|
f2a8a1ff59679c6af58360066512e3e0b6926880
|
[
"MIT"
] | 17
|
2019-12-11T11:23:36.000Z
|
2022-03-13T08:13:31.000Z
|
lib/utils/__init__.py
|
yuminko/TRN.pytorch
|
f2a8a1ff59679c6af58360066512e3e0b6926880
|
[
"MIT"
] | 18
|
2019-12-24T06:49:54.000Z
|
2022-03-23T09:14:41.000Z
|
from .net_utils import *
from .eval_utils import *
from .logger import *
from .multicrossentropy_loss import *
| 22.2
| 37
| 0.783784
| 15
| 111
| 5.6
| 0.533333
| 0.357143
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144144
| 111
| 4
| 38
| 27.75
| 0.884211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
19564efbdcdd66588e98aa56174fcf66b1ed59b8
| 9,373
|
py
|
Python
|
wpa_project/student_app/tests/tests_student.py
|
s-amundson/wpa_2p1
|
43deb859123e5ef2eab3652e403c8d2f53d43b77
|
[
"MIT"
] | 1
|
2022-01-03T02:46:34.000Z
|
2022-01-03T02:46:34.000Z
|
wpa_project/student_app/tests/tests_student.py
|
s-amundson/wpa_2p1
|
43deb859123e5ef2eab3652e403c8d2f53d43b77
|
[
"MIT"
] | 31
|
2021-12-29T17:43:06.000Z
|
2022-03-25T01:03:17.000Z
|
wpa_project/student_app/tests/tests_student.py
|
s-amundson/wpa_2p1
|
43deb859123e5ef2eab3652e403c8d2f53d43b77
|
[
"MIT"
] | null | null | null |
import logging
import json
from django.core import mail
from django.test import TestCase, Client
from django.urls import reverse
from ..models import Student, User
logger = logging.getLogger(__name__)
class TestsStudentAPI(TestCase):
fixtures = ['f1']
def setUp(self):
# Every test needs a client.
self.client = Client()
def test_get_student(self):
self.test_user = User.objects.get(pk=1)
self.client.force_login(self.test_user)
response = self.client.get(reverse('registration:student_api'), secure=True)
self.assertEqual(response.status_code, 200)
d = { "first_name": "", "last_name": "", "dob": None}
content = json.loads(response.content)
logging.debug(content)
for k,v in d.items():
logging.debug(k)
self.assertEqual(content[k], v)
def test_get_student_id(self):
self.test_user = User.objects.get(pk=1)
self.client.force_login(self.test_user)
response = self.client.get(reverse('registration:student_api', kwargs={'student_id': 1}), secure=True)
self.assertEqual(response.status_code, 200)
d = { "first_name": "Emily", "last_name": "Conlan", "dob": "1995-12-03"}
content = json.loads(response.content)
for k,v in d.items():
self.assertEqual(content[k], v)
def test_get_student_id_not_authorized(self):
self.test_user = User.objects.get(pk=4)
self.client.force_login(self.test_user)
response = self.client.get(reverse('registration:student_api', kwargs={'student_id': 1}), secure=True)
self.assertEqual(response.status_code, 400)
content = json.loads(response.content)
logging.debug(content)
self.assertEqual(content['error'], 'Not Authorized')
def test_post_add_student(self):
self.test_user = User.objects.get(pk=4)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Conlan", "dob": "1995-12-03"}
response = self.client.post(reverse('registration:student_api'), d, secure=True)
student = Student.objects.last()
self.assertEqual(student.first_name, d['first_name'])
self.assertEqual(student.last_name, d['last_name'])
def test_post_student_id(self):
self.test_user = User.objects.get(pk=5)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Conlan", "dob": "1995-12-03"}
response = self.client.post(reverse('registration:student_api', kwargs={'student_id': 6}), d, secure=True)
student = Student.objects.get(pk=6)
self.assertEqual(student.first_name, d['first_name'])
self.assertEqual(student.last_name, d['last_name'])
def test_post_student_id_staff(self):
self.test_user = User.objects.get(pk=1)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Conlan", "dob": "1995-12-03"}
response = self.client.post(reverse('registration:student_api', kwargs={'student_id': 6}), d, secure=True)
student = Student.objects.get(pk=6)
self.assertEqual(student.first_name, d['first_name'])
self.assertEqual(student.last_name, d['last_name'])
def test_post_student_id_invalid(self):
self.test_user = User.objects.get(pk=4)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Wells", "dob": "1995-12-03"}
response = self.client.post(reverse('registration:student_api', kwargs={'student_id': 1}), d, secure=True)
self.assertEqual(response.status_code, 404)
student = Student.objects.get(pk=1)
self.assertNotEqual(student.first_name, d['first_name'])
self.assertNotEqual(student.last_name, d['last_name'])
def test_post_student_errors(self):
self.test_user = User.objects.get(pk=4)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Wells", }
response = self.client.post(reverse('registration:student_api'), d, secure=True)
self.assertEqual(response.status_code, 200)
content = json.loads(response.content)
logging.debug(content)
self.assertEqual(content['error'], {'dob': ['This field is required.']})
student = Student.objects.all()
self.assertEqual(len(student), 6)
def test_new_user_new_student(self):
u = User(username='testuser', email='test@example.com', password='password')
u.save()
self.client.force_login(u)
d = {"first_name": "Kiley", "last_name": "Conlan", "dob": "1995-12-03", "email": "test@example.com"}
response = self.client.post(reverse('registration:student_api'), d, secure=True)
student = Student.objects.last()
self.assertEqual(student.first_name, d['first_name'])
self.assertEqual(student.last_name, d['last_name'])
self.assertEqual(student.email, 'test@example.com')
def test_new_user_existing_student(self):
self.test_user = User.objects.get(pk=5)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Conlan", "dob": "1995-12-03", "email": "test@example.com"}
response = self.client.post(reverse('registration:student_api'), d, secure=True)
student = Student.objects.last()
self.assertEqual(student.first_name, d['first_name'])
self.assertEqual(student.last_name, d['last_name'])
self.assertEqual(student.email, 'test@example.com')
self.assertEqual(mail.outbox[0].subject, 'Woodley Park Archers Invitation')
class TestsStudent(TestCase):
fixtures = ['f1']
def setUp(self):
# Every test needs a client.
self.client = Client()
self.test_user = User.objects.get(pk=1)
self.client.force_login(self.test_user)
def test_get_student(self):
response = self.client.get(reverse('registration:add_student'), secure=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('student_app/forms/student.html')
def test_get_student_id(self):
response = self.client.get(reverse('registration:add_student', kwargs={'student_id': 1}), secure=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('student_app/forms/student.html')
def test_get_student_id_not_authorized(self):
self.test_user = User.objects.get(pk=4)
self.client.force_login(self.test_user)
response = self.client.get(reverse('registration:add_student', kwargs={'student_id': 1}), secure=True)
self.assertEqual(response.status_code, 404)
def test_post_add_student(self):
self.test_user = User.objects.get(pk=4)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Conlan", "dob": "1995-12-03"}
response = self.client.post(reverse('registration:add_student'), d, secure=True)
logging.debug(len(Student.objects.all()))
student = Student.objects.get(pk=7)
self.assertEqual(student.first_name, d['first_name'])
self.assertEqual(student.last_name, d['last_name'])
def test_post_student_id(self):
self.test_user = User.objects.get(pk=5)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Conlan", "dob": "1995-12-03"}
response = self.client.post(reverse('registration:add_student', kwargs={'student_id': 6}), d, secure=True)
student = Student.objects.get(pk=6)
self.assertEqual(student.first_name, d['first_name'])
self.assertEqual(student.last_name, d['last_name'])
def test_post_student_id_staff(self):
self.test_user = User.objects.get(pk=1)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Conlan", "dob": "1995-12-03"}
response = self.client.post(reverse('registration:add_student', kwargs={'student_id': 6}), d, secure=True)
student = Student.objects.get(pk=6)
self.assertEqual(student.first_name, d['first_name'])
self.assertEqual(student.last_name, d['last_name'])
def test_post_student_id_invalid(self):
self.test_user = User.objects.get(pk=4)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Wells", "dob": "1995-12-03"}
response = self.client.post(reverse('registration:add_student', kwargs={'student_id': 1}), d, secure=True)
self.assertEqual(response.status_code, 404)
student = Student.objects.get(pk=1)
self.assertNotEqual(student.first_name, d['first_name'])
self.assertNotEqual(student.last_name, d['last_name'])
def test_post_student_errors(self):
self.test_user = User.objects.get(pk=4)
self.client.force_login(self.test_user)
d = {"first_name": "Kiley", "last_name": "Wells", }
response = self.client.post(reverse('registration:add_student'), d, secure=True)
self.assertEqual(response.status_code, 200)
student = Student.objects.all()
self.assertEqual(len(student), 6)
def test_get_student_table(self):
response = self.client.get(reverse('registration:student_table'), secure=True)
self.assertEqual(response.status_code, 200)
| 47.338384
| 114
| 0.663822
| 1,243
| 9,373
| 4.822204
| 0.087691
| 0.063397
| 0.064064
| 0.056723
| 0.915249
| 0.902402
| 0.902402
| 0.884218
| 0.867701
| 0.843677
| 0
| 0.02091
| 0.188734
| 9,373
| 197
| 115
| 47.57868
| 0.767359
| 0.005655
| 0
| 0.826347
| 0
| 0
| 0.16638
| 0.055603
| 0
| 0
| 0
| 0
| 0.251497
| 1
| 0.125749
| false
| 0.005988
| 0.035928
| 0
| 0.185629
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
19708da9c4242e70678a46b5107096dce2b3122e
| 6,821
|
py
|
Python
|
loldib/getratings/models/NA/na_renekton/na_renekton_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_renekton/na_renekton_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_renekton/na_renekton_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Renekton_Jng_Aatrox(Ratings):
pass
class NA_Renekton_Jng_Ahri(Ratings):
pass
class NA_Renekton_Jng_Akali(Ratings):
pass
class NA_Renekton_Jng_Alistar(Ratings):
pass
class NA_Renekton_Jng_Amumu(Ratings):
pass
class NA_Renekton_Jng_Anivia(Ratings):
pass
class NA_Renekton_Jng_Annie(Ratings):
pass
class NA_Renekton_Jng_Ashe(Ratings):
pass
class NA_Renekton_Jng_AurelionSol(Ratings):
pass
class NA_Renekton_Jng_Azir(Ratings):
pass
class NA_Renekton_Jng_Bard(Ratings):
pass
class NA_Renekton_Jng_Blitzcrank(Ratings):
pass
class NA_Renekton_Jng_Brand(Ratings):
pass
class NA_Renekton_Jng_Braum(Ratings):
pass
class NA_Renekton_Jng_Caitlyn(Ratings):
pass
class NA_Renekton_Jng_Camille(Ratings):
pass
class NA_Renekton_Jng_Cassiopeia(Ratings):
pass
class NA_Renekton_Jng_Chogath(Ratings):
pass
class NA_Renekton_Jng_Corki(Ratings):
pass
class NA_Renekton_Jng_Darius(Ratings):
pass
class NA_Renekton_Jng_Diana(Ratings):
pass
class NA_Renekton_Jng_Draven(Ratings):
pass
class NA_Renekton_Jng_DrMundo(Ratings):
pass
class NA_Renekton_Jng_Ekko(Ratings):
pass
class NA_Renekton_Jng_Elise(Ratings):
pass
class NA_Renekton_Jng_Evelynn(Ratings):
pass
class NA_Renekton_Jng_Ezreal(Ratings):
pass
class NA_Renekton_Jng_Fiddlesticks(Ratings):
pass
class NA_Renekton_Jng_Fiora(Ratings):
pass
class NA_Renekton_Jng_Fizz(Ratings):
pass
class NA_Renekton_Jng_Galio(Ratings):
pass
class NA_Renekton_Jng_Gangplank(Ratings):
pass
class NA_Renekton_Jng_Garen(Ratings):
pass
class NA_Renekton_Jng_Gnar(Ratings):
pass
class NA_Renekton_Jng_Gragas(Ratings):
pass
class NA_Renekton_Jng_Graves(Ratings):
pass
class NA_Renekton_Jng_Hecarim(Ratings):
pass
class NA_Renekton_Jng_Heimerdinger(Ratings):
pass
class NA_Renekton_Jng_Illaoi(Ratings):
pass
class NA_Renekton_Jng_Irelia(Ratings):
pass
class NA_Renekton_Jng_Ivern(Ratings):
pass
class NA_Renekton_Jng_Janna(Ratings):
pass
class NA_Renekton_Jng_JarvanIV(Ratings):
pass
class NA_Renekton_Jng_Jax(Ratings):
pass
class NA_Renekton_Jng_Jayce(Ratings):
pass
class NA_Renekton_Jng_Jhin(Ratings):
pass
class NA_Renekton_Jng_Jinx(Ratings):
pass
class NA_Renekton_Jng_Kalista(Ratings):
pass
class NA_Renekton_Jng_Karma(Ratings):
pass
class NA_Renekton_Jng_Karthus(Ratings):
pass
class NA_Renekton_Jng_Kassadin(Ratings):
pass
class NA_Renekton_Jng_Katarina(Ratings):
pass
class NA_Renekton_Jng_Kayle(Ratings):
pass
class NA_Renekton_Jng_Kayn(Ratings):
pass
class NA_Renekton_Jng_Kennen(Ratings):
pass
class NA_Renekton_Jng_Khazix(Ratings):
pass
class NA_Renekton_Jng_Kindred(Ratings):
pass
class NA_Renekton_Jng_Kled(Ratings):
pass
class NA_Renekton_Jng_KogMaw(Ratings):
pass
class NA_Renekton_Jng_Leblanc(Ratings):
pass
class NA_Renekton_Jng_LeeSin(Ratings):
pass
class NA_Renekton_Jng_Leona(Ratings):
pass
class NA_Renekton_Jng_Lissandra(Ratings):
pass
class NA_Renekton_Jng_Lucian(Ratings):
pass
class NA_Renekton_Jng_Lulu(Ratings):
pass
class NA_Renekton_Jng_Lux(Ratings):
pass
class NA_Renekton_Jng_Malphite(Ratings):
pass
class NA_Renekton_Jng_Malzahar(Ratings):
pass
class NA_Renekton_Jng_Maokai(Ratings):
pass
class NA_Renekton_Jng_MasterYi(Ratings):
pass
class NA_Renekton_Jng_MissFortune(Ratings):
pass
class NA_Renekton_Jng_MonkeyKing(Ratings):
pass
class NA_Renekton_Jng_Mordekaiser(Ratings):
pass
class NA_Renekton_Jng_Morgana(Ratings):
pass
class NA_Renekton_Jng_Nami(Ratings):
pass
class NA_Renekton_Jng_Nasus(Ratings):
pass
class NA_Renekton_Jng_Nautilus(Ratings):
pass
class NA_Renekton_Jng_Nidalee(Ratings):
pass
class NA_Renekton_Jng_Nocturne(Ratings):
pass
class NA_Renekton_Jng_Nunu(Ratings):
pass
class NA_Renekton_Jng_Olaf(Ratings):
pass
class NA_Renekton_Jng_Orianna(Ratings):
pass
class NA_Renekton_Jng_Ornn(Ratings):
pass
class NA_Renekton_Jng_Pantheon(Ratings):
pass
class NA_Renekton_Jng_Poppy(Ratings):
pass
class NA_Renekton_Jng_Quinn(Ratings):
pass
class NA_Renekton_Jng_Rakan(Ratings):
pass
class NA_Renekton_Jng_Rammus(Ratings):
pass
class NA_Renekton_Jng_RekSai(Ratings):
pass
class NA_Renekton_Jng_Renekton(Ratings):
pass
class NA_Renekton_Jng_Rengar(Ratings):
pass
class NA_Renekton_Jng_Riven(Ratings):
pass
class NA_Renekton_Jng_Rumble(Ratings):
pass
class NA_Renekton_Jng_Ryze(Ratings):
pass
class NA_Renekton_Jng_Sejuani(Ratings):
pass
class NA_Renekton_Jng_Shaco(Ratings):
pass
class NA_Renekton_Jng_Shen(Ratings):
pass
class NA_Renekton_Jng_Shyvana(Ratings):
pass
class NA_Renekton_Jng_Singed(Ratings):
pass
class NA_Renekton_Jng_Sion(Ratings):
pass
class NA_Renekton_Jng_Sivir(Ratings):
pass
class NA_Renekton_Jng_Skarner(Ratings):
pass
class NA_Renekton_Jng_Sona(Ratings):
pass
class NA_Renekton_Jng_Soraka(Ratings):
pass
class NA_Renekton_Jng_Swain(Ratings):
pass
class NA_Renekton_Jng_Syndra(Ratings):
pass
class NA_Renekton_Jng_TahmKench(Ratings):
pass
class NA_Renekton_Jng_Taliyah(Ratings):
pass
class NA_Renekton_Jng_Talon(Ratings):
pass
class NA_Renekton_Jng_Taric(Ratings):
pass
class NA_Renekton_Jng_Teemo(Ratings):
pass
class NA_Renekton_Jng_Thresh(Ratings):
pass
class NA_Renekton_Jng_Tristana(Ratings):
pass
class NA_Renekton_Jng_Trundle(Ratings):
pass
class NA_Renekton_Jng_Tryndamere(Ratings):
pass
class NA_Renekton_Jng_TwistedFate(Ratings):
pass
class NA_Renekton_Jng_Twitch(Ratings):
pass
class NA_Renekton_Jng_Udyr(Ratings):
pass
class NA_Renekton_Jng_Urgot(Ratings):
pass
class NA_Renekton_Jng_Varus(Ratings):
pass
class NA_Renekton_Jng_Vayne(Ratings):
pass
class NA_Renekton_Jng_Veigar(Ratings):
pass
class NA_Renekton_Jng_Velkoz(Ratings):
pass
class NA_Renekton_Jng_Vi(Ratings):
pass
class NA_Renekton_Jng_Viktor(Ratings):
pass
class NA_Renekton_Jng_Vladimir(Ratings):
pass
class NA_Renekton_Jng_Volibear(Ratings):
pass
class NA_Renekton_Jng_Warwick(Ratings):
pass
class NA_Renekton_Jng_Xayah(Ratings):
pass
class NA_Renekton_Jng_Xerath(Ratings):
pass
class NA_Renekton_Jng_XinZhao(Ratings):
pass
class NA_Renekton_Jng_Yasuo(Ratings):
pass
class NA_Renekton_Jng_Yorick(Ratings):
pass
class NA_Renekton_Jng_Zac(Ratings):
pass
class NA_Renekton_Jng_Zed(Ratings):
pass
class NA_Renekton_Jng_Ziggs(Ratings):
pass
class NA_Renekton_Jng_Zilean(Ratings):
pass
class NA_Renekton_Jng_Zyra(Ratings):
pass
| 16.357314
| 46
| 0.776133
| 972
| 6,821
| 5.020576
| 0.151235
| 0.197951
| 0.42418
| 0.509016
| 0.814139
| 0.814139
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162879
| 6,821
| 416
| 47
| 16.396635
| 0.854641
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
5fe48899348b7cdc57e92c266b467689d19b8f1b
| 9,460
|
py
|
Python
|
dsc_erpnext/dsc_erpnext/doctype/digital_signature_document/digital_signature_document.py
|
8848digital/dsc-erpnext
|
b82c01d4d21a376ad79ba8c0de95eb521859ddf2
|
[
"MIT"
] | null | null | null |
dsc_erpnext/dsc_erpnext/doctype/digital_signature_document/digital_signature_document.py
|
8848digital/dsc-erpnext
|
b82c01d4d21a376ad79ba8c0de95eb521859ddf2
|
[
"MIT"
] | null | null | null |
dsc_erpnext/dsc_erpnext/doctype/digital_signature_document/digital_signature_document.py
|
8848digital/dsc-erpnext
|
b82c01d4d21a376ad79ba8c0de95eb521859ddf2
|
[
"MIT"
] | 1
|
2021-12-28T18:35:54.000Z
|
2021-12-28T18:35:54.000Z
|
# Copyright (c) 2021, Nirali Satapara and contributors
# For license information, please see license.txt
import frappe
from frappe.model.document import Document
class DigitalSignatureDocument(Document):
    """Controller for the Digital Signature Document doctype.

    On validate it lazily builds (once) a frappe Workflow named
    "DSC <document_type>" that chains up to four signing roles
    (``role_1`` .. ``role_4``): each role's "DSC n" action moves the
    document from the previous "DSC n Completed" state into the next
    signing state, and every stage offers a "Cancel" action.
    """

    def validate(self):
        # Build the workflow only once, and only when the user has not
        # opted out via the do_not_create_workflow flag.
        if not self.workflow and not self.do_not_create_workflow:
            self.create_workflow()

    @staticmethod
    def _ensure_workflow_state(state_name):
        # Idempotently create a "Workflow State" master record.
        if not frappe.db.exists("Workflow State", state_name):
            state_doc = frappe.new_doc("Workflow State")
            state_doc.workflow_state_name = state_name
            state_doc.save()

    @staticmethod
    def _ensure_workflow_action(action_name):
        # Idempotently create a "Workflow Action Master" record.
        if not frappe.db.exists("Workflow Action Master", action_name):
            action_doc = frappe.new_doc("Workflow Action Master")
            action_doc.workflow_action_name = action_name
            action_doc.save()

    def create_workflow(self):
        """Create (or reuse) the "DSC <document_type>" Workflow.

        Raises (via ``frappe.throw``) when no signing role is selected.
        NOTE(review): indentation was reconstructed from a source with
        whitespace stripped; per-role nesting should be confirmed against
        the original repository.
        """
        workflow_state_list = ["DSC 1 Completed", "DSC 2 Completed", "DSC 3 Completed", "DSC Completed"]
        workflow_internal_list = ["DSC 1 Signing", "DSC 2 Signing", "DSC 3 Signing", "DSC Signing"]
        workflow_action_list = ["DSC 1", "DSC 2", "DSC 3", "DSC 4"]
        default_workflow_state = ["DSC Signing", "DSC Completed", "Submitted", "Cancelled"]

        # Make sure the shared states/actions exist before wiring them up.
        for state in default_workflow_state:
            self._ensure_workflow_state(state)
        self._ensure_workflow_action("Cancel")

        # Reuse a previously generated workflow for this document type.
        if not self.workflow and frappe.db.exists("Workflow", f"DSC {self.document_type}"):
            self.workflow = f"DSC {self.document_type}"
        if not self.role_1 and not self.role_2 and not self.role_3 and not self.role_4:
            frappe.throw(f"Row {self.idx} : Please select role to create workflow")
        if self.document_type and not frappe.db.exists("Workflow", f"DSC {self.document_type}"):
            workflow = frappe.new_doc("Workflow")
            workflow.workflow_name = f"DSC {self.document_type}"
            workflow.document_type = "Digital Signature"
            workflow.sent_email_alert = 0
            if self.role_1:
                next_state = next_internal_state = workflow_internal_list[0]
                self._ensure_workflow_state(workflow_state_list[0])
                self._ensure_workflow_state(workflow_internal_list[0])
                self._ensure_workflow_action(workflow_action_list[0])
                workflow.append("states", {
                    "state": "Submitted",
                    "doc_status": 1,
                    "allow_edit": self.role_1
                })
                # NOTE(review): appended before the single-role branch below
                # reassigns next_internal_state, so this transition always
                # targets "DSC 1 Signing" — preserved from the original.
                workflow.append("transitions", {
                    "state": "Submitted",
                    "action": workflow_action_list[0],
                    "next_state": next_internal_state,
                    "allowed": self.role_1
                })
                workflow.append("transitions", {
                    "state": "Submitted",
                    "action": "Cancel",
                    "next_state": "Cancelled",
                    "allowed": self.role_1
                })
                if not self.role_2 and not self.role_3 and not self.role_4:
                    # role_1 is the only signer: go straight to the
                    # terminal signing/completed states.
                    next_state = "DSC Completed"
                    next_internal_state = "DSC Signing"
                    workflow.append("states", {
                        "state": "DSC Signing",
                        "doc_status": 1,
                        "allow_edit": self.role_1
                    })
                    workflow.append("states", {
                        "state": "DSC Completed",
                        "doc_status": 1,
                        "allow_edit": self.role_1
                    })
                    workflow.append("transitions", {
                        "state": next_state,
                        "action": "Cancel",
                        "next_state": "Cancelled",
                        "allowed": self.role_1
                    })
                else:
                    workflow.append("states", {
                        "state": workflow_internal_list[0],
                        "doc_status": 1,
                        "allow_edit": self.role_1
                    })
                    workflow.append("states", {
                        "state": workflow_state_list[0],
                        "doc_status": 1,
                        "allow_edit": self.role_1
                    })
                workflow.append("states", {
                    "state": "Cancelled",
                    "doc_status": 2,
                    "allow_edit": self.role_1
                })
            if self.role_2:
                next_state = next_internal_state = workflow_internal_list[1]
                if not self.role_3 and not self.role_4:
                    # role_2 is the last signer: terminal states instead of
                    # the numbered intermediate ones.
                    next_state = "DSC Completed"
                    next_internal_state = "DSC Signing"
                    workflow.append("states", {
                        "state": "DSC Signing",
                        "doc_status": 1,
                        "allow_edit": self.role_2
                    })
                    workflow.append("states", {
                        "state": "DSC Completed",
                        "doc_status": 1,
                        "allow_edit": self.role_2
                    })
                    workflow.append("transitions", {
                        "state": next_state,
                        "action": "Cancel",
                        "next_state": "Cancelled",
                        "allowed": self.role_2
                    })
                else:
                    workflow.append("states", {
                        "state": workflow_internal_list[1],
                        "doc_status": 1,
                        "allow_edit": self.role_2
                    })
                    workflow.append("states", {
                        "state": workflow_state_list[1],
                        "doc_status": 1,
                        "allow_edit": self.role_2
                    })
                workflow.append("states", {
                    "state": "Cancelled",
                    "doc_status": 2,
                    "allow_edit": self.role_2
                })
                self._ensure_workflow_state(workflow_state_list[1])
                self._ensure_workflow_state(workflow_internal_list[1])
                self._ensure_workflow_action(workflow_action_list[1])
                # Hand-off from role_1's completed state into role_2's stage.
                workflow.append("transitions", {
                    "state": workflow_state_list[0],
                    "action": workflow_action_list[1],
                    "next_state": next_internal_state,
                    "allowed": self.role_2
                })
                workflow.append("transitions", {
                    "state": workflow_state_list[0],
                    "action": "Cancel",
                    "next_state": "Cancelled",
                    "allowed": self.role_2
                })
            if self.role_3:
                next_state = next_internal_state = workflow_internal_list[2]
                if not self.role_4:
                    next_state = "DSC Completed"
                    next_internal_state = "DSC Signing"
                    workflow.append("states", {
                        "state": "DSC Signing",
                        "doc_status": 1,
                        "allow_edit": self.role_3
                    })
                    workflow.append("states", {
                        "state": "DSC Completed",
                        "doc_status": 1,
                        "allow_edit": self.role_3
                    })
                    workflow.append("transitions", {
                        "state": next_state,
                        "action": "Cancel",
                        "next_state": "Cancelled",
                        "allowed": self.role_3
                    })
                else:
                    workflow.append("states", {
                        "state": workflow_internal_list[2],
                        "doc_status": 1,
                        "allow_edit": self.role_3
                    })
                    workflow.append("states", {
                        "state": workflow_state_list[2],
                        "doc_status": 1,
                        "allow_edit": self.role_3
                    })
                workflow.append("states", {
                    "state": "Cancelled",
                    "doc_status": 2,
                    "allow_edit": self.role_3
                })
                self._ensure_workflow_state(workflow_state_list[2])
                self._ensure_workflow_state(workflow_internal_list[2])
                self._ensure_workflow_action(workflow_action_list[2])
                workflow.append("transitions", {
                    "state": workflow_state_list[1],
                    "action": workflow_action_list[2],
                    "next_state": next_internal_state,
                    "allowed": self.role_3
                })
                workflow.append("transitions", {
                    "state": workflow_state_list[1],
                    "action": "Cancel",
                    "next_state": "Cancelled",
                    "allowed": self.role_3
                })
            if self.role_4:
                # role_4 is always terminal, so there is no intermediate branch.
                next_state = next_internal_state = workflow_internal_list[3]
                workflow.append("states", {
                    "state": "DSC Signing",
                    "doc_status": 1,
                    "allow_edit": self.role_4
                })
                workflow.append("states", {
                    "state": "DSC Completed",
                    "doc_status": 1,
                    "allow_edit": self.role_4
                })
                workflow.append("transitions", {
                    "state": next_state,
                    "action": "Cancel",
                    "next_state": "Cancelled",
                    "allowed": self.role_4
                })
                self._ensure_workflow_state(workflow_state_list[3])
                self._ensure_workflow_state(workflow_internal_list[3])
                self._ensure_workflow_action(workflow_action_list[3])
                workflow.append("states", {
                    "state": "Cancelled",
                    "doc_status": 2,
                    "allow_edit": self.role_4
                })
                workflow.append("transitions", {
                    "state": workflow_state_list[2],
                    "action": workflow_action_list[3],
                    "next_state": next_internal_state,
                    "allowed": self.role_4
                })
                workflow.append("transitions", {
                    "state": workflow_state_list[2],
                    "action": "Cancel",
                    "next_state": "Cancelled",
                    "allowed": self.role_4
                })
            workflow.save(ignore_permissions=True)
            self.workflow = workflow.name
| 32.176871
| 95
| 0.655497
| 1,213
| 9,460
| 4.843364
| 0.063479
| 0.104
| 0.064681
| 0.080851
| 0.84834
| 0.836426
| 0.822979
| 0.801872
| 0.722383
| 0.64766
| 0
| 0.01642
| 0.20814
| 9,460
| 293
| 96
| 32.286689
| 0.767855
| 0.010571
| 0
| 0.740602
| 0
| 0
| 0.240782
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007519
| false
| 0
| 0.007519
| 0
| 0.018797
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5ffc0dad45d2cced2bad6ff897912db9eef7f593
| 7,298
|
py
|
Python
|
landlab/io/netcdf/tests/test_write_raster_netcdf.py
|
saraahsimon/landlab
|
1cf809b685efbccaaa149b5899a600c3ccedf30f
|
[
"MIT"
] | null | null | null |
landlab/io/netcdf/tests/test_write_raster_netcdf.py
|
saraahsimon/landlab
|
1cf809b685efbccaaa149b5899a600c3ccedf30f
|
[
"MIT"
] | null | null | null |
landlab/io/netcdf/tests/test_write_raster_netcdf.py
|
saraahsimon/landlab
|
1cf809b685efbccaaa149b5899a600c3ccedf30f
|
[
"MIT"
] | null | null | null |
import numpy as np
from numpy.testing import assert_array_equal
from landlab import RasterModelGrid
from landlab.io.netcdf import write_raster_netcdf
try:
import netCDF4 as nc
except ImportError:
pass
def test_append_with_time(tmpdir):
    """Appending a second timed NETCDF4 write stacks slices along "nt"."""
    grid = RasterModelGrid(4, 3)
    grid.add_field("node", "topographic__elevation", np.ones(12, dtype=np.int64))
    with tmpdir.as_cwd():
        write_raster_netcdf("test.nc", grid, append=False, format="NETCDF4", time=0)
        grid.at_node["topographic__elevation"] *= 2
        write_raster_netcdf("test.nc", grid, append=True, format="NETCDF4", time=1.0)
        dataset = nc.Dataset("test.nc", "r", format="NETCDF4")
        assert "topographic__elevation" in dataset.variables
        data = dataset.variables["topographic__elevation"][:]
        # First slice holds the original ones, second the doubled field.
        assert_array_equal(data, [[[1, 1, 1]] * 4, [[2, 2, 2]] * 4])
        assert data.dtype == "int64"
        assert "nt" in dataset.dimensions
        assert len(dataset.dimensions["nt"]) == 2
        assert "t" in dataset.variables
        assert_array_equal(dataset.variables["t"][:], [0.0, 1.0])
        dataset.close()
def test_without_time(tmpdir):
    """A single write with no ``time`` yields one slice and no "t" variable."""
    grid = RasterModelGrid(4, 3)
    grid.add_field("node", "topographic__elevation", np.ones(12, dtype=np.int64))
    with tmpdir.as_cwd():
        write_raster_netcdf("test.nc", grid, append=False, format="NETCDF4")
        dataset = nc.Dataset("test.nc", "r", format="NETCDF4")
        assert "topographic__elevation" in dataset.variables
        data = dataset.variables["topographic__elevation"][:]
        assert_array_equal(data, [[[1, 1, 1]] * 4])
        assert data.dtype == "int64"
        assert "nt" in dataset.dimensions
        assert len(dataset.dimensions["nt"]) == 1
        assert "t" not in dataset.variables
        dataset.close()
def test_with_time(tmpdir):
    """A single timed write records one slice plus a matching "t" variable."""
    grid = RasterModelGrid(4, 3)
    grid.add_field("node", "topographic__elevation", np.ones(12, dtype=np.int64))
    with tmpdir.as_cwd():
        write_raster_netcdf("test.nc", grid, append=False, format="NETCDF4", time=0.0)
        dataset = nc.Dataset("test.nc", "r", format="NETCDF4")
        assert "topographic__elevation" in dataset.variables
        data = dataset.variables["topographic__elevation"][:]
        assert_array_equal(data, [[[1, 1, 1]] * 4])
        assert data.dtype == "int64"
        assert "nt" in dataset.dimensions
        assert len(dataset.dimensions["nt"]) == 1
        assert "t" in dataset.variables
        assert_array_equal(dataset.variables["t"][:], [0.0])
        dataset.close()
def test_with_time_netcdf3(tmpdir):
    """Timed write of two float fields in NETCDF3_64BIT format."""
    grid = RasterModelGrid((4, 3))
    grid.add_field("node", "topographic__elevation", 2.0 * np.arange(12.0))
    grid.add_field("node", "uplift_rate", 2.0 * np.arange(12.0))
    expected = np.arange(0.0, 24.0, 2.0).reshape((1, 4, 3))
    with tmpdir.as_cwd():
        write_raster_netcdf("test.nc", grid, format="NETCDF3_64BIT", time=10.0)
        dataset = nc.Dataset("test.nc", "r", format="NETCDF3_64BIT")
        for var_name in ("uplift_rate", "topographic__elevation"):
            assert var_name in dataset.variables
            assert_array_equal(dataset.variables[var_name][:], expected)
        assert "nt" in dataset.dimensions
        assert len(dataset.dimensions["nt"]) == 1
        assert "t" in dataset.variables
        assert_array_equal(dataset.variables["t"][:], [10.0])
        dataset.close()
def test_append_with_time_netcdf3(tmpdir):
    """Append-with-time behaviour for the NETCDF3_64BIT format."""
    grid = RasterModelGrid((4, 3))
    grid.add_field("topographic__elevation", np.ones(12), at="node")
    with tmpdir.as_cwd():
        write_raster_netcdf(
            "test.nc", grid, append=False, format="NETCDF3_64BIT", time=0
        )
        grid.at_node["topographic__elevation"] *= 2
        write_raster_netcdf(
            "test.nc", grid, append=True, format="NETCDF3_64BIT", time=1.0
        )
        dataset = nc.Dataset("test.nc", "r", format="NETCDF3_64BIT")
        assert "topographic__elevation" in dataset.variables
        assert_array_equal(
            dataset.variables["topographic__elevation"][:],
            [[[1, 1, 1]] * 4, [[2, 2, 2]] * 4],
        )
        assert "nt" in dataset.dimensions
        assert len(dataset.dimensions["nt"]) == 2
        assert "t" in dataset.variables
        assert_array_equal(dataset.variables["t"][:], [0.0, 1.0])
        dataset.close()
def test_append_without_time_netcdf3(tmpdir):
    """Appending without ``time`` overwrites the single untimed slice."""
    grid = RasterModelGrid((4, 3))
    grid.add_field("topographic__elevation", np.ones(12), at="node")
    with tmpdir.as_cwd():
        write_raster_netcdf("test.nc", grid, append=False, format="NETCDF3_64BIT")
        grid.at_node["topographic__elevation"] *= 2
        write_raster_netcdf("test.nc", grid, append=True, format="NETCDF3_64BIT")
        dataset = nc.Dataset("test.nc", "r", format="NETCDF3_64BIT")
        assert "topographic__elevation" in dataset.variables
        assert_array_equal(
            dataset.variables["topographic__elevation"][:],
            [[[2, 2, 2]] * 4],
        )
        assert "nt" in dataset.dimensions
        assert len(dataset.dimensions["nt"]) == 1
        assert "t" not in dataset.variables
        dataset.close()
def test_without_time_netcdf3(tmpdir):
    """Untimed write of two float fields in NETCDF3_64BIT format."""
    grid = RasterModelGrid((4, 3))
    grid.add_field("node", "topographic__elevation", 2.0 * np.arange(12.0))
    grid.add_field("node", "uplift_rate", 2.0 * np.arange(12.0))
    expected = np.arange(0.0, 24.0, 2.0).reshape((1, 4, 3))
    with tmpdir.as_cwd():
        write_raster_netcdf("test.nc", grid, format="NETCDF3_64BIT")
        dataset = nc.Dataset("test.nc", "r", format="NETCDF3_64BIT")
        for var_name in ("uplift_rate", "topographic__elevation"):
            assert var_name in dataset.variables
            assert_array_equal(dataset.variables[var_name][:], expected)
        assert "nt" in dataset.dimensions
        assert len(dataset.dimensions["nt"]) == 1
        assert "t" not in dataset.variables
        dataset.close()
def test_names_keyword(tmpdir):
    """Only the fields listed in ``names`` end up in the output file."""
    grid = RasterModelGrid((4, 3))
    grid.add_field("node", "topographic__elevation", np.arange(12.0))
    grid.add_field("node", "uplift_rate", 2.0 * np.arange(12.0))
    with tmpdir.as_cwd():
        write_raster_netcdf(
            "test.nc", grid, format="NETCDF3_64BIT", names="uplift_rate"
        )
        dataset = nc.Dataset("test.nc", "r", format="NETCDF3_64BIT")
        assert "topographic__elevation" not in dataset.variables
        assert "uplift_rate" in dataset.variables
        assert_array_equal(
            dataset.variables["uplift_rate"][:],
            np.arange(0.0, 24.0, 2.0).reshape((1, 4, 3)),
        )
        assert "nt" in dataset.dimensions
        assert len(dataset.dimensions["nt"]) == 1
        dataset.close()
| 33.324201
| 87
| 0.563579
| 916
| 7,298
| 4.323144
| 0.081878
| 0.022222
| 0.030303
| 0.036364
| 0.92702
| 0.916667
| 0.910859
| 0.910859
| 0.899747
| 0.887626
| 0
| 0.05088
| 0.275555
| 7,298
| 218
| 88
| 33.477064
| 0.698127
| 0
| 0
| 0.70625
| 0
| 0
| 0.130721
| 0.057276
| 0
| 0
| 0
| 0
| 0.3
| 1
| 0.05
| false
| 0.00625
| 0.0375
| 0
| 0.0875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
27129a72152068ec8c52ca42070a01b70147e4c3
| 5,610
|
py
|
Python
|
urls/pageapi.py
|
nguyenhuong791123/api
|
cbefc2f3cb882d5655ed9ebdcfe42be1af69cc89
|
[
"MIT"
] | null | null | null |
urls/pageapi.py
|
nguyenhuong791123/api
|
cbefc2f3cb882d5655ed9ebdcfe42be1af69cc89
|
[
"MIT"
] | 5
|
2019-12-26T00:24:49.000Z
|
2022-01-13T01:56:06.000Z
|
urls/pageapi.py
|
nguyenhuong791123/api
|
cbefc2f3cb882d5655ed9ebdcfe42be1af69cc89
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
import json
import copy
import datetime
from flask import Blueprint, request, jsonify
from sqlalchemy import *
from sqlalchemy.orm import *
from service.page import *
from utils.cm.utils import *
app = Blueprint('pageapi', __name__)
@app.route('/setPage', methods=['POST'])
def setPage():
    """Create a page for a company/user.

    Expects JSON body ``{page, cId, uId}``. Returns the service result, or
    an ``{'error': ...}`` payload; this API always answers HTTP 200.
    """
    auth = request.headers.get('authorization', None)  # NOTE(review): unused; kept for future auth checks
    if request.json is None:
        # No JSON body: preserve the original behaviour of returning null.
        return jsonify(None), 200
    # .get() avoids an unhandled KeyError (HTTP 500) on missing keys.
    page = request.json.get('page')
    if page is None:
        return jsonify({'error': 'incorrect page info'}), 200
    cId = request.json.get('cId')
    if not is_integer(cId):
        return jsonify({'error': 'incorrect company id'}), 200
    uId = request.json.get('uId')
    if not is_integer(uId):
        return jsonify({'error': 'incorrect user id'}), 200
    return jsonify(setServicePage(page, cId, uId)), 200
@app.route('/getPage', methods=['POST'])
def getPage():
    """Fetch a page (read-only view).

    Expects JSON body ``{cId, pId, language}``. Returns the service result,
    or an ``{'error': ...}`` payload; this API always answers HTTP 200.
    """
    auth = request.headers.get('authorization', None)  # NOTE(review): unused; kept for future auth checks
    if request.json is None:
        # No JSON body: preserve the original behaviour of returning null.
        return jsonify(None), 200
    # .get() avoids an unhandled KeyError (HTTP 500) on missing keys.
    cId = request.json.get('cId')
    if not is_integer(cId):
        return jsonify({'error': 'incorrect company id'}), 200
    pId = request.json.get('pId')
    if not is_integer(pId):
        return jsonify({'error': 'incorrect page id'}), 200
    language = request.json.get('language')
    if is_empty(language):
        return jsonify({'error': 'incorrect language'}), 200
    # Final flag False = plain (non-editable) rendering of the page.
    return jsonify(getServicePage(cId, pId, language, False)), 200
@app.route('/getEditCustomizePage', methods=['POST'])
def getEditCustomizePage():
    """Fetch a page in edit/customize mode.

    Same contract as ``getPage`` but calls the service with the edit flag
    set to True. Always answers HTTP 200.
    """
    auth = request.headers.get('authorization', None)  # NOTE(review): unused; kept for future auth checks
    if request.json is None:
        # No JSON body: preserve the original behaviour of returning null.
        return jsonify(None), 200
    # .get() avoids an unhandled KeyError (HTTP 500) on missing keys.
    cId = request.json.get('cId')
    if not is_integer(cId):
        return jsonify({'error': 'incorrect company id'}), 200
    pId = request.json.get('pId')
    if not is_integer(pId):
        return jsonify({'error': 'incorrect page id'}), 200
    language = request.json.get('language')
    if is_empty(language):
        return jsonify({'error': 'incorrect language'}), 200
    # Final flag True = editable/customize rendering of the page.
    return jsonify(getServicePage(cId, pId, language, True)), 200
@app.route('/setGroupPage', methods=['POST'])
def setGroupPage():
    """Create a group page for a company/user.

    Expects JSON body ``{page, cId, uId}``. Returns the service result, or
    an ``{'error': ...}`` payload; this API always answers HTTP 200.
    """
    auth = request.headers.get('authorization', None)  # NOTE(review): unused; kept for future auth checks
    if request.json is None:
        # No JSON body: preserve the original behaviour of returning null.
        return jsonify(None), 200
    # .get() avoids an unhandled KeyError (HTTP 500) on missing keys.
    page = request.json.get('page')
    if page is None:
        return jsonify({'error': 'incorrect page info'}), 200
    cId = request.json.get('cId')
    if not is_integer(cId):
        return jsonify({'error': 'incorrect company id'}), 200
    uId = request.json.get('uId')
    if not is_integer(uId):
        return jsonify({'error': 'incorrect user id'}), 200
    return jsonify(setServiceGroupPage(page, cId, uId)), 200
@app.route('/updatePage', methods=['POST'])
def updatePage():
    """Update one page.

    Expects JSON body ``{page, cId, uId}``. ``uId`` is validated but not
    forwarded to the service (preserved from the original). Always answers
    HTTP 200.
    """
    auth = request.headers.get('authorization', None)  # NOTE(review): unused; kept for future auth checks
    if request.json is None:
        # No JSON body: preserve the original behaviour of returning null.
        return jsonify(None), 200
    # .get() avoids an unhandled KeyError (HTTP 500) on missing keys.
    page = request.json.get('page')
    if page is None:
        return jsonify({'error': 'incorrect page info'}), 200
    cId = request.json.get('cId')
    if not is_integer(cId):
        return jsonify({'error': 'incorrect company id'}), 200
    uId = request.json.get('uId')
    if not is_integer(uId):
        return jsonify({'error': 'incorrect user id'}), 200
    return jsonify(updateServicePage(page, cId)), 200
@app.route('/updatePages', methods=['POST'])
def updatePages():
    """Update several pages at once.

    Expects JSON body ``{pages, cId, uId}``. ``uId`` is validated but not
    forwarded to the service (preserved from the original). Always answers
    HTTP 200.
    """
    auth = request.headers.get('authorization', None)  # NOTE(review): unused; kept for future auth checks
    if request.json is None:
        # No JSON body: preserve the original behaviour of returning null.
        return jsonify(None), 200
    # .get() avoids an unhandled KeyError (HTTP 500) on missing keys.
    pages = request.json.get('pages')
    if pages is None:
        return jsonify({'error': 'incorrect pages info'}), 200
    cId = request.json.get('cId')
    if not is_integer(cId):
        return jsonify({'error': 'incorrect company id'}), 200
    uId = request.json.get('uId')
    if not is_integer(uId):
        return jsonify({'error': 'incorrect user id'}), 200
    return jsonify(updateServicePages(pages, cId)), 200
@app.route('/deletePage', methods=['POST'])
def deletePage():
    """Delete a page.

    Expects JSON body ``{page, cId, uId}``. ``uId`` is validated but not
    forwarded to the service (preserved from the original). Always answers
    HTTP 200.
    """
    auth = request.headers.get('authorization', None)  # NOTE(review): unused; kept for future auth checks
    if request.json is None:
        # No JSON body: preserve the original behaviour of returning null.
        return jsonify(None), 200
    # .get() avoids an unhandled KeyError (HTTP 500) on missing keys.
    page = request.json.get('page')
    if page is None:
        return jsonify({'error': 'incorrect page info'}), 200
    cId = request.json.get('cId')
    if not is_integer(cId):
        return jsonify({'error': 'incorrect company id'}), 200
    uId = request.json.get('uId')
    if not is_integer(uId):
        return jsonify({'error': 'incorrect user id'}), 200
    return jsonify(deleteServicePage(page, cId)), 200
# @app.route('/savePage', methods=[ 'POST' ])
# def savePage():
# auth = request.headers.get('authorization', None)
# result = None
# if request.json is not None:
# page = request.json['page']
# if page is None:
# return jsonify({ 'error': 'incorrect page info'}), 200
# cId = request.json['cId']
# if is_integer(cId) == False:
# return jsonify({ 'error': 'incorrect company id'}), 200
# uId = request.json['uId']
# if is_integer(uId) == False:
# return jsonify({ 'error': 'incorrect user id'}), 200
# result = saveServicePage(page, cId, uId)
# return jsonify(result), 200
| 34
| 69
| 0.59287
| 650
| 5,610
| 5.083077
| 0.101538
| 0.106538
| 0.130751
| 0.196126
| 0.809927
| 0.809927
| 0.78178
| 0.762107
| 0.739709
| 0.715496
| 0
| 0.023509
| 0.264528
| 5,610
| 165
| 70
| 34
| 0.777266
| 0.11943
| 0
| 0.727273
| 0
| 0
| 0.159456
| 0.004266
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057851
| false
| 0
| 0.066116
| 0
| 0.355372
| 0.016529
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
27897c73a6fbd1c744cb2ff6c69804f680d75c12
| 111,814
|
py
|
Python
|
klayout_dot_config/tech/EBeam/pymacros/photonic_crystals/photonic_crystals.py
|
connormosquera/SiEPIC_EBeam_PDK
|
b243e2740dc642634be59534c1fe535abae1422d
|
[
"MIT"
] | 1
|
2021-01-22T07:07:04.000Z
|
2021-01-22T07:07:04.000Z
|
klayout_dot_config/tech/EBeam/pymacros/photonic_crystals/photonic_crystals.py
|
connormosquera/SiEPIC_EBeam_PDK
|
b243e2740dc642634be59534c1fe535abae1422d
|
[
"MIT"
] | null | null | null |
klayout_dot_config/tech/EBeam/pymacros/photonic_crystals/photonic_crystals.py
|
connormosquera/SiEPIC_EBeam_PDK
|
b243e2740dc642634be59534c1fe535abae1422d
|
[
"MIT"
] | null | null | null |
"""
KLayout-SiEPIC library for photonic crystals, UBC and SFU
*******
PCells:
*******
1) swg_fc
- sub-wavelength grating (SWG) fibre coupler (FC)
NOTE: after changing the code, the macro needs to be rerun to install the new
implementation. The macro is also set to "auto run" to install the PCell
when KLayout is run.
Version history:
2017/07/07 Timothy Richards (Simon Fraser University, BC, Canada) and Adam DeAbreu (Simon Fraser University, BC, Canada)
- swg_fc PCell
2017/07/07 Lukas Chrostowski
- library definition and github repo
2017/07/09 Jaspreet Jhoja
- Added Cavity Hole Pcell
2017/07/09 Jingda Wu
- Added 2D H0 Photonic crystal cavity with single bus waveguide and pins
2017/07/10 Megan Nantel
- Added waveguide with impedance matching tapers for transition from external waveguide to Photonic crystal W1 waveguide
2017/07/10 Jingda Wu
- Improved generation efficiency by using single hole as a cell
2017/07/12 Megan Nantel
- Added the H0c test structure that includes grating couplers, waveguides, and H0c
2017/07/12 Jingda Wu
- Added L3 cavity with double bus waveguide and pins
- Added a drop bus to H0 cavity (coupling between waveguides?)
- Simplified code for generation
2017/07/12 Lukas Chrostowski
- SWGFC litho test structure
2017/07/13 Megan Nantel
- grating coupler to grating coupler reference test structure
- photonic crystal with only W1 waveguide
- photonic crystal W1 reference test structure
2017/07/16 Jingda Wu
- Adaptive cavity generation under different waveguide locations
- Able to choose the number of waveguides per PhC cavity
- Added etch layer (12,0) on PhC slabs
- Added H0 cavity with oxide buffer, reduced the vertices (32->20) for holes due to much smaller hole radius
- Deleted cavity hole class
- Added hexagon half cell and hexagon with hole half cell for PhC generation, in case needed
- Added H0 cavity generated with hexagon cells
- Added PhC test pattern
2017/08/19 Jingda Wu
- Added suspension anchor areas for the cavities with undercut
2018/02/14 Lukas Chrostowski
- Upgrade to KLayout 0.25 and SiEPIC-Tools v0.3.x, updating layers to SiEPIC-EBeam v0.3.0+
"""
# Import KLayout Python API methods:
# Box, Point, Polygon, Text, Trans, LayerInfo, etc
from pya import *
import pya
import math
from SiEPIC.utils import get_technology, get_technology_by_name
from SiEPIC.utils import arc, arc_wg, arc_to_waveguide, points_per_circle#,layout
# -------------------------------------------------------------------------------------------------------------------------------------------------- #
# -------------------------------------------------------------------------------------------------------------------------------------------------- #
class swg_fc_test(pya.PCellDeclarationHelper):
    """
    Sub-wavelength-grating fibre coupler PCell litho test structure.
    2017/07/12: Lukas Chrostowski, initial version, based on swg_fc by Tim

    Draws three grating periods (each a thin and a thick curved tooth) and
    labels the tooth widths +/- the fab error, so that lithography bias can
    be measured on the fabricated chip.
    Input:
    """
    def __init__(self):
        # Important: initialize the super class
        super(swg_fc_test, self).__init__()
        # declare the parameters
        self.param("wavelength", self.TypeDouble, "Design Wavelength (micron)", default = 2.9)
        self.param("n_t", self.TypeDouble, "Fiber Mode", default = 1.0)
        self.param("n_e", self.TypeDouble, "Grating Index Parameter", default = 3.1)
        self.param("angle_e", self.TypeDouble, "Taper Angle (deg)", default = 20.0)
        self.param("grating_length", self.TypeDouble, "Grating Length (micron)", default = 3.0)
        self.param("taper_length", self.TypeDouble, "Taper Length (micron)", default = 32.0)
        self.param("dc", self.TypeDouble, "Duty Cycle", default = 0.488193)
        self.param("period", self.TypeDouble, "Grating Period", default = 1.18939)
        self.param("ff", self.TypeDouble, "Fill Factor", default = 0.244319)
        self.param("t", self.TypeDouble, "Waveguide Width (micron)", default = 1.0)
        self.param("theta_c", self.TypeDouble, "Insertion Angle (deg)", default = 8.0)
        self.param("fab_error", self.TypeDouble, "Fab Process error max (micron)", default = 0.05)
        # Layer parameters
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
    def display_text_impl(self):
        # Provide a descriptive text for the cell
        return "swg_fc_test_%.1f-%.2f-%.2f-%.2f-%.2f-%.2f-%.2f-%.2f" % \
        (self.wavelength, self.theta_c, self.period, self.dc, self.ff, self.angle_e, self.taper_length, self.t)
        # return "temporary placeholder"
    def coerce_parameters_impl(self):
        pass
    def can_create_from_shape(self, layout, shape, layer):
        return False
    def produce_impl(self):
        # Draw the litho test pattern into self.cell.
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        shapes = self.cell.shapes
        # NOTE(review): LayerSi / LayerPinRecN / LayerDevRecN are fetched but
        # never used below; kept for parity with the other PCells.
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        from math import pi, cos, sin, log, sqrt, tan
        lambda_0 = self.wavelength  ##um wavelength of light
        pin_length = 0.5  ##um extra nub for the waveguide attachment
        # Geometry
        wh = self.period*self.dc  ##thick grating
        wl = self.ff*(self.period - wh)  ## thin grating
        spacing = (self.period - wh - wl)/2  ##space between thick and thin
        gc_number = int(round(self.grating_length/self.period))  ##number of periods
        # For the litho test only three periods are drawn (overrides the above).
        gc_number = 3
        e = self.n_t*sin((pi/180)*self.theta_c)/self.n_e
        N = round(self.taper_length*(1+e)*self.n_e/lambda_0)  ##allows room for the taper
        start = (pi - (pi/180)*self.angle_e/2)
        stop = (pi + (pi/180)*self.angle_e/2)
        # Draw coupler grating.
        for j in range(gc_number):
            # number of points in the arcs:
            # calculate such that the vertex & edge placement error is < 0.5 nm.
            # see "SiEPIC_EBeam_functions - points_per_circle" for more details
            radius = N*lambda_0 / ( self.n_e*( 1 - e )) + j*self.period + spacing
            seg_points = int(points_per_circle(radius/dbu)/360.*self.angle_e)  # number of points grating arc
            theta_up = []
            for m in range(seg_points+1):
                theta_up = theta_up + [start + m*(stop-start)/seg_points]
            theta_down = theta_up[::-1]
            ##small one (thin tooth of width wl)
            r_up = []
            r_down = []
            rng = range(len(theta_up))
            # find the divider to get desired fab error:
            th = min(theta_up)
            div = (2*sin(th)/self.fab_error)*(N*lambda_0 / ( self.n_e*( 1 - e*cos(th) )) + j*self.period + spacing)
            err = (2*sin(th)/div)*(N*lambda_0 / ( self.n_e*( 1 - e*cos(th) )) + j*self.period + spacing)
            # print("div %s, err (double check) %s" % (div, err))
            for k in rng:
                th = theta_up[k]
                # print("%s, %s, %s" % (th, sin(th), 1+sin(th)/10.) )
                r_up = r_up + [(1-sin(th)/div) *(N*lambda_0 / ( self.n_e*( 1 - e*cos(th) )) + j*self.period + spacing)]
            for k in rng[::-1]:
                th = theta_up[k]
                # print("%s, %s, %s" % (th, sin(th), 1+sin(th)/10.) )
                r_down = r_down + [(1+sin(th)/div) *(N*lambda_0 / ( self.n_e*( 1 - e*cos(th) )) + j*self.period + spacing)]
            # Inner (xr, yr) and outer (xl, yl) arc edges of the tooth.
            xr = []
            yr = []
            for k in range(len(theta_up)):
                xr = xr + [r_up[k]*cos(theta_up[k])]
                yr = yr + [r_up[k]*sin(theta_up[k])]
            xl = []
            yl = []
            for k in range(len(theta_down)):
                xl = xl + [(r_down[k] + wl)*cos(theta_down[k])]
                yl = yl + [(r_down[k] + wl)*sin(theta_down[k])]
            x = xr + xl
            y = yr + yl
            pts = []
            for i in range(len(x)):
                pts.append(Point.from_dpoint(pya.DPoint(x[i]/dbu, y[i]/dbu)))
            #small_one = core.Boundary(points)
            polygon = Polygon(pts)
            shapes(LayerSiN).insert(polygon)
            if j==1:
                # text label dimensions, for minor grating:
                # top
                shapes(LayerTextN).insert(Text("%0.0f"%((wl+self.fab_error)*1000), Trans(Trans.R0, xl[0]/dbu,yl[0]/dbu))).text_size = 0.2/dbu
                # btm
                shapes(LayerTextN).insert(Text("%0.0f"%((wl-self.fab_error)*1000), Trans(Trans.R0, xl[-1]/dbu,yl[-1]/dbu))).text_size = 0.2/dbu
                # mid
                shapes(LayerTextN).insert(Text("%0.0f"%((wl)*1000), Trans(Trans.R0, xl[int(len(theta_up)/2)]/dbu,yl[int(len(theta_up)/2)]/dbu))).text_size = 0.2/dbu
            ##big one (thick tooth of width wh, offset past the thin tooth)
            r_up = []
            r_down = []
            # find the divider to get desired fab error:
            th = min(theta_up)
            div = (2*sin(th)/self.fab_error)*(N*lambda_0 / ( self.n_e*( 1 - e*cos(th) )) + j*self.period + 2*spacing+wl)
            err = (2*sin(th)/div)*(N*lambda_0 / ( self.n_e*( 1 - e*cos(th) )) + j*self.period + 2*spacing+wl)
            # print("div %s, err (double check) %s" % (div, err))
            rng = range(len(theta_up))
            for k in rng:
                th = theta_up[k]
                r_up = r_up + [(1-sin(th)/div) *(N*lambda_0 / ( self.n_e*( 1 - e*cos(th) )) + j*self.period + 2*spacing+wl)]
            for k in rng[::-1]:
                th = theta_up[k]
                r_down = r_down + [(1+sin(th)/div) *(N*lambda_0 / ( self.n_e*( 1 - e*cos(th) )) + j*self.period + 2*spacing+wl)]
            xr = []
            yr = []
            for k in range(len(theta_up)):
                xr = xr + [r_up[k]*cos(theta_up[k])]
                yr = yr + [r_up[k]*sin(theta_up[k])]
            xl = []
            yl = []
            for k in range(len(theta_down)):
                xl = xl + [(r_down[k] + wh)*cos(theta_down[k])]
                yl = yl + [(r_down[k] + wh)*sin(theta_down[k])]
            x = xr + xl
            y = yr + yl
            pts = []
            for i in range(len(x)):
                pts.append(Point.from_dpoint(pya.DPoint(x[i]/dbu, y[i]/dbu)))
            polygon = Polygon(pts)
            shapes(LayerSiN).insert(polygon)
            if j==1:
                # text label dimensions, for major grating:
                # top
                shapes(LayerTextN).insert(Text("%0.0f"%((wh+self.fab_error)*1000), Trans(Trans.R0, xl[0]/dbu,yl[0]/dbu))).text_size = 0.2/dbu
                # btm
                shapes(LayerTextN).insert(Text("%0.0f"%((wh-self.fab_error)*1000), Trans(Trans.R0, xl[-1]/dbu,yl[-1]/dbu))).text_size = 0.2/dbu
                # mid
                shapes(LayerTextN).insert(Text("%0.0f"%((wh)*1000), Trans(Trans.R0, xl[int(len(theta_up)/2)]/dbu,yl[int(len(theta_up)/2)]/dbu))).text_size = 0.2/dbu
# -------------------------------------------------------------------------------------------------------------------------------------------------- #
# -------------------------------------------------------------------------------------------------------------------------------------------------- #
class swg_fc(pya.PCellDeclarationHelper):
    """
    Sub-wavelength-grating fibre coupler PCell implementation.
    Analytical design based on "Grating Coupler Design Based on Silicon-On-Insulator", Yun Wang (2013). Master's Thesis, University of British Columbia, Canada
    Some PCell implementation adapted from the SiEPIC_EBeam library by Dr. Lukas Chrostowski, University of British Columbia, Canada
    Separate modelling (e.g. Lumerical MODE) is required to determine the "grating effective index" parameter for a given device layer thickness,
    cladding type, and period/duty cycle/fill factor.
    Script written by Timothy Richards (Simon Fraser University, BC, Canada) and Adam DeAbreu (Simon Fraser University, BC, Canada)
    Changelog
    2017-07-07 - initial publish
    2017-07-07 - change library & component names; commit to github
    TO-DO:
    - implement mode solver here, or call Lumerical MODE to calculate
    Input:
    """

    def __init__(self):
        # Important: initialize the super class
        super(swg_fc, self).__init__()
        # declare the parameters
        self.param("wavelength", self.TypeDouble, "Design Wavelength (micron)", default = 2.9)
        self.param("n_t", self.TypeDouble, "Fiber Mode", default = 1.0)
        self.param("n_e", self.TypeDouble, "Grating Index Parameter", default = 3.1)
        self.param("angle_e", self.TypeDouble, "Taper Angle (deg)", default = 20.0)
        self.param("grating_length", self.TypeDouble, "Grating Length (micron)", default = 32.0)
        self.param("taper_length", self.TypeDouble, "Taper Length (micron)", default = 32.0)
        self.param("dc", self.TypeDouble, "Duty Cycle", default = 0.488193)
        self.param("period", self.TypeDouble, "Grating Period", default = 1.18939)
        self.param("ff", self.TypeDouble, "Fill Factor", default = 0.244319)
        self.param("t", self.TypeDouble, "Waveguide Width (micron)", default = 1.0)
        self.param("theta_c", self.TypeDouble, "Insertion Angle (deg)", default = 8.0)
        self.param("w_err", self.TypeDouble, "Width Error (micron)", default = -0.06)
        # Width scale parameter is a first pass attempt at designing for length contraction
        # at cryogenic temperature. It is applied BEFORE the width error; this is because
        # the order of operations in the reverse is over/under-etch, then cool and contract.
        # So first scale so that target width is reached after contraction, then add
        # fabrication error so that the scaled width is reached.
        self.param("w_scale", self.TypeDouble, "Width Scale", default = 1.0)
        # Layer parameters
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])

    def display_text_impl(self):
        # Provide a descriptive text for the cell
        return "swg_fc_%.1f-%.2f-%.2f-%.2f-%.2f-%.2f-%.2f-%.2f" % \
        (self.wavelength, self.theta_c, self.period, self.dc, self.ff, self.angle_e, self.taper_length, self.t)
        # return "temporary placeholder"

    def coerce_parameters_impl(self):
        pass

    def can_create_from_shape(self, layout, shape, layer):
        return False

    def produce_impl(self):
        """Draw the coupler: focusing grating arcs, taper, waveguide pin and DevRec box."""
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        shapes = self.cell.shapes
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        from math import pi, cos, sin, log, sqrt, tan
        lambda_0 = self.wavelength ##um wavelength of light
        # NOTE(review): this value is never used -- pin_length is reassigned to 200
        # (dbu units) just before the pin is drawn below.
        pin_length = 0.5 ##um extra nub for the waveguide attachment
        # Geometry
        wh = self.period*self.dc ##thick grating
        wl = self.ff*(self.period - wh) ## thin grating
        # Width scale parameter is a first pass attempt at designing for length contraction
        # at cryogenic temperature. It is applied BEFORE the width error; this is because
        # the order of operations in the reverse is over/under-etch, then cool and contract.
        # So first scale so that target width is reached after contraction, then add
        # fabrication error so that the scaled width is reached.
        wh = (wh*self.w_scale + self.w_err)
        wl = (wl*self.w_scale + self.w_err)
        spacing = (self.period - wh - wl)/2 ##space between thick and thin
        gc_number = int(round(self.grating_length/self.period)) ##number of periods
        # e is the n_t*sin(theta_c)/n_e ratio from the phase-match condition; it makes
        # the grating lines confocal ellipse-like arcs rather than circles.
        e = self.n_t*sin((pi/180)*self.theta_c)/self.n_e
        N = round(self.taper_length*(1+e)*self.n_e/lambda_0) ##allows room for the taper
        # Arc angular extent, centred on the -x axis (pi), spanning +/- angle_e/2.
        start = (pi - (pi/180)*self.angle_e/2)
        stop = (pi + (pi/180)*self.angle_e/2)
        # Draw coupler grating: each period is a thin ("small") and a thick ("big") arc.
        for j in range(gc_number):
            # number of points in the arcs:
            # calculate such that the vertex & edge placement error is < 0.5 nm.
            # see "SiEPIC_EBeam_functions - points_per_circle" for more details
            radius = N*lambda_0 / ( self.n_e*( 1 - e )) + j*self.period + spacing
            seg_points = int(points_per_circle(radius/dbu)/360.*self.angle_e) # number of points grating arc
            theta_up = []
            for m in range(seg_points+1):
                theta_up = theta_up + [start + m*(stop-start)/seg_points]
            # Reversed copy so the polygon outline runs out along one edge
            # and back along the other.
            theta_down = theta_up[::-1]
            ##small one
            r_up = []
            r_down = []
            for k in range(len(theta_up)):
                # Angle-dependent radius: focusing arc for period j.
                r_up = r_up + [N*lambda_0 / ( self.n_e*( 1 - e*cos(float(theta_up[k])) )) + j*self.period + spacing]
            r_down = r_up[::-1]
            # Outer edge of the arc (inner radius), in microns.
            xr = []
            yr = []
            for k in range(len(theta_up)):
                xr = xr + [r_up[k]*cos(theta_up[k])]
                yr = yr + [r_up[k]*sin(theta_up[k])]
            # Return edge, offset outward by the thin-line width wl.
            xl = []
            yl = []
            for k in range(len(theta_down)):
                xl = xl + [(r_down[k] + wl)*cos(theta_down[k])]
                yl = yl + [(r_down[k] + wl)*sin(theta_down[k])]
            x = xr + xl
            y = yr + yl
            pts = []
            for i in range(len(x)):
                # Convert microns to database units when building the polygon.
                pts.append(Point.from_dpoint(pya.DPoint(x[i]/dbu, y[i]/dbu)))
            #small_one = core.Boundary(points)
            polygon = Polygon(pts)
            shapes(LayerSiN).insert(polygon)
            ##big one
            r_up = []
            r_down = []
            for k in range(len(theta_up)):
                # Thick line sits beyond the thin one: offset by 2*spacing + wl.
                r_up = r_up + [N*lambda_0 / ( self.n_e*( 1 - e*cos(float(theta_up[k])) )) + j*self.period + 2*spacing+ wl]
            r_down = r_up[::-1]
            xr = []
            yr = []
            for k in range(len(theta_up)):
                xr = xr + [r_up[k]*cos(theta_up[k])]
                yr = yr + [r_up[k]*sin(theta_up[k])]
            xl = []
            yl = []
            for k in range(len(theta_down)):
                xl = xl + [(r_down[k] + wh)*cos(theta_down[k])]
                yl = yl + [(r_down[k] + wh)*sin(theta_down[k])]
            x = xr + xl
            y = yr + yl
            pts = []
            for i in range(len(x)):
                pts.append(Point.from_dpoint(pya.DPoint(x[i]/dbu, y[i]/dbu)))
            polygon = Polygon(pts)
            shapes(LayerSiN).insert(polygon)
        # Taper section: fan from the waveguide stub (width self.t at x=0)
        # out to the innermost grating arc. Reuses theta_up/theta_down from
        # the last loop iteration above.
        r_up = []
        r_down = []
        for k in range(len(theta_up)):
            r_up = r_up + [N*lambda_0 / ( self.n_e*( 1 - e*cos(float(theta_up[k])) ))]
        r_down = r_up[::-1]
        xl = []
        yl = []
        for k in range(len(theta_down)):
            xl = xl + [(r_down[k])*cos(theta_down[k])]
            yl = yl + [(r_down[k])*sin(theta_down[k])]
        # Rectangle part of the taper: waveguide edges at +/- t/2.
        yr = [self.t/2., self.t/2., -self.t/2., -self.t/2.]
        yl_abs = []
        for k in range(len(yl)):
            yl_abs = yl_abs + [abs(yl[k])]
        y_max = max(yl_abs)
        iy_max = yl_abs.index(y_max)
        # Distance over which the taper flares from t/2 up to y_max at angle_e/2.
        L_o = (y_max - self.t/2)/tan((pi/180)*self.angle_e/2)
        xr = [L_o+xl[iy_max], 0, 0, L_o+xl[iy_max]]
        x = xr + xl
        y = yr + yl
        pts = []
        for i in range(len(x)):
            pts.append(Point.from_dpoint(pya.DPoint(x[i]/dbu, y[i]/dbu)))
        polygon = Polygon(pts)
        shapes(LayerSiN).insert(polygon)
        # Pin on the waveguide:
        pin_length = 200
        x = 0
        t = Trans(x,0)
        pin = pya.Path([Point(-pin_length/2,0), Point(pin_length/2,0)], self.t/dbu)
        pin_t = pin.transformed(t)
        shapes(LayerPinRecN).insert(pin_t)
        text = Text ("pin1", t)
        shape = shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        # Device recognition layer: bounding box out to the outermost grating arc.
        yr = sin(start) * (N*lambda_0 / ( self.n_e*( 1 - e*cos(float(start)) )) + gc_number*self.period + spacing)
        box1 = Box(-(self.grating_length+self.taper_length)/dbu-pin_length*2, yr/dbu, 0, -yr/dbu)
        shapes(LayerDevRecN).insert(box1)
class H0c(pya.PCellDeclarationHelper):
    """
    H0 photonic-crystal cavity PCell: a triangular lattice of holes in a
    slab, with one or two waveguide rows and optional oxide-etch box.
    Input: length, width
    """
    import numpy

    def __init__(self):
        # Important: initialize the super class
        super(H0c, self).__init__()
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.744)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 30)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.179)
        self.param("wg_dis", self.TypeInt, "Waveguide distance (number of holes)", default = 3)
        self.param("n_bus", self.TypeInt, "Bus number, 1 or 2 ", default = 2)
        self.param("n_vertices", self.TypeInt, "Vertices of a hole", default = 32)
        # S1x..S5x / S1y,S2y: positional shifts of the holes nearest the cavity
        # centre (presumably cavity-optimization offsets -- values from design).
        self.param("S1x", self.TypeDouble, "S1x shift", default = 0.28)
        self.param("S2x", self.TypeDouble, "S2x shift", default = 0.193)
        self.param("S3x", self.TypeDouble, "S3x shift", default = 0.194)
        self.param("S4x", self.TypeDouble, "S4x shift", default = 0.162)
        self.param("S5x", self.TypeDouble, "S5x shift", default = 0.113)
        self.param("S1y", self.TypeDouble, "S1y shift", default = -0.016)
        self.param("S2y", self.TypeDouble, "S2y shift", default = 0.134)
        self.param("etch_condition", self.TypeInt, "etch = 1 if etch box, etch = 2 if no etch box", default = 1)
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
        self.param("etch", self.TypeLayer, "oxide etch layer", default = pya.LayerInfo(12, 0))

    def display_text_impl(self):
        # Provide a descriptive text for the cell
        return "H0c_a%s-r%.3f-wg_dis%.3f-n%.3f" % \
        (self.a, self.r, self.wg_dis, self.n)

    def coerce_parameters_impl(self):
        pass

    def can_create_from_shape(self, layout, shape, layer):
        return False

    def produce_impl(self):
        """Draw the H0 cavity: slab minus hole lattice minus trenches, plus pins/DevRec."""
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        LayerEtch = ly.layer(self.etch)
        # Fetch all the parameters (lengths converted from microns to dbu):
        a = self.a/dbu
        r = self.r/dbu
        wg_dis = self.wg_dis+1
        n_vertices = self.n_vertices
        n_bus = self.n_bus
        n = int(math.ceil(self.n/2))
        Sx = [self.S1x,self.S2x,self.S3x,self.S4x,self.S5x]
        Sy = [self.S1y,0,self.S2y]
        etch_condition = self.etch_condition
        if n_bus == 1:
            # Single-bus device: no cavity-hole shifts.
            Sx = [0,0,0,0,0]
            Sy = [0,0,0]
        if wg_dis%2 == 0:
            length_slab_x = (2*n-1)*a
        else:
            length_slab_x = 2*n*a
        # Triangular lattice: row pitch is a*sqrt(3)/2.
        length_slab_y = 2*(wg_dis+10)*a*math.sqrt(3)/2
        length_anchor_y = length_slab_y + 20 * a
        length_anchor_x = length_slab_x + 20 * a
        n_x = n
        n_y = wg_dis+10
        # Define Si slab and hole region for future subtraction
        Si_slab = pya.Region()
        Si_slab.insert(pya.Box(-length_anchor_x/2, -length_anchor_y/2, length_anchor_x/2, length_anchor_y/2))
        hole = pya.Region()
        hole_r = r
        trench = pya.Region()
        #add the trenches for waveguide connection
        trench_width = 20/dbu
        trench_height = 9*a*math.sqrt(3)/2
        wg_pos = a*math.sqrt(3)/2*wg_dis
        trench.insert(pya.Box(-trench_width-length_slab_x/2, wg_pos-trench_height/2, -length_slab_x/2, wg_pos+trench_height/2))
        trench.insert(pya.Box(length_slab_x/2, wg_pos-trench_height/2, trench_width+length_slab_x/2, wg_pos+trench_height/2))
        if n_bus == 2:
            # Second (drop) bus: one more trench on the opposite side.
            wg_pos_2 = -a*math.sqrt(3)/2*wg_dis
            trench.insert(pya.Box(length_slab_x/2, wg_pos_2-trench_height/2, trench_width+length_slab_x/2, wg_pos_2+trench_height/2))
        # function to generate points to create a circle
        def circle(x,y,r):
            npts = n_vertices
            theta = 2 * math.pi / npts # increment, in radians
            pts = []
            for i in range(0, npts):
                pts.append(Point.from_dpoint(pya.DPoint((x+r*math.cos(i*theta))/1, (y+r*math.sin(i*theta))/1)))
            return pts
        # raster through all holes with shifts and waveguide
        hole_cell = circle(0,0,hole_r)
        hole_poly = pya.Polygon(hole_cell)
        for j in range(-n_y,n_y+1):
            # j == wg_dis row is skipped entirely: that row is the bus waveguide.
            if j%2 == 0 and j != wg_dis:
                for i in range(-n_x,n_x+1):
                    if j == -wg_dis and i > 3 and n_bus == 2:
                        # Drop-waveguide row (two-bus case): leave holes out for i > 3.
                        None
                    elif j == 0 and i in (1,-1,2,-2,3,-3,4,-4,5,-5):
                        # Centre row near the cavity: apply the Sx shifts.
                        # abs(i)/i is the sign of i (i != 0 in this branch).
                        hole_x = abs(i)/i*(abs(i)-0.5+Sx[abs(i)-1])*a
                        hole_y = 0
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
                    elif i!=0:
                        # Regular even-row hole; rows are offset by half a pitch.
                        hole_x = abs(i)/i*(abs(i)-0.5)*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
            elif j%2 == 1 and j != wg_dis:
                for i in range(-n_x,n_x+1):
                    if j == -wg_dis and i > 3 and n_bus == 2:
                        None
                    elif i == 0 and j in (1,-1,3,-3):
                        # Centre column near the cavity: apply the Sy shifts.
                        hole_x = 0
                        hole_y = j*a*(math.sqrt(3)/2)+abs(j)/j*a*Sy[abs(j)-1]
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
                    else:
                        hole_x = i*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
        # Boolean subtraction: slab minus holes minus waveguide trenches.
        phc = Si_slab - hole - trench
        self.cell.shapes(LayerSiN).insert(phc)
        if etch_condition == 1 :
            # Oxide etch window, inset 3000 dbu from the slab edges.
            box_etch = pya.Box(-(length_slab_x/2-3000), -(length_slab_y/2-3000), length_slab_x/2-3000, length_slab_y/2-3000)
            self.cell.shapes(LayerEtch).insert(box_etch)
        # Pins on the waveguide:
        pin_length = 200
        pin_w = a
        t = pya.Trans(Trans.R0, -length_slab_x/2,wg_pos)
        pin = pya.Path([pya.Point(pin_length/2, 0), pya.Point(-pin_length/2, 0)], pin_w)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin1", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        t = pya.Trans(Trans.R0, length_slab_x/2,wg_pos)
        pin = pya.Path([pya.Point(-pin_length/2, 0), pya.Point(pin_length/2, 0)], pin_w)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin2", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        #pin for drop waveguide
        if n_bus == 2:
            t = pya.Trans(Trans.R0, length_slab_x/2,-wg_pos)
            pin_t = pin.transformed(t)
            self.cell.shapes(LayerPinRecN).insert(pin_t)
            text = pya.Text ("pin3", t)
            shape = self.cell.shapes(LayerPinRecN).insert(text)
            shape.text_size = 0.4/dbu
        # Create the device recognition layer -- make it 1 * wg_width away from the waveguides.
        points = [[-length_slab_x/2,0], [length_slab_x/2, 0]]
        points = [Point(each[0], each[1]) for each in points]
        path = Path(points, length_slab_y)
        self.cell.shapes(LayerDevRecN).insert(path.simple_polygon())
class L3c(pya.PCellDeclarationHelper):
    """
    L3 photonic-crystal cavity PCell: a triangular lattice of holes with a
    three-hole line defect (i in -1,0,1 omitted on the centre row), one or
    two bus waveguides, and an oxide-etch box.
    Input: length, width
    """
    import numpy

    def __init__(self):
        # Important: initialize the super class
        super(L3c, self).__init__()
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.720)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 30)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.181)
        self.param("wg_dis", self.TypeInt, "Waveguide distance (number of holes)", default = 3)
        self.param("n_bus", self.TypeInt, "Bus number, 1 or 2 ", default = 2)
        self.param("n_vertices", self.TypeInt, "Vertices of a hole", default = 32)
        # S1x..S5x: x-shifts of the holes flanking the L3 defect
        # (presumably cavity-optimization offsets -- values from design).
        self.param("S1x", self.TypeDouble, "S1x shift", default = 0.337)
        self.param("S2x", self.TypeDouble, "S2x shift", default = 0.27)
        self.param("S3x", self.TypeDouble, "S3x shift", default = 0.088)
        self.param("S4x", self.TypeDouble, "S4x shift", default = 0.323)
        self.param("S5x", self.TypeDouble, "S5x shift", default = 0.173)
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
        self.param("etch", self.TypeLayer, "oxide etch layer", default = pya.LayerInfo(12, 0))

    def display_text_impl(self):
        # Provide a descriptive text for the cell
        return "L3 Cavity_a%s-r%.3f-wg_dis%.3f-n%.3f" % \
        (self.a, self.r, self.wg_dis, self.n)

    def coerce_parameters_impl(self):
        pass

    def can_create_from_shape(self, layout, shape, layer):
        return False

    def produce_impl(self):
        """Draw the L3 cavity: slab minus hole lattice minus trenches, plus etch box, pins, DevRec."""
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        LayerEtch = ly.layer(self.etch)
        # Fetch all the parameters (lengths converted from microns to dbu):
        a = self.a/dbu
        r = self.r/dbu
        wg_dis = self.wg_dis+1
        n_vertices = self.n_vertices
        n_bus = self.n_bus
        n = int(math.ceil(self.n/2))
        Sx = [self.S1x,self.S2x,self.S3x,self.S4x,self.S5x]
        if n_bus == 1:
            # Single-bus device: no cavity-hole shifts.
            Sx = [0,0,0,0,0]
            Sy = [0,0,0]
        # NOTE: parity of the slab width is the opposite of H0c -- the L3
        # centre row has holes at integer positions (i*a), not half-integer.
        if wg_dis%2 == 0:
            length_slab_x = 2*n*a
        else:
            length_slab_x = (2*n-1)*a
        # Triangular lattice: row pitch is a*sqrt(3)/2.
        length_slab_y = 2*(wg_dis+10)*a*math.sqrt(3)/2
        length_anchor_y = length_slab_y + 20 * a
        length_anchor_x = length_slab_x + 20 * a
        n_x = n
        n_y = wg_dis+10
        # Define Si slab and hole region for future subtraction
        Si_slab = pya.Region()
        Si_slab.insert(pya.Box(-length_anchor_x/2, -length_anchor_y/2, length_anchor_x/2, length_anchor_y/2))
        hole = pya.Region()
        hole_r = r
        trench = pya.Region()
        #add the trenches for waveguide connection
        trench_width = 20/dbu
        trench_height = 9*a*math.sqrt(3)/2
        wg_pos = a*math.sqrt(3)/2*wg_dis
        trench.insert(pya.Box(-trench_width-length_slab_x/2, wg_pos-trench_height/2, -length_slab_x/2, wg_pos+trench_height/2))
        trench.insert(pya.Box(length_slab_x/2, wg_pos-trench_height/2, trench_width+length_slab_x/2, wg_pos+trench_height/2))
        if n_bus == 2:
            # Second (drop) bus: one more trench on the opposite side.
            wg_pos_2 = -a*math.sqrt(3)/2*wg_dis
            trench.insert(pya.Box(length_slab_x/2, wg_pos_2-trench_height/2, trench_width+length_slab_x/2, wg_pos_2+trench_height/2))
        # function to generate points to create a circle
        def circle(x,y,r):
            npts = n_vertices
            theta = 2 * math.pi / npts # increment, in radians
            pts = []
            for i in range(0, npts):
                pts.append(Point.from_dpoint(pya.DPoint((x+r*math.cos(i*theta))/1, (y+r*math.sin(i*theta))/1)))
            return pts
        # raster through all holes with shifts and waveguide
        hole_cell = circle(0,0,hole_r)
        hole_poly = pya.Polygon(hole_cell)
        for j in range(-n_y,n_y+1):
            # j == wg_dis row is skipped entirely: that row is the bus waveguide.
            if j%2 == 0 and j != wg_dis:
                for i in range(-n_x,n_x+1):
                    if j == -wg_dis and i > 3 and n_bus == 2:
                        # Drop-waveguide row (two-bus case): leave holes out for i > 3.
                        None
                    elif j == 0 and i in (-1,0,1):
                        # The L3 defect: three missing holes at the centre.
                        None
                    elif j == 0 and i in (2,-2,3,-3,4,-4,5,-5,6,-6):
                        # Holes flanking the defect: apply the Sx shifts.
                        # abs(i)/i is the sign of i (i != 0 in this branch).
                        hole_x = (i+(abs(i)/i)*Sx[abs(i)-2])*a
                        hole_y = 0
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
                    else:
                        hole_x = i*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
            elif j%2 == 1 and j != wg_dis:
                for i in range(-n_x,n_x+1):
                    if j == -wg_dis and i > 3 and n_bus == 2:
                        None
                    elif i != 0:
                        # Odd rows are offset by half a pitch in x.
                        hole_x = abs(i)/i*(abs(i)-0.5)*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
        # Boolean subtraction: slab minus holes minus waveguide trenches.
        phc = Si_slab - hole - trench
        self.cell.shapes(LayerSiN).insert(phc)
        # Oxide etch window, inset 3000 dbu from the slab edges
        # (always drawn -- unlike H0c there is no etch_condition switch here).
        box_etch = pya.Box(-(length_slab_x/2-3000), -(length_slab_y/2-3000), length_slab_x/2-3000, length_slab_y/2-3000)
        self.cell.shapes(LayerEtch).insert(box_etch)
        # Pins on the waveguide:
        pin_length = 200
        pin_w = a
        t = pya.Trans(Trans.R0, -length_slab_x/2,wg_pos)
        pin = pya.Path([pya.Point(pin_length/2, 0), pya.Point(-pin_length/2, 0)], pin_w)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin1", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        t = pya.Trans(Trans.R0, length_slab_x/2,wg_pos)
        pin = pya.Path([pya.Point(-pin_length/2, 0), pya.Point(pin_length/2, 0)], pin_w)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin2", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        #pin for drop waveguide
        if n_bus == 2:
            t = pya.Trans(Trans.R0, length_slab_x/2,-wg_pos)
            pin_t = pin.transformed(t)
            self.cell.shapes(LayerPinRecN).insert(pin_t)
            text = pya.Text ("pin3", t)
            shape = self.cell.shapes(LayerPinRecN).insert(text)
            shape.text_size = 0.4/dbu
        # Create the device recognition layer -- make it 1 * wg_width away from the waveguides.
        points = [[-length_slab_x/2,0], [length_slab_x/2, 0]]
        points = [Point(each[0], each[1]) for each in points]
        path = Path(points, length_slab_y)
        self.cell.shapes(LayerDevRecN).insert(path.simple_polygon())
class H0c_oxide(pya.PCellDeclarationHelper):
    """
    Oxide-clad H0 photonic-crystal cavity PCell: same hole-raster logic as
    H0c, but with no trench regions and no etch box (the slab stays on oxide).
    Input: length, width
    """
    import numpy

    def __init__(self):
        # Important: initialize the super class
        super(H0c_oxide, self).__init__()
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.690)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 30)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.125)
        self.param("wg_dis", self.TypeInt, "Waveguide distance (number of holes)", default = 3)
        self.param("n_bus", self.TypeInt, "Bus number, 1 or 2 ", default = 2)
        self.param("n_vertices", self.TypeInt, "Vertices of a hole", default = 20)
        # S1x..S5x / S1y,S2y: positional shifts of the holes nearest the cavity
        # centre (presumably cavity-optimization offsets -- values from design).
        self.param("S1x", self.TypeDouble, "S1x shift", default = 0.28)
        self.param("S2x", self.TypeDouble, "S2x shift", default = 0.193)
        self.param("S3x", self.TypeDouble, "S3x shift", default = 0.194)
        self.param("S4x", self.TypeDouble, "S4x shift", default = 0.162)
        self.param("S5x", self.TypeDouble, "S5x shift", default = 0.113)
        self.param("S1y", self.TypeDouble, "S1y shift", default = -0.016)
        self.param("S2y", self.TypeDouble, "S2y shift", default = 0.134)
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])

    def display_text_impl(self):
        # Provide a descriptive text for the cell
        return "H0c_oxide_a%s-r%.3f-wg_dis%.3f-n%.3f" % \
        (self.a, self.r, self.wg_dis, self.n)

    def coerce_parameters_impl(self):
        pass

    def can_create_from_shape(self, layout, shape, layer):
        return False

    def produce_impl(self):
        """Draw the oxide-clad H0 cavity: slab minus hole lattice, plus pins and DevRec."""
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        # Fetch all the parameters (lengths converted from microns to dbu):
        a = self.a/dbu
        r = self.r/dbu
        wg_dis = self.wg_dis+1
        n_vertices = self.n_vertices
        n_bus = self.n_bus
        n = int(math.ceil(self.n/2))
        Sx = [self.S1x,self.S2x,self.S3x,self.S4x,self.S5x]
        Sy = [self.S1y,0,self.S2y]
        if n_bus == 1:
            # Single-bus device: no cavity-hole shifts.
            Sx = [0,0,0,0,0]
            Sy = [0,0,0]
        if wg_dis%2 == 0:
            length_slab_x = (2*n-1)*a
        else:
            length_slab_x = 2*n*a
        # Triangular lattice: row pitch is a*sqrt(3)/2.
        length_slab_y = 2*(wg_dis+15)*a*math.sqrt(3)/2
        n_x = n
        n_y = wg_dis+10
        # Define Si slab and hole region for future subtraction
        Si_slab = pya.Region()
        Si_slab.insert(pya.Box(-length_slab_x/2, -length_slab_y/2, length_slab_x/2, length_slab_y/2))
        hole = pya.Region()
        hole_r = r
        # function to generate points to create a circle
        def circle(x,y,r):
            npts = n_vertices
            theta = 2 * math.pi / npts # increment, in radians
            pts = []
            for i in range(0, npts):
                pts.append(Point.from_dpoint(pya.DPoint((x+r*math.cos(i*theta))/1, (y+r*math.sin(i*theta))/1)))
            return pts
        # raster through all holes with shifts and waveguide
        hole_cell = circle(0,0,hole_r)
        hole_poly = pya.Polygon(hole_cell)
        for j in range(-n_y,n_y+1):
            # j == wg_dis row is skipped entirely: that row is the bus waveguide.
            if j%2 == 0 and j != wg_dis:
                for i in range(-n_x,n_x+1):
                    if j == -wg_dis and i > 3 and n_bus == 2:
                        # Drop-waveguide row (two-bus case): leave holes out for i > 3.
                        None
                    elif j == 0 and i in (1,-1,2,-2,3,-3,4,-4,5,-5):
                        # Centre row near the cavity: apply the Sx shifts.
                        # abs(i)/i is the sign of i (i != 0 in this branch).
                        hole_x = abs(i)/i*(abs(i)-0.5+Sx[abs(i)-1])*a
                        hole_y = 0
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
                    elif i!=0:
                        # Regular even-row hole; rows are offset by half a pitch.
                        hole_x = abs(i)/i*(abs(i)-0.5)*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
            elif j%2 == 1 and j != wg_dis:
                for i in range(-n_x,n_x+1):
                    if j == -wg_dis and i > 3 and n_bus == 2:
                        None
                    elif i == 0 and j in (1,-1,3,-3):
                        # Centre column near the cavity: apply the Sy shifts.
                        hole_x = 0
                        hole_y = j*a*(math.sqrt(3)/2)+abs(j)/j*a*Sy[abs(j)-1]
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
                    else:
                        hole_x = i*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
        # Boolean subtraction: slab minus holes (no trenches in the oxide variant).
        phc = Si_slab - hole
        self.cell.shapes(LayerSiN).insert(phc)
        # Pins on the waveguide:
        pin_length = 200
        pin_w = a
        wg_pos = a*math.sqrt(3)/2*wg_dis
        t = pya.Trans(Trans.R0, -length_slab_x/2,wg_pos)
        pin = pya.Path([pya.Point(pin_length/2, 0), pya.Point(-pin_length/2, 0)], pin_w)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin1", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        t = pya.Trans(Trans.R0, length_slab_x/2,wg_pos)
        pin = pya.Path([pya.Point(-pin_length/2, 0), pya.Point(pin_length/2, 0)], pin_w)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin2", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        #pin for drop waveguide
        if n_bus == 2:
            t = pya.Trans(Trans.R0, length_slab_x/2,-wg_pos)
            pin_t = pin.transformed(t)
            self.cell.shapes(LayerPinRecN).insert(pin_t)
            text = pya.Text ("pin3", t)
            shape = self.cell.shapes(LayerPinRecN).insert(text)
            shape.text_size = 0.4/dbu
        # Create the device recognition layer -- make it 1 * wg_width away from the waveguides.
        points = [[-length_slab_x/2,0], [length_slab_x/2, 0]]
        points = [Point(each[0], each[1]) for each in points]
        path = Path(points, length_slab_y)
        self.cell.shapes(LayerDevRecN).insert(path.simple_polygon())
class PhC_test(pya.PCellDeclarationHelper):
    """
    Photonic-crystal resolution-test PCell.
    NOTE(review): the layout-generation code in produce_impl is disabled --
    it sits inside an unused triple-quoted string literal and references
    names (a_k, x_offset, hole_poly, ...) that are never defined, so this
    cell currently produces no geometry.
    Input: length, width
    """
    import numpy

    def __init__(self):
        # Important: initialize the super class
        super(PhC_test, self).__init__()
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.744)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 5)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.179)
        self.param("n_sweep", self.TypeInt, "Different sizes of holes", default = 13)
        self.param("n_vertices", self.TypeInt, "Vertices of a hole", default = 32)
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
        self.param("etch", self.TypeLayer, "oxide etch layer", default = pya.LayerInfo(12, 0))

    # display_text_impl is intentionally commented out; the inherited default is used.
    #def display_text_impl(self):
    # Provide a descriptive text for the cell
    # return "PhC resolution test_a%s-r%.3f-n%.3f" % \
    #(self.a, self.r, self.n)

    def coerce_parameters_impl(self):
        pass

    def can_create_from_shape(self, layout, shape, layer):
        return False

    def produce_impl(self):
        """Fetch parameters and set up regions; the drawing code itself is disabled (see below)."""
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        LayerEtch = ly.layer(self.etch)
        TextLayerN = ly.layer(self.textl)
        # Fetch all the parameters:
        a = self.a/dbu
        r = self.r/dbu
        n_vertices = self.n_vertices
        n = int(math.ceil(self.n/2))
        #print(n)
        n_sweep = self.n_sweep
        n_x = n
        n_y = n
        # Define Si slab and hole region for future subtraction
        Si_slab = pya.Region()
        hole = pya.Region()
        ruler = pya.Region()
        #hole_r = [r+50,r}
        # The block below is dead code: it is a bare string literal, not
        # executed, and references undefined names if re-enabled as-is.
        '''
        # translate to array (to pv)
        pv = []
        for p in pcell_decl.get_parameters():
        if p.name in param:
        pv.append(param[p.name])
        else:
        pv.append(p.default)
        pcell_var = self.layout.add_pcell_variant(lib, pcell_decl.id(), pv)
        t_text = pya.Trans(x_offset-2*a_k, -y_offset-a_k*0.5)
        self.cell.insert(pya.CellInstArray(pcell_var, t_text))
        for m in range(0,28):
        ruler.insert(pya.Box(-x_width+x_offset_2+x_spacing*m, -y_height+y_offset, x_width+x_offset_2+x_spacing*m, y_height+y_offset))
        if m > 23:
        None
        else:
        ruler.insert(pya.Box(-y_height+x_offset_3, -x_width-y_offset_2+x_spacing*m, y_height+x_offset_3, x_width-y_offset_2+x_spacing*m))
        for j in range(-n_y,n_y+1):
        if j%2 == 0:
        for i in range(-n_x,n_x+1):
        if i!=0:
        hole_x = abs(i)/i*(abs(i)-0.5)*a_k+x_offset
        hole_y = j*a_k*math.sqrt(3)/2
        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
        hole_t = hole_poly.transformed(hole_trans)
        hole.insert(hole_t)
        #print(hole_t)
        elif j%2 == 1:
        for i in range(-n_x,n_x+1):
        hole_x = i*a_k+x_offset
        hole_y = j*a_k*math.sqrt(3)/2
        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
        hole_t = hole_poly.transformed(hole_trans)
        hole.insert(hole_t)
        phc = Si_slab - hole
        phc = phc + ruler
        self.cell.shapes(LayerSiN).insert(phc)
        '''
class Hole_cell_half(pya.PCellDeclarationHelper):
    """
    Half unit cell of a photonic-crystal hole: the right half of a hexagon
    outline combined with the right half of a circular hole, drawn as a
    single polygon.
    Input: length, width
    """
    import numpy

    def __init__(self):
        # The superclass must be initialized before parameters are declared.
        super(Hole_cell_half, self).__init__()
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.744)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.179)
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])

    def display_text_impl(self):
        # Descriptive cell name built from the key dimensions.
        return "Cavity Hole Cell_a%s-r%.3f" % (self.a, self.r)

    def coerce_parameters_impl(self):
        pass

    def can_create_from_shape(self, layout, shape, layer):
        return False

    def produce_impl(self):
        """Insert the half-hexagon/half-hole polygon on the waveguide layer."""
        dbu = self.layout.dbu
        ly = self.layout
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        # Convert microns to database units.
        a = self.a/dbu
        r = self.r/dbu

        def hexagon_hole_half(a, r):
            # Four vertices sweep the right half of the hexagon from -90 to
            # +90 degrees; then npts+1 vertices trace the right half of the
            # hole from +90 down to -90 degrees, so polygon winding carves
            # the hole out of the hexagon.
            npts = 10
            hex_step = math.pi/3
            hole_step = math.pi/npts
            side = a/math.sqrt(3)
            vertices = []
            for k in range(0, 4):
                ang = k*hex_step - math.pi/2
                vertices.append(Point.from_dpoint(pya.DPoint(side*math.cos(ang), side*math.sin(ang))))
            for k in range(0, npts+1):
                ang = math.pi/2 - k*hole_step
                vertices.append(Point.from_dpoint(pya.DPoint(r*math.cos(ang), r*math.sin(ang))))
            return vertices

        half_cell_poly = pya.Polygon(hexagon_hole_half(a, r))
        self.cell.shapes(LayerSiN).insert(half_cell_poly)
class Hexagon_cell_half(pya.PCellDeclarationHelper):
    """
    Half unit cell of a plain hexagon (no hole): the right half of the
    hexagon outline as a single polygon.
    Input: length, width
    """
    import numpy

    def __init__(self):
        # The superclass must be initialized before parameters are declared.
        super(Hexagon_cell_half, self).__init__()
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.744)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.179)
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])

    def display_text_impl(self):
        # Descriptive cell name built from the key dimensions.
        return "Cavity Hole Cell_a%s-r%.3f" % (self.a, self.r)

    def coerce_parameters_impl(self):
        pass

    def can_create_from_shape(self, layout, shape, layer):
        return False

    def produce_impl(self):
        """Insert the half-hexagon polygon on the waveguide layer."""
        dbu = self.layout.dbu
        ly = self.layout
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        # Convert microns to database units (r is declared but unused here).
        a = self.a/dbu
        r = self.r/dbu

        def hexagon_half(a):
            # Four vertices sweep the right half of a hexagon of lattice
            # constant a, from -90 to +90 degrees in 60-degree steps.
            step = math.pi/3
            side = a/math.sqrt(3)
            return [
                Point.from_dpoint(pya.DPoint(side*math.cos(k*step - math.pi/2),
                                             side*math.sin(k*step - math.pi/2)))
                for k in range(0, 4)
            ]

        half_hexagon_poly = pya.Polygon(hexagon_half(a))
        self.cell.shapes(LayerSiN).insert(half_hexagon_poly)
class wg_triangle_tapers(pya.PCellDeclarationHelper):
    """
    The PCell declaration for the strip waveguide taper.

    Draws a straight waveguide section that ends in a triangular tip, with
    pin markers at both ends.
    """

    def __init__(self):
        # The superclass must be initialized before parameters are declared.
        super(wg_triangle_tapers, self).__init__()
        # declare the parameters
        self.param("tri_base", self.TypeDouble, "Triangle Base (microns)", default = 0.363)
        self.param("tri_height", self.TypeDouble, "Triangle Height (microns)", default = 0.426)
        self.param("taper_wg_length", self.TypeDouble, "Waveguide Length (microns)", default = 5)
        self.param("wg_width", self.TypeDouble, "Waveguide Width (microns)", default = 1)
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("silayer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])

    def display_text_impl(self):
        # Descriptive cell name built from the key dimensions.
        return "waveguide_triangular_tapers_%.3f-%.3f" % (self.taper_wg_length, self.wg_width)

    def can_create_from_shape_impl(self):
        return False

    def produce(self, layout, layers, parameters, cell):
        """
        coerce parameters (make consistent)
        """
        self._layers = layers
        self.cell = cell
        self._param_values = parameters
        self.layout = layout
        shapes = self.cell.shapes
        # cell: layout cell to place the layout
        # LayerSiN: which layer to use
        # w: waveguide width
        # length units in dbu
        dbu = self.layout.dbu
        ly = self.layout
        LayerSi = self.silayer
        LayerSiN = ly.layer(self.silayer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        # Dimensions converted from microns to integer database units.
        tri_b = int(round(self.tri_base/dbu))
        tri_h = int(round(self.tri_height/dbu))
        wg_l = int(round(self.taper_wg_length/dbu))
        wg_w = int(round(self.wg_width/dbu))
        # Outline: waveguide body from x=-wg_l to x=-tri_b, flaring to a
        # triangular tip that reaches (0, +/-(wg_w/2 + tri_h)).
        outline = [
            Point(-wg_l, wg_w/2),
            Point(-tri_b, wg_w/2),
            Point(0, wg_w/2 + tri_h),
            Point(0, -(wg_w/2 + tri_h)),
            Point(-tri_b, -wg_w/2),
            Point(-wg_l, -wg_w/2),
        ]
        shapes(LayerSiN).insert(Polygon(outline))
        # Pins on the bus waveguide side; shorten the pin for very short
        # waveguides (a third of the length, rounded up to an even number).
        pin_len = 200
        if wg_l < pin_len + 1:
            pin_len = int(wg_l/3)
            pin_len = math.ceil(pin_len / 2.) * 2
            if pin_len == 0:
                pin_len = 2
        t = Trans(Trans.R0, -wg_l, 0)
        pin = pya.Path([Point(-pin_len/2, 0), Point(pin_len/2, 0)], wg_w)
        shapes(LayerPinRecN).insert(pin.transformed(t))
        label = shapes(LayerPinRecN).insert(Text("pin1", t))
        label.text_size = 0.4/dbu
        t = Trans(Trans.R0, 0, 0)
        shapes(LayerPinRecN).insert(pin.transformed(t))
        label = shapes(LayerPinRecN).insert(Text("pin2", t))
        label.text_size = 0.4/dbu
        # Create the device recognition layer -- make it 1 * wg_width away from the waveguides.
        #box1 = Box(w/2+height, -(w/2+height), -l, -1)
        #shapes(LayerDevRecN).insert(box1)
        return "wg_triangle_taper"
def layout_waveguide_abs(cell, layer, points, w, radius):
    """Place a waveguide along absolute-coordinate vertices.

    Shifts the vertex list so the first vertex becomes the origin, then
    delegates to layout_waveguide_rel with that first vertex as the
    start point.

    cell:   cell into which to place the waveguide
    layer:  layer to draw on
    points: array of [x, y] vertices, absolute coordinates (microns)
    w:      waveguide width (microns)
    radius: bend radius (microns)

    Example:
      cell = pya.Application.instance().main_window().current_view().active_cellview().cell
      LayerSi = LayerInfo(1, 0)
      points = [ [15, 2.75], [30, 2.75] ]  # units of microns
      layout_waveguide_abs(cell, LayerSi, points, 0.5, 10)
    """
    if MODULE_NUMPY:
        # numpy version: vectorized shift; the caller's list is untouched.
        points = n.array(points)
        start_point = points[0]
        points = points - start_point
    else:
        # Fix: build a new shifted list instead of mutating the caller's
        # input in place (the numpy branch never modified it).
        start_point = [points[0][0], points[0][1]]
        points = [[p[0] - start_point[0], p[1] - start_point[1]] for p in points]
    layout_waveguide_rel(cell, layer, start_point, points, w, radius)
def layout_waveguide_rel(cell, layer, start_point, points, w, radius):
    """Place a waveguide along vertices relative to a start point.

    Builds a DPath through the given vertices, instantiates the Basic
    library "ROUND_PATH" PCell with the requested bend radius, and
    inserts it at start_point (converted to database units).

    cell:        cell into which to place the waveguide
    layer:       layer to draw on
    start_point: starting vertex for the waveguide (microns)
    points:      array of [x, y] vertices, relative to start_point (microns)
    w:           waveguide width (microns)
    radius:      bend radius (microns)

    Example:
      cell = pya.Application.instance().main_window().current_view().active_cellview().cell
      LayerSi = LayerInfo(1, 0)
      points = [ [15, 2.75], [30, 2.75] ]  # units of microns
      layout_waveguide_rel(cell, LayerSi, [0,0], points, 0.5, 10)
    """
    layout = cell.layout()
    dbu = layout.dbu
    origin_dbu = [start_point[0] / dbu, start_point[1] / dbu]
    vertices = [pya.DPoint(float(pt[0]), float(pt[1])) for pt in points]
    wg_path = pya.DPath(vertices, w)
    npoints = points_per_circle(radius / dbu)
    round_path_params = {"npoints": npoints, "radius": float(radius),
                         "path": wg_path, "layer": layer}
    pcell = layout.create_cell("ROUND_PATH", "Basic", round_path_params)
    # Place the PCell at the requested origin.
    placement = Trans(Point(origin_dbu[0], origin_dbu[1]))
    cell.insert(pya.CellInstArray(pcell.cell_index(), placement))
class H0c_Test_Structure(pya.PCellDeclarationHelper):
    """
    The PCell declaration for the test structure with grating couplers and waveguides and a photonic crystal cavity.

    Assembles an H0-cavity measurement structure: three "SWG Fibre
    Coupler" cells stacked on a 127 um pitch, an "H0 cavity with
    waveguide" cell, triangular tapers at the slab edges, and routed
    waveguides to the in, through and coupled ports.
    """
    def __init__(self):
        # Important: initialize the super class
        super(H0c_Test_Structure, self).__init__()
        #taper/wg parameters
        self.param("tri_base", self.TypeDouble, "Taper Triangle Base (microns)", default = 0.363)
        self.param("tri_height", self.TypeDouble, "Taper Triangle Height (microns)", default = 0.426)
        self.param("taper_wg_length", self.TypeDouble, "Taper Length (microns)", default = 5)
        self.param("wg_bend_radius", self.TypeDouble, "Waveguide Bend Radius (microns)", default = 15)
        #photonic crystal cavity
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.744)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 30)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.179)
        self.param("wg_dis", self.TypeInt, "Waveguide distance (number of holes)", default = 3)
        # S1x..S5x / S1y,S2y: cavity hole-shift parameters forwarded to the
        # "H0 cavity with waveguide" PCell.
        self.param("S1x", self.TypeDouble, "S1x shift", default = 0.28)
        self.param("S2x", self.TypeDouble, "S2x shift", default = 0.193)
        self.param("S3x", self.TypeDouble, "S3x shift", default = 0.194)
        self.param("S4x", self.TypeDouble, "S4x shift", default = 0.162)
        self.param("S5x", self.TypeDouble, "S5x shift", default = 0.113)
        self.param("S1y", self.TypeDouble, "S1y shift", default = -0.016)
        self.param("S2y", self.TypeDouble, "S2y shift", default = 0.134)
        self.param("phc_xdis", self.TypeDouble, "Distance from GC to middle of Cavity", default = 35)
        #GC parameters (forwarded to the "SWG Fibre Coupler" PCell)
        self.param("wavelength", self.TypeDouble, "Design Wavelength (micron)", default = 2.9)
        self.param("n_t", self.TypeDouble, "Fiber Mode", default = 1.0)
        self.param("n_e", self.TypeDouble, "Grating Index Parameter", default = 3.1)
        self.param("angle_e", self.TypeDouble, "Taper Angle (deg)", default = 20.0)
        self.param("grating_length", self.TypeDouble, "Grating Length (micron)", default = 32.0)
        self.param("taper_length", self.TypeDouble, "Taper Length (micron)", default = 32.0)
        self.param("dc", self.TypeDouble, "Duty Cycle", default = 0.488193)
        self.param("period", self.TypeDouble, "Grating Period", default = 1.18939)
        self.param("ff", self.TypeDouble, "Fill Factor", default = 0.244319)
        self.param("t", self.TypeDouble, "Waveguide Width (micron)", default = 1.0)
        self.param("theta_c", self.TypeDouble, "Insertion Angle (deg)", default = 8.0)
        #Layer Parameters
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
    def can_create_from_shape_impl(self):
        # This PCell can never be derived from an existing shape.
        return False
    def produce_impl(self):
        # This is the main part of the implementation: create the layout
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        cell = self.cell
        shapes = self.cell.shapes
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        a = self.a
        n = self.n
        wg_dis = self.wg_dis
        phc_xdis = self.phc_xdis
        wg_bend_radius = self.wg_bend_radius
        wg_width = self.t
        # Slab length along x depends on the parity of wg_dis.
        # NOTE(review): the parity mapping here (even -> n*a) is the
        # opposite of L3c_Test_Structure's — confirm which is intended.
        if (wg_dis)%2 == 0:
            length_slab_x = n*a
        else:
            length_slab_x = (n-1)*a
        half_slab_x = length_slab_x/2
        # Photonic crystal cavity, placed relative to the middle coupler
        # at y = 127 um, offset down by the waveguide-row distance.
        param_phc = {"a": self.a, "n": self.n, "r": self.r, "wg_dis": self.wg_dis,
        "S1x": self.S1x, "S2x": self.S2x,"S3x": self.S3x,"S4x": self.S4x,"S5x": self.S5x,
        "S1y": self.S1y,"S2y": self.S2y,
        "layer": LayerSi, "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_phc = ly.create_cell("H0 cavity with waveguide", "SiQL_PCells", param_phc )
        t_phc = Trans(Trans.R0,phc_xdis/dbu,(127)/dbu-(math.sqrt(3)/2*a*(wg_dis+1))/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_phc.cell_index(), t_phc))
        # Column of three grating couplers on a 127 um pitch
        # (presumably the fibre-array spacing — confirm).
        param_GC = {"wavelength": self.wavelength, "n_t":self.n_t, "n_e":self.n_e, "angle_e":self.angle_e,
        "grating_length":self.grating_length, "taper_length":self.taper_length, "dc":self.dc, "period":self.period,
        "ff":self.ff, "t":self.t, "theta_c":self.theta_c,
        "layer": LayerSi, "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_GC = ly.create_cell("SWG Fibre Coupler", "SiQL_PCells", param_GC )
        t_GC = Trans(Trans.R0, 0,0)
        instance = cell.insert(pya.CellInstArray(pcell_GC.cell_index(), t_GC, Point(0,127/dbu), Point(0,0), 3, 1))
        # Three triangular tapers: input (left edge), through port (right
        # edge, rotated 180), coupled port (right edge, lower row).
        param_taper = {"tri_base": self.tri_base, "tri_height":self.tri_height,
        "taper_wg_length":self.taper_wg_length, "silayer":LayerSi,
        "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_taper = ly.create_cell("Waveguide Triangle Tapers","SiQL_PCells",param_taper)
        t_taper1 = Trans(Trans.R0,(phc_xdis-half_slab_x)/dbu,(127)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_taper.cell_index(), t_taper1))
        pcell_taper2 = ly.create_cell("Waveguide Triangle Tapers","SiQL_PCells",param_taper)
        t_taper2 = Trans(Trans.R180, (phc_xdis+half_slab_x)/dbu,(127)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_taper2.cell_index(), t_taper2))
        pcell_taper3 = ly.create_cell("Waveguide Triangle Tapers","SiQL_PCells",param_taper)
        t_taper3 = Trans(Trans.R180, (phc_xdis+half_slab_x)/dbu,(127-2*(wg_dis+1)*math.sqrt(3)/2*a)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_taper3.cell_index(), t_taper3))
        # gc middle to in port
        points = [ [0, 127], [ phc_xdis-half_slab_x-self.taper_wg_length , 127] ]
        layout_waveguide_abs(cell, LayerSi, points, wg_width, wg_bend_radius)
        # gc top to through port
        points2 = [ [0, 254], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 254], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 127], [ (phc_xdis+half_slab_x+self.taper_wg_length) , 127] ]
        layout_waveguide_abs(cell, LayerSi, points2, wg_width, wg_bend_radius)
        # gc bottom to coupled port
        points3 = [ [0, 0], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 0], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 127-2*(wg_dis+1)*a*math.sqrt(3)/2], [ (phc_xdis+half_slab_x+self.taper_wg_length) , 127-2*(wg_dis+1)*a*math.sqrt(3)/2] ]
        layout_waveguide_abs(cell, LayerSi, points3, wg_width, wg_bend_radius)
class H0c_oxide_Test_Structure(pya.PCellDeclarationHelper):
    """
    The PCell declaration for the test structure with grating couplers and waveguides and a photonic crystal cavity.

    Oxide (non-undercut) variant of H0c_Test_Structure: identical
    coupler/taper/routing layout, but instantiates the
    "H0 cavity with waveguide, no etching" cavity cell.
    """
    def __init__(self):
        # Important: initialize the super class
        super(H0c_oxide_Test_Structure, self).__init__()
        #taper/wg parameters
        self.param("tri_base", self.TypeDouble, "Taper Triangle Base (microns)", default = 0.363)
        self.param("tri_height", self.TypeDouble, "Taper Triangle Height (microns)", default = 0.426)
        self.param("taper_wg_length", self.TypeDouble, "Taper Length (microns)", default = 5)
        self.param("wg_bend_radius", self.TypeDouble, "Waveguide Bend Radius (microns)", default = 15)
        #photonic crystal cavity
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.690)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 30)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.125)
        self.param("wg_dis", self.TypeInt, "Waveguide distance (number of holes)", default = 3)
        # NOTE(review): n_bus, n_vertices and the S-shift parameters are
        # declared but not forwarded to the cavity cell in produce_impl —
        # confirm whether they should be included in param_phc.
        self.param("n_bus", self.TypeInt, "Bus number, 1 or 2 ", default = 2)
        self.param("n_vertices", self.TypeInt, "Vertices of a hole", default = 20)
        self.param("S1x", self.TypeDouble, "S1x shift", default = 0.28)
        self.param("S2x", self.TypeDouble, "S2x shift", default = 0.193)
        self.param("S3x", self.TypeDouble, "S3x shift", default = 0.194)
        self.param("S4x", self.TypeDouble, "S4x shift", default = 0.162)
        self.param("S5x", self.TypeDouble, "S5x shift", default = 0.113)
        self.param("S1y", self.TypeDouble, "S1y shift", default = -0.016)
        self.param("S2y", self.TypeDouble, "S2y shift", default = 0.134)
        self.param("phc_xdis", self.TypeDouble, "Distance from GC to middle of Cavity", default = 35)
        #GC parameters (forwarded to the "SWG Fibre Coupler" PCell)
        self.param("wavelength", self.TypeDouble, "Design Wavelength (micron)", default = 2.9)
        self.param("n_t", self.TypeDouble, "Fiber Mode", default = 1.0)
        self.param("n_e", self.TypeDouble, "Grating Index Parameter", default = 3.1)
        self.param("angle_e", self.TypeDouble, "Taper Angle (deg)", default = 20.0)
        self.param("grating_length", self.TypeDouble, "Grating Length (micron)", default = 32.0)
        self.param("taper_length", self.TypeDouble, "Taper Length (micron)", default = 32.0)
        self.param("dc", self.TypeDouble, "Duty Cycle", default = 0.488193)
        self.param("period", self.TypeDouble, "Grating Period", default = 1.18939)
        self.param("ff", self.TypeDouble, "Fill Factor", default = 0.244319)
        self.param("t", self.TypeDouble, "Waveguide Width (micron)", default = 1.0)
        self.param("theta_c", self.TypeDouble, "Insertion Angle (deg)", default = 8.0)
        #Layer Parameters
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
    def can_create_from_shape_impl(self):
        # This PCell can never be derived from an existing shape.
        return False
    def produce_impl(self):
        # This is the main part of the implementation: create the layout
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        cell = self.cell
        shapes = self.cell.shapes
        LayerSi = self.layer
        LayerSiN = ly.layer(LayerSi)
        a = self.a
        n = self.n
        wg_dis = self.wg_dis
        phc_xdis = self.phc_xdis
        wg_bend_radius = self.wg_bend_radius
        wg_width = self.t
        # Slab length along x depends on the parity of wg_dis.
        if (wg_dis)%2 == 0:
            length_slab_x = n*a
        else:
            length_slab_x = (n-1)*a
        half_slab_x = length_slab_x/2
        # Non-etched (oxide-clad) cavity cell, placed relative to the
        # middle coupler at y = 127 um.
        param_phc = {"a": self.a, "n": self.n, "r": self.r, "wg_dis": self.wg_dis,
        "layer": LayerSi, "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_phc = ly.create_cell("H0 cavity with waveguide, no etching", "SiQL_PCells", param_phc )
        t_phc = Trans(Trans.R0,phc_xdis/dbu,(127)/dbu-(math.sqrt(3)/2*a*(wg_dis+1))/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_phc.cell_index(), t_phc))
        # Column of three grating couplers on a 127 um pitch.
        param_GC = {"wavelength": self.wavelength, "n_t":self.n_t, "n_e":self.n_e, "angle_e":self.angle_e,
        "grating_length":self.grating_length, "taper_length":self.taper_length, "dc":self.dc, "period":self.period,
        "ff":self.ff, "t":self.t, "theta_c":self.theta_c,
        "layer": LayerSi, "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_GC = ly.create_cell("SWG Fibre Coupler", "SiQL_PCells", param_GC )
        t_GC = Trans(Trans.R0, 0,0)
        instance = cell.insert(pya.CellInstArray(pcell_GC.cell_index(), t_GC, Point(0,127/dbu), Point(0,0), 3, 1))
        # Three triangular tapers at the slab edges (input, through, coupled).
        param_taper = {"tri_base": self.tri_base, "tri_height":self.tri_height,
        "taper_wg_length":self.taper_wg_length, "silayer":LayerSi,
        "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_taper = ly.create_cell("Waveguide Triangle Tapers","SiQL_PCells",param_taper)
        t_taper1 = Trans(Trans.R0,(phc_xdis-half_slab_x)/dbu,(127)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_taper.cell_index(), t_taper1))
        pcell_taper2 = ly.create_cell("Waveguide Triangle Tapers","SiQL_PCells",param_taper)
        t_taper2 = Trans(Trans.R180, (phc_xdis+half_slab_x)/dbu,(127)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_taper2.cell_index(), t_taper2))
        pcell_taper3 = ly.create_cell("Waveguide Triangle Tapers","SiQL_PCells",param_taper)
        t_taper3 = Trans(Trans.R180, (phc_xdis+half_slab_x)/dbu,(127-2*(wg_dis+1)*math.sqrt(3)/2*a)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_taper3.cell_index(), t_taper3))
        # gc middle to in port
        points = [ [0, 127], [ phc_xdis-half_slab_x-self.taper_wg_length , 127] ]
        layout_waveguide_abs(cell, LayerSi, points, wg_width, wg_bend_radius)
        # gc top to through port
        points2 = [ [0, 254], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 254], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 127], [ (phc_xdis+half_slab_x+self.taper_wg_length) , 127] ]
        layout_waveguide_abs(cell, LayerSi, points2, wg_width, wg_bend_radius)
        # gc bottom to coupled port
        points3 = [ [0, 0], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 0], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 127-2*(wg_dis+1)*a*math.sqrt(3)/2], [ (phc_xdis+half_slab_x+self.taper_wg_length) , 127-2*(wg_dis+1)*a*math.sqrt(3)/2] ]
        layout_waveguide_abs(cell, LayerSi, points3, wg_width, wg_bend_radius)
class L3c_Test_Structure(pya.PCellDeclarationHelper):
    """
    The PCell declaration for the test structure with grating couplers and waveguides and a photonic crystal cavity.

    L3-cavity variant: three "SWG Fibre Coupler" cells on a 127 um
    pitch, an "L3 cavity with waveguide" cell, triangular tapers at the
    slab edges, and routed waveguides to the in, through and coupled
    ports.
    """
    def __init__(self):
        # Important: initialize the super class
        super(L3c_Test_Structure, self).__init__()
        #taper parameters
        self.param("tri_base", self.TypeDouble, "Taper Triangle Base (microns)", default = 0.363)
        self.param("tri_height", self.TypeDouble, "Taper Triangle Height (microns)", default = 0.426)
        self.param("taper_wg_length", self.TypeDouble, "Taper Length (microns)", default = 5)
        self.param("w", self.TypeDouble, "Waveguide Width", default = 1.0)
        self.param("wg_bend_radius", self.TypeDouble, "Waveguide Bend Radius (microns)", default = 15)
        #photonic crystal cavity
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.720)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 34)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.181)
        self.param("wg_dis", self.TypeInt, "Waveguide distance (number of holes)", default = 3)
        # S1x..S5x: cavity hole-shift parameters forwarded to the
        # "L3 cavity with waveguide" PCell.
        self.param("S1x", self.TypeDouble, "S1x shift", default = 0.337)
        self.param("S2x", self.TypeDouble, "S2x shift", default = 0.27)
        self.param("S3x", self.TypeDouble, "S3x shift", default = 0.088)
        self.param("S4x", self.TypeDouble, "S4x shift", default = 0.323)
        self.param("S5x", self.TypeDouble, "S5x shift", default = 0.0173)
        self.param("phc_xdis", self.TypeDouble, "Distance from GC to middle of Cavity", default = 35)
        #GC parameters (forwarded to the "SWG Fibre Coupler" PCell)
        self.param("wavelength", self.TypeDouble, "Design Wavelength (micron)", default = 2.9)
        self.param("n_t", self.TypeDouble, "Fiber Mode", default = 1.0)
        self.param("n_e", self.TypeDouble, "Grating Index Parameter", default = 3.1)
        self.param("angle_e", self.TypeDouble, "Taper Angle (deg)", default = 20.0)
        self.param("grating_length", self.TypeDouble, "Grating Length (micron)", default = 32.0)
        self.param("taper_length", self.TypeDouble, "Taper Length (micron)", default = 32.0)
        self.param("dc", self.TypeDouble, "Duty Cycle", default = 0.488193)
        self.param("period", self.TypeDouble, "Grating Period", default = 1.18939)
        self.param("ff", self.TypeDouble, "Fill Factor", default = 0.244319)
        self.param("t", self.TypeDouble, "Waveguide Width (micron)", default = 1.0)
        self.param("theta_c", self.TypeDouble, "Insertion Angle (deg)", default = 8.0)
        #Layer Parameters
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
    def can_create_from_shape_impl(self):
        # This PCell can never be derived from an existing shape.
        return False
    def produce_impl(self):
        # This is the main part of the implementation: create the layout
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        cell = self.cell
        shapes = self.cell.shapes
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        a = self.a
        n = self.n
        wg_dis = self.wg_dis
        phc_xdis = self.phc_xdis
        wg_bend_radius = self.wg_bend_radius
        # NOTE: routing width uses self.w here, not the GC width self.t.
        wg_width = self.w
        # Slab length along x depends on the parity of wg_dis.
        # NOTE(review): the parity mapping here (even -> (n-1)*a) is the
        # opposite of H0c_Test_Structure's — confirm which is intended.
        if wg_dis%2 == 0:
            length_slab_x = (n-1)*a
        else:
            length_slab_x = n*a
        half_slab_x = length_slab_x/2
        # L3 cavity cell, placed relative to the middle coupler at y = 127 um.
        param_phc = {"a": self.a, "n": self.n, "r": self.r, "wg_dis": self.wg_dis, "S1x":self.S1x, "S2x":self.S2x, "S3x":self.S3x, "S4x":self.S4x, "S5x":self.S5x,
        "layer": self.layer, "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_phc = ly.create_cell("L3 cavity with waveguide", "SiQL_PCells", param_phc )
        t1 = Trans(Trans.R0,phc_xdis/dbu,(127)/dbu-(math.sqrt(3)/2*a*(wg_dis+1))/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_phc.cell_index(), t1))
        # Column of three grating couplers on a 127 um pitch.
        param_GC = {"wavelength": self.wavelength, "n_t":self.n_t, "n_e":self.n_e, "angle_e":self.angle_e,
        "grating_length":self.grating_length, "taper_length":self.taper_length, "dc":self.dc, "period":self.period,
        "ff":self.ff, "t":self.t, "theta_c":self.theta_c,
        "layer": LayerSi, "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_GC = ly.create_cell("SWG Fibre Coupler", "SiQL_PCells", param_GC )
        t_GC = Trans(Trans.R0, 0,0)
        instance = cell.insert(pya.CellInstArray(pcell_GC.cell_index(), t_GC, Point(0,127/dbu), Point(0,0), 3, 1))
        # Three triangular tapers at the slab edges (input, through, coupled).
        param_taper = {"tri_base": self.tri_base, "tri_height":self.tri_height,
        "taper_wg_length":self.taper_wg_length, "wg_width": self.w, "silayer":LayerSi,
        "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_taper = ly.create_cell("Waveguide Triangle Tapers","SiQL_PCells",param_taper)
        t_taper1 = Trans(Trans.R0,(phc_xdis-half_slab_x)/dbu,(127)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_taper.cell_index(), t_taper1))
        pcell_taper2 = ly.create_cell("Waveguide Triangle Tapers","SiQL_PCells",param_taper)
        t_taper2 = Trans(Trans.R180, (phc_xdis+half_slab_x)/dbu,(127)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_taper2.cell_index(), t_taper2))
        pcell_taper3 = ly.create_cell("Waveguide Triangle Tapers","SiQL_PCells",param_taper)
        t_taper3 = Trans(Trans.R180, (phc_xdis+half_slab_x)/dbu,(127-2*(wg_dis+1)*math.sqrt(3)/2*a)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_taper3.cell_index(), t_taper3))
        # gc middle to in port
        points = [ [0, 127], [ phc_xdis-half_slab_x-self.taper_wg_length , 127] ]
        layout_waveguide_abs(cell, LayerSi, points, wg_width, wg_bend_radius)
        # gc top to through port
        points2 = [ [0, 254], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 254], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 127], [ (phc_xdis+half_slab_x+self.taper_wg_length) , 127] ]
        layout_waveguide_abs(cell, LayerSi, points2, wg_width, wg_bend_radius)
        # gc bottom to coupled port
        points3 = [ [0, 0], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 0], [ (phc_xdis+half_slab_x+self.taper_wg_length)+wg_bend_radius , 127-2*(wg_dis+1)*a*math.sqrt(3)/2], [ (phc_xdis+half_slab_x+self.taper_wg_length) , 127-2*(wg_dis+1)*a*math.sqrt(3)/2] ]
        layout_waveguide_abs(cell, LayerSi, points3, wg_width, wg_bend_radius)
class GC_to_GC_ref1(pya.PCellDeclarationHelper):
    """PCell: grating-coupler-to-grating-coupler reference structure.

    Currently places a single "SWG Fibre Coupler" (default parameters)
    at the origin; the loop-back waveguide routing between couplers is
    not yet implemented (see the TODO in produce_impl).
    """
    def __init__(self):
        # Important: initialize the super class
        super(GC_to_GC_ref1, self).__init__()
        # waveguide parameters
        self.param("wg_radius", self.TypeDouble, "Waveguide Radius (microns)", default = 15)
        # Description fixed: this is the waveguide width (the original
        # label was a copy-paste of wg_xdis's "Waveguide x Distance").
        self.param("wg_width", self.TypeDouble, "Waveguide Width (microns)", default = 1)
        self.param("wg_xdis", self.TypeDouble, "Waveguide x Distance (microns)", default = 5)
        #Layer Parameters
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
    def can_create_from_shape_impl(self):
        # This PCell can never be derived from an existing shape.
        return False
    def produce_impl(self):
        """Create the layout: one grating coupler at the origin."""
        # fetch the parameters and register the declared layers
        dbu = self.layout.dbu
        ly = self.layout
        cell = self.cell
        shapes = self.cell.shapes
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        wg_r = self.wg_radius
        wg_w = self.wg_width
        wg_xdis = self.wg_xdis
        # Instantiate the grating coupler with its default parameters.
        param_GC = { "layer": LayerSi, "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_GC = ly.create_cell("SWG Fibre Coupler", "SiQL_PCells", param_GC )
        cell.insert(pya.CellInstArray(pcell_GC.cell_index(), pya.Trans(pya.Trans.R0, 0, 0)))
        # TODO(review): the loop-back waveguide between the two couplers
        # is still missing; the intended route was
        #   [[0, 0], [wg_r+wg_xdis, 0], [wg_r+wg_xdis, 127], [0, 127]]
        # via layout_waveguide_abs(cell, LayerSi, ..., wg_w, wg_r).
        # (Debug print("test") and the early return that dead-ended this
        # method were removed.)
class PhC_W1wg(pya.PCellDeclarationHelper):
    """PCell: hexagonal-lattice photonic crystal slab with a W1-style
    line-defect waveguide.

    Rasters a triangular lattice of circular holes over a Si slab,
    leaving the hole row at ``wg_dis``+1 empty to form the waveguide,
    subtracts the holes from the slab, optionally draws an oxide-etch
    box, and places pin markers plus a DevRec outline.

    (Docstring fixed: the original "Input: length, width" was a
    copy-paste from an unrelated cell.  The unused class-level
    ``import numpy`` was also removed.)
    """
    def __init__(self):
        # Important: initialize the super class
        super(PhC_W1wg, self).__init__()
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.744)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 30)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.179)
        self.param("wg_dis", self.TypeInt, "Waveguide distance (number of holes)", default = 2)
        self.param("n_vertices", self.TypeInt, "Vertices of a hole", default = 32)
        self.param("etch_condition", self.TypeInt, "Etch = 1, No Etch = 2", default = 1)
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
        self.param("etch", self.TypeLayer, "oxide etch layer", default = pya.LayerInfo(12, 0))
    def coerce_parameters_impl(self):
        # No parameter coercion is needed for this cell.
        pass
    def can_create_from_shape(self, layout, shape, layer):
        # This PCell can never be derived from an existing shape.
        return False
    def produce_impl(self):
        # fetch the parameters and register the declared layers
        dbu = self.layout.dbu
        ly = self.layout
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        LayerEtch = ly.layer(self.etch)
        # Fetch all the parameters (lengths in database units):
        a = self.a/dbu
        r = self.r/dbu
        wg_dis = self.wg_dis+1
        n_vertices = self.n_vertices
        n = int(math.ceil(self.n/2))
        # Single-bus configuration is hard-wired; the n_bus == 2 branches
        # below are therefore never taken.
        n_bus = 1
        etch_condition = self.etch_condition
        if n_bus == 1:
            # No hole shifts for the plain W1 waveguide.
            Sx = [0,0,0,0,0]
            Sy = [0,0,0]
        # Slab length along x depends on waveguide-row parity.
        if wg_dis%2 == 0:
            length_slab_x = (2*n-1)*a
        else:
            length_slab_x = 2*n*a
        length_slab_y = 2*(wg_dis+15)*a*math.sqrt(3)/2
        n_x = n
        n_y = wg_dis+10
        # Define Si slab and hole region for future subtraction
        Si_slab = pya.Region()
        Si_slab.insert(pya.Box(-length_slab_x/2, -length_slab_y/2, length_slab_x/2, length_slab_y/2))
        hole = pya.Region()
        hole_r = r
        # add suspension beams at the four slab corners
        beam_width = 3/dbu
        beam_length = 20/dbu
        beam_x_0 = 8*a
        beam_y_0 = beam_length/2+length_slab_y/2-5000
        for i in (-1,1):
            for j in (-1,1):
                beam_x = i*beam_x_0
                beam_y = j*beam_y_0
                Si_slab.insert(pya.Box(-beam_width/2+beam_x, -length_slab_y/2+beam_y, beam_width/2+beam_x, length_slab_y/2+beam_y))
        # function to generate points to create a circle
        def circle(x,y,r):
            npts = n_vertices
            theta = 2 * math.pi / npts # increment, in radians
            pts = []
            for i in range(0, npts):
                pts.append(Point.from_dpoint(pya.DPoint((x+r*math.cos(i*theta))/1, (y+r*math.sin(i*theta))/1)))
            return pts
        # raster through all holes with shifts and waveguide;
        # the row j == wg_dis is left empty to form the line defect.
        hole_cell = circle(0,0,hole_r)
        hole_poly = pya.Polygon(hole_cell)
        for j in range(-n_y,n_y+1):
            if j%2 == 0 and j != wg_dis:
                for i in range(-n_x,n_x+1):
                    if j == -wg_dis and i > 3 and n_bus == 2:
                        # second-bus gap (unreachable while n_bus == 1)
                        pass
                    elif j == 0 and i in (1,-1,2,-2,3,-3,4,-4,5,-5):
                        # centre-row holes with (zero) Sx shifts applied
                        hole_x = abs(i)/i*(abs(i)-0.5+Sx[abs(i)-1])*a
                        hole_y = 0
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
                    elif i!=0:
                        # even rows are offset by half a lattice period
                        hole_x = abs(i)/i*(abs(i)-0.5)*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
            elif j%2 == 1 and j != wg_dis:
                for i in range(-n_x,n_x+1):
                    if j == -wg_dis and i > 3 and n_bus == 2:
                        # second-bus gap (unreachable while n_bus == 1)
                        pass
                    elif i == 0 and j in (1,-1,3,-3):
                        # centre-column holes with (zero) Sy shifts applied
                        hole_x = 0
                        hole_y = j*a*(math.sqrt(3)/2)+abs(j)/j*a*Sy[abs(j)-1]
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
                    else:
                        hole_x = i*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t = hole_poly.transformed(hole_trans)
                        hole.insert(hole_t)
        # Subtract the holes from the slab and draw the result.
        phc = Si_slab - hole
        self.cell.shapes(LayerSiN).insert(phc)
        if etch_condition == 1:
            # Oxide-etch window, inset from the slab outline.
            box_etch = pya.Box(-(length_slab_x/2-3000), -(length_slab_y/2-6000), length_slab_x/2-3000, length_slab_y/2-6000)
            self.cell.shapes(LayerEtch).insert(box_etch)
        # Pins on the waveguide:
        pin_length = 200
        pin_w = a
        wg_pos = a*math.sqrt(3)/2*wg_dis
        t = pya.Trans(Trans.R0, -length_slab_x/2,wg_pos)
        pin = pya.Path([pya.Point(-pin_length/2, 0), pya.Point(pin_length/2, 0)], pin_w)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin1", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        t = pya.Trans(Trans.R0, length_slab_x/2,wg_pos)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin2", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        #pin for drop waveguide (unreachable while n_bus == 1)
        if n_bus == 2:
            t = pya.Trans(Trans.R0, length_slab_x/2,-wg_pos)
            pin_t = pin.transformed(t)
            self.cell.shapes(LayerPinRecN).insert(pin_t)
            text = pya.Text ("pin3", t)
            shape = self.cell.shapes(LayerPinRecN).insert(text)
            shape.text_size = 0.4/dbu
        # Create the device recognition layer -- make it 1 * wg_width away from the waveguides.
        points = [[-length_slab_x/2,0], [length_slab_x/2, 0]]
        points = [Point(each[0], each[1]) for each in points]
        path = Path(points, length_slab_y)
        self.cell.shapes(LayerDevRecN).insert(path.simple_polygon())
class PhC_W1wg_reference(pya.PCellDeclarationHelper):
    """
    W1 photonic-crystal waveguide reference circuit.

    Lays out a pair of SWG fibre grating couplers (127 um pitch), an H0
    photonic-crystal cavity cell (etched or non-etched variant), triangle
    tapers on both sides of the crystal, and the routing waveguides that
    connect everything into a loop-back test structure.
    """
    import numpy

    def __init__(self):
        # Important: initialize the super class
        super(PhC_W1wg_reference, self).__init__()
        # --- photonic-crystal parameters ---
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.744)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 30)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.179)
        self.param("wg_dis", self.TypeInt, "Waveguide distance (number of holes)", default = 2)
        self.param("n_vertices", self.TypeInt, "Vertices of a hole", default = 32)
        self.param("etch_condition", self.TypeInt, "Etch = 1, No Etch = 2", default = 1)
        self.param("phc_xdis", self.TypeDouble, "Distance to middle of phc", default = 35)
        # --- other waveguide parameters ---
        self.param("wg_radius", self.TypeDouble, "Waveguide Radius (microns)", default = 15)
        # FIX: description previously read "Waveguide Radius (microns)"
        # (copy/paste from wg_radius); this parameter is the waveguide width.
        self.param("wg_width", self.TypeDouble, "Waveguide Width (microns)", default = 1)
        # --- taper parameters ---
        self.param("tri_base", self.TypeDouble, "Taper Triangle Base (microns)", default = 0.363)
        self.param("tri_height", self.TypeDouble, "Taper Triangle Height (microns)", default = 0.426)
        self.param("taper_wg_length", self.TypeDouble, "Taper Length (microns)", default = 5)
        # --- layer parameters ---
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
        self.param("etch", self.TypeLayer, "oxide etch layer", default = pya.LayerInfo(12, 0))

    def can_create_from_shape_impl(self):
        # This PCell cannot be derived from an existing drawn shape.
        return False

    def produce_impl(self):
        """Create the layout: grating couplers, PhC cavity, tapers, routing."""
        # fetch the parameters
        dbu = self.layout.dbu
        ly = self.layout
        cell = self.cell
        shapes = self.cell.shapes
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        wg_r = self.wg_radius
        wg_w = self.wg_width
        phc_xdis = self.phc_xdis
        wg_dis = self.wg_dis
        a = self.a
        w = self.wg_width
        n = self.n
        etch_condition = self.etch_condition

        # Two grating couplers, stacked 127 um apart (standard fibre-array pitch).
        param_GC = {"layer": LayerSi,
                    "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_GC = ly.create_cell("SWG Fibre Coupler", "SiQL_PCells", param_GC)
        t_GC = Trans(Trans.R0, 0, 0)
        instance = cell.insert(pya.CellInstArray(pcell_GC.cell_index(), t_GC,
                                                 Point(0, 127/dbu), Point(0, 0), 2, 1))

        # H0 cavity (etched or non-etched variant).  Both branches share the
        # same placement: shifted down so the crystal's bus waveguide row
        # lines up with y = 0.
        if etch_condition == 1:
            param_phc = {"a": self.a, "n": self.n, "r": self.r, "n_bus": 1,
                         "wg_dis": wg_dis, "etch_condition": etch_condition,
                         "layer": LayerSi, "n_vertices": self.n_vertices,
                         "pinrec": self.pinrec, "devrec": self.devrec, "etch": self.etch}
            pcell_phc = ly.create_cell("H0 cavity with waveguide", "SiQL_PCells", param_phc)
        else:
            param_phc = {"a": self.a, "n": self.n, "r": self.r, "n_bus": 1,
                         "wg_dis": wg_dis, "layer": LayerSi,
                         "n_vertices": self.n_vertices,
                         "pinrec": self.pinrec, "devrec": self.devrec, "etch": self.etch}
            pcell_phc = ly.create_cell("H0 cavity with waveguide, no etching", "SiQL_PCells", param_phc)
        t = Trans(Trans.R0, phc_xdis/dbu, 0-((wg_dis+1)*math.sqrt(3)/2*a)/dbu)
        instance = cell.insert(pya.CellInstArray(pcell_phc.cell_index(), t))

        # Triangle tapers on both ends of the crystal slab (second one rotated 180).
        param_taper = {"tri_base": self.tri_base, "tri_height": self.tri_height,
                       "taper_wg_length": self.taper_wg_length, "wg_width": w,
                       "silayer": LayerSi,
                       "pinrec": self.pinrec, "devrec": self.devrec}
        pcell_taper = ly.create_cell("Waveguide Triangle Tapers", "SiQL_PCells", param_taper)
        t_taper1 = Trans(Trans.R0, (phc_xdis-n*a/2)/dbu, 0)
        instance = cell.insert(pya.CellInstArray(pcell_taper.cell_index(), t_taper1))
        pcell_taper2 = ly.create_cell("Waveguide Triangle Tapers", "SiQL_PCells", param_taper)
        t_taper2 = Trans(Trans.R180, (phc_xdis+n*a/2)/dbu, 0)
        instance = cell.insert(pya.CellInstArray(pcell_taper2.cell_index(), t_taper2))

        # Routing: input coupler -> left taper, then right taper -> second
        # coupler (routes out past the crystal and up 127 um, back to x = 0).
        points = [[0, 0], [phc_xdis-n*a/2-self.taper_wg_length, 0]]
        layout_waveguide_abs(cell, LayerSi, points, wg_w, wg_r)
        points2 = [[phc_xdis+n*a/2+self.taper_wg_length, 0],
                   [phc_xdis+n*a/2+self.taper_wg_length+wg_r, 0],
                   [phc_xdis+n*a/2+self.taper_wg_length+wg_r, 127],
                   [0, 127]]
        layout_waveguide_abs(cell, LayerSi, points2, wg_w, wg_r)
class H0c_new(pya.PCellDeclarationHelper):
    """
    H0 photonic-crystal cavity in a hexagonal hole lattice.

    The cavity is formed by displacing the holes nearest the lattice
    centre: S1x..S5x shift the five holes either side of the centre row
    outward in x, and S1y/S2y shift selected holes above/below the centre
    in y (all shifts in units of the lattice constant).  Line-defect bus
    waveguides (rows of missing holes) are opened `wg_dis` rows from the
    centre; `bus_number` selects one or two buses.  Pins and a DevRec box
    are drawn for routing.  User dimensions are microns; converted to
    database units internally.
    """
    import numpy

    def __init__(self):
        # Important: initialize the super class
        super(H0c_new, self).__init__()
        # lattice geometry
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.744)
        self.param("n", self.TypeInt, "Number of holes in x and y direction", default = 30)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.179)
        self.param("wg_dis", self.TypeInt, "Waveguide distance (number of holes)", default = 3)
        # cavity hole shifts (fractions of the lattice constant)
        self.param("S1x", self.TypeDouble, "S1x shift", default = 0.28)
        self.param("S2x", self.TypeDouble, "S2x shift", default = 0.193)
        self.param("S3x", self.TypeDouble, "S3x shift", default = 0.194)
        self.param("S4x", self.TypeDouble, "S4x shift", default = 0.162)
        self.param("S5x", self.TypeDouble, "S5x shift", default = 0.113)
        self.param("S1y", self.TypeDouble, "S1y shift", default = -0.016)
        self.param("S2y", self.TypeDouble, "S2y shift", default = 0.134)
        self.param("bus_number", self.TypeInt, "2 for double, 1 for single, max 2", default = 2)
        # layers
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['Waveguide'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])

    def display_text_impl(self):
        # Provide a descriptive text for the cell
        return "H0 Cavity_a%s-r%.3f-wg_dis%.3f-n%.3f" % \
            (self.a, self.r, self.wg_dis, self.n)

    def coerce_parameters_impl(self):
        # No inter-parameter constraints to enforce.
        pass

    def can_create_from_shape(self, layout, shape, layer):
        # This PCell cannot be derived from an existing drawn shape.
        return False

    def produce_impl(self):
        # fetch the parameters (convert microns -> database units)
        dbu = self.layout.dbu
        ly = self.layout
        bus_n = self.bus_number
        LayerSi = self.layer
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerTextN = ly.layer(self.textl)
        a = self.a/dbu
        r = self.r/dbu
        wg_dis = self.wg_dis+1
        n = int(math.ceil(self.n/2))
        # per-hole shift tables, indexed by |i|-1 / |j|-1
        Sx = [self.S1x,self.S2x,self.S3x,self.S4x,self.S5x]
        Sy = [self.S1y,0,self.S2y]
        # slab width depends on whether the waveguide row is even or odd
        if wg_dis%2 == 0:
            length_slab_x = 2*n*a
        else:
            length_slab_x = (2*n+1)*a
        length_slab_y = 2*(n-2)*a
        # k selects whether the mirror row (-wg_dis) also carries a bus
        if bus_n == 2:
            k = -1
        else:
            k = 1

        #function to creat polygon pts for right half of a hole in a hexagon unit cell
        def hexagon_hole_half(a,r):
            npts = 10
            theta_div = math.pi/3
            theta_div_hole = math.pi/npts
            triangle_length = a/math.sqrt(3)
            pts = []
            # outer hexagon boundary (4 points = right half)
            for i in range(0,4):
                pts.append(Point.from_dpoint(pya.DPoint(triangle_length*math.cos(i*theta_div-math.pi/2), triangle_length*math.sin(i*theta_div-math.pi/2))))
            # inner hole arc (right half-circle)
            for i in range(0, npts+1):
                pts.append(Point.from_dpoint(pya.DPoint(r*math.cos(math.pi/2-i*theta_div_hole), r*math.sin(math.pi/2-i*theta_div_hole))))
            return pts

        # same as hexagon_hole_half but with a slightly larger (x1.235)
        # hexagon, used for the shifted cavity holes so neighbouring cells
        # still overlap after the shift
        def hexagon_shifthole_half(a,r):
            npts = 10
            theta_div = math.pi/3
            theta_div_hole = math.pi/npts
            triangle_length = a*1.235/math.sqrt(3)
            pts = []
            for i in range(0,4):
                pts.append(Point.from_dpoint(pya.DPoint(triangle_length*math.cos(i*theta_div-math.pi/2), triangle_length*math.sin(i*theta_div-math.pi/2))))
            for i in range(0, npts+1):
                pts.append(Point.from_dpoint(pya.DPoint(r*math.cos(math.pi/2-i*theta_div_hole), r*math.sin(math.pi/2-i*theta_div_hole))))
            return pts

        #function to creat polygon pts for right half of a hexagon unit cell
        def hexagon_half(a):
            theta_div = math.pi/3
            triangle_length = a/math.sqrt(3)
            pts = []
            for i in range(0,4):
                pts.append(Point.from_dpoint(pya.DPoint(triangle_length*math.cos(i*theta_div-math.pi/2), triangle_length*math.sin(i*theta_div-math.pi/2))))
            return pts

        #create the right and left half of the hole and hexagon cells
        #hole_cell = pya.Region()
        #hexagon_cell = pya.Region()
        hole = pya.Region()
        hole_cell_pts = hexagon_hole_half(a,r)
        hexagon_pts = hexagon_half(a)
        hole_shiftcell_pts = hexagon_shifthole_half(a,r)
        hole_cell_poly_0 = pya.Polygon(hole_cell_pts)
        hexagon_cell_poly_0 = pya.Polygon(hexagon_pts)
        hole_shiftcell_poly_0 = pya.Polygon(hole_shiftcell_pts)
        # the left halves are the right halves rotated by 180 degrees
        hole_trans = pya.Trans(pya.Trans.R180)
        hole_cell_poly_1 = hole_cell_poly_0.transformed(hole_trans)
        hexagon_cell_poly_1 = hexagon_cell_poly_0.transformed(hole_trans)
        hole_shiftcell_poly_1 = hole_shiftcell_poly_0.transformed(hole_trans)

        #create the photonic crystal with shifts and waveguides
        # j indexes lattice rows (y), i indexes columns (x); even rows are
        # offset half a lattice constant from odd rows
        for j in range(-n+1,n):
            if j%2 == 0:
                for i in range(-n,n+1):
                    #waveguide
                    # bus rows: no hole, insert plain hexagon cells instead
                    if (j == k*wg_dis and i > 3) or (j == wg_dis and i != 0):
                        hole_x = abs(i)/i*(abs(i)-0.5)*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t_0 = hexagon_cell_poly_0.transformed(hole_trans)
                        hole_t_1 = hexagon_cell_poly_1.transformed(hole_trans)
                        hole.insert(hole_t_0)
                        hole.insert(hole_t_1)
                    #filling the edges with half cell
                    elif i in (-n,n) and wg_dis%2 == 1:
                        hole_x = abs(i)/i*(abs(i)+0.5)*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        if i == -n:
                            hole_t = hole_cell_poly_0.transformed(hole_trans)
                        else:
                            hole_t = hole_cell_poly_1.transformed(hole_trans)
                        hole.insert(hole_t)
                        # plus the regular full hole one half-step inboard
                        hole_x = abs(i)/i*(abs(i)-0.5)*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t_0 = hole_cell_poly_0.transformed(hole_trans)
                        hole_t_1 = hole_cell_poly_1.transformed(hole_trans)
                        hole.insert(hole_t_0)
                        hole.insert(hole_t_1)
                    #x shifts
                    # centre row: the 5 holes each side are shifted outward by Sx
                    elif j == 0 and i in (1,-1,2,-2,3,-3,4,-4,5,-5):
                        hole_x = abs(i)/i*(abs(i)-0.5+Sx[abs(i)-1])*a
                        hole_y = 0
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t_0 = hole_shiftcell_poly_0.transformed(hole_trans)
                        hole_t_1 = hole_shiftcell_poly_1.transformed(hole_trans)
                        hole.insert(hole_t_0)
                        hole.insert(hole_t_1)
                    # regular lattice hole (even rows have no i == 0 site)
                    elif i!=0:
                        hole_x = abs(i)/i*(abs(i)-0.5)*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t_0 = hole_cell_poly_0.transformed(hole_trans)
                        hole_t_1 = hole_cell_poly_1.transformed(hole_trans)
                        hole.insert(hole_t_0)
                        hole.insert(hole_t_1)
            elif j%2 == 1:
                for i in range(-n,n+1):
                    #waveguide
                    if (j == k*wg_dis and i > 3) or j == wg_dis:
                        hole_x = i*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t_0 = hexagon_cell_poly_0.transformed(hole_trans)
                        hole_t_1 = hexagon_cell_poly_1.transformed(hole_trans)
                        hole.insert(hole_t_0)
                        hole.insert(hole_t_1)
                    #filling the edges with half cell
                    elif wg_dis%2 == 0 and i in (-n,n):
                        hole_x = i*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        if i == -n:
                            hole_t = hole_cell_poly_0.transformed(hole_trans)
                        else:
                            hole_t = hole_cell_poly_1.transformed(hole_trans)
                        hole.insert(hole_t)
                    #y shifts
                    # centre column: rows +/-1 and +/-3 shifted vertically by Sy
                    elif i == 0 and j in (1,-1,3,-3):
                        hole_x = 0
                        hole_y = j*a*(math.sqrt(3)/2)+abs(j)/j*a*Sy[abs(j)-1]
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t_0 = hole_shiftcell_poly_0.transformed(hole_trans)
                        hole_t_1 = hole_shiftcell_poly_1.transformed(hole_trans)
                        hole.insert(hole_t_0)
                        hole.insert(hole_t_1)
                    # regular lattice hole
                    else:
                        hole_x = i*a
                        hole_y = j*a*math.sqrt(3)/2
                        hole_trans = pya.Trans(Trans.R0, hole_x,hole_y)
                        hole_t_0 = hole_cell_poly_0.transformed(hole_trans)
                        hole_t_1 = hole_cell_poly_1.transformed(hole_trans)
                        hole.insert(hole_t_0)
                        hole.insert(hole_t_1)
        #print(hole_t_0)
        # patch the centre of the cavity (where no hole cell was placed)
        box_l = a/2
        hole.insert(pya.Box(-box_l,-box_l,box_l,box_l))
        # cover strips closing the top and bottom edges of the slab
        cover_box = pya.Box(-length_slab_x/2, -a/2, length_slab_x/2, a/2)
        box_y = n*a*math.sqrt(3)/2
        cover_box_trans_0 = pya.Trans(Trans.R0, 0,box_y)
        cover_box_trans_1 = pya.Trans(Trans.R0, 0,-box_y)
        cover_box_t_0 = cover_box.transformed(cover_box_trans_0)
        cover_box_t_1 = cover_box.transformed(cover_box_trans_1)
        #hole.insert(pya.Box())
        self.cell.shapes(LayerSiN).insert(hole)
        self.cell.shapes(LayerSiN).insert(cover_box_t_0)
        self.cell.shapes(LayerSiN).insert(cover_box_t_1)

        # Pins on the waveguide:
        pin_length = 200
        pin_w = a
        wg_pos = a*math.sqrt(3)/2*wg_dis
        # pin1: left end of the upper bus waveguide
        t = pya.Trans(Trans.R0, -length_slab_x/2,wg_pos)
        pin = pya.Path([pya.Point(-pin_length/2, 0), pya.Point(pin_length/2, 0)], pin_w)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin1", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        # pin2: right end of the upper bus waveguide
        t = pya.Trans(Trans.R0, length_slab_x/2,wg_pos)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin2", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu
        #pin for drop waveguide
        t = pya.Trans(Trans.R0, length_slab_x/2,-wg_pos)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text ("pin3", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu

        # Create the device recognition layer -- make it 1 * wg_width away from the waveguides.
        points = [[-length_slab_x/2,0], [length_slab_x/2, 0]]
        points = [Point(each[0], each[1]) for each in points]
        path = Path(points, length_slab_y)
        self.cell.shapes(LayerDevRecN).insert(path.simple_polygon())
class pc_gc_hex(pya.PCellDeclarationHelper):
    """
    Photonic-crystal grating coupler on a hexagonal hole lattice.

    Draws a rectangular Si slab perforated by a triangular lattice of
    circular holes (every third column, giving the grating period), with
    optional linear apodization of the hole radius along x.  `positive`
    selects drawing the holes themselves (positive tone, plus an invert
    marker on `invert`) instead of slab-minus-holes.

    NOTE(review): this rewrite removes a large block of unreachable code
    that followed an unconditional `return` at the end of produce_impl
    (it referenced unbound names `x`/`y` and could never have run), plus
    the helpers, duplicate `circle` definition, and debug prints that only
    that dead code (or commented-out code) used.  Live behavior is
    unchanged except that the leftover debug print statements no longer
    spam the log.
    """
    import numpy

    def __init__(self):
        # Important: initialize the super class
        super(pc_gc_hex, self).__init__()
        self.param("a", self.TypeDouble, "lattice constant (microns)", default = 0.243)
        self.param("x", self.TypeInt, "Number of holes in x direction", default = 78)
        self.param("y", self.TypeInt, "Number of holes in y direction", default = 50)
        self.param("r", self.TypeDouble, "hole radius (microns)", default = 0.0735)
        self.param("vertices", self.TypeInt, "Number of vertices in circle", default = 32)
        TECHNOLOGY = get_technology_by_name('EBeam')
        self.param("positive", self.TypeInt, "Positive", default = False)
        self.param("apodized", self.TypeInt, "apodized", default = False)
        self.param("feature_size", self.TypeDouble, "minimum feature size (microns)", default = 0.06)
        self.param("layer", self.TypeLayer, "Layer", default = TECHNOLOGY['31_Si_p6nm'])
        self.param("pinrec", self.TypeLayer, "PinRec Layer", default = TECHNOLOGY['PinRec'])
        self.param("devrec", self.TypeLayer, "DevRec Layer", default = TECHNOLOGY['DevRec'])
        self.param("textl", self.TypeLayer, "Text Layer", default = TECHNOLOGY['Text'])
        self.param("invert", self.TypeLayer, "Layer to invert", default = TECHNOLOGY['Waveguide'])

    def display_text_impl(self):
        # Provide a descriptive text for the cell
        return "pc_gc_hex_a%s-r%.3f" % (self.a, self.r)

    def coerce_parameters_impl(self):
        # No inter-parameter constraints to enforce.
        pass

    def can_create_from_shape(self, layout, shape, layer):
        # This PCell cannot be derived from an existing drawn shape.
        return False

    def produce_impl(self):
        # fetch the parameters (convert microns -> database units)
        dbu = self.layout.dbu
        ly = self.layout
        LayerSiN = ly.layer(self.layer)
        LayerPinRecN = ly.layer(self.pinrec)
        LayerDevRecN = ly.layer(self.devrec)
        LayerInvert = ly.layer(self.invert)
        n_vertices = int(self.vertices)
        a = self.a/dbu
        r = self.r/dbu
        n_x = int(math.ceil(self.x/2))
        n_y = int(math.ceil(self.y/2))
        positive = bool(self.positive)
        minimum_feature = self.feature_size
        apodized = bool(self.apodized)
        length_slab_x = 2*n_x*a
        length_slab_y = 2*(n_y-2)*a

        # Si slab the holes are subtracted from (inset by 2a on the left,
        # a on the right, to leave room for the access waveguide / pin)
        Si_slab = pya.Region()
        Si_slab.insert(pya.Box(-length_slab_x/2+a*2, -length_slab_y/2,
                               length_slab_x/2-a, length_slab_y/2))
        hole = pya.Region()

        # polygon points approximating a circle of radius r centred at (x, y)
        def circle(x, y, r):
            npts = n_vertices
            theta = 2 * math.pi / npts  # increment, in radians
            pts = []
            for i in range(0, npts):
                pts.append(Point.from_dpoint(pya.DPoint((x+r*math.cos(i*theta))/1,
                                                        (y+r*math.sin(i*theta))/1)))
            return pts

        # apodized hole radius for the current column counter; clamped to the
        # minimum feature size, or the nominal radius when not apodizing
        def _hole_radius(apodization):
            radius = (float(apodization)/((n_x*2/3)-1))*r
            if radius < minimum_feature*500:
                radius = minimum_feature*500
            if not apodized:
                radius = r
            return radius

        # raster through all lattice sites; even and odd rows are offset by
        # half a lattice constant, and only every third column carries a hole
        # (the skip counters implement the 1-in-3 / 2-in-3 column pattern)
        for j in range(-n_y, n_y+1):
            if j % 2 == 0:
                skip = 0
                apodization = 0
                for i in range(-n_x, n_x+1):
                    if i == 0:
                        continue
                    if skip == 0:
                        skip = 1
                        continue
                    elif skip == 1:
                        skip = 2
                        continue
                    elif skip == 2:
                        skip = 3
                        apodization = apodization + 1
                        continue
                    elif skip == 3:
                        skip = 1
                    hole_poly = pya.Polygon(circle(0, 0, _hole_radius(apodization)))
                    hole_x = abs(i)/i*(abs(i)-0.5)*a
                    hole_y = j*a*math.sqrt(3)/2
                    hole_trans = pya.Trans(Trans.R0, hole_x, hole_y)
                    hole.insert(hole_poly.transformed(hole_trans))
            elif j % 2 == 1:
                skipodd = 0
                apodization = 0
                for i in range(-n_x, n_x+1):
                    if i == -n_x:
                        continue
                    if i == n_x:
                        continue
                    if skipodd == 0:
                        skipodd = 1
                        continue
                    elif skipodd == 1:
                        skipodd = 2
                        apodization = apodization + 1
                        continue
                    elif skipodd == 2:
                        skipodd = 3
                    elif skipodd == 3:
                        skipodd = 1
                    hole_poly = pya.Polygon(circle(0, 0, _hole_radius(apodization)))
                    hole_x = i*a
                    hole_y = j*a*math.sqrt(3)/2
                    hole_trans = pya.Trans(Trans.R0, hole_x, hole_y)
                    hole.insert(hole_poly.transformed(hole_trans))

        # positive tone: draw the holes; negative tone: slab minus holes
        if positive:
            phc = hole
        else:
            phc = Si_slab - hole
        self.cell.shapes(LayerSiN).insert(phc)

        # Pin on the waveguide (left edge of the slab):
        pin_length = 200
        pin_w = a
        t = pya.Trans(Trans.R0, -length_slab_x/2+a*2, 0)
        pin = pya.Path([pya.Point(-pin_length/2, 0), pya.Point(pin_length/2, 0)], pin_w)
        pin_t = pin.transformed(t)
        self.cell.shapes(LayerPinRecN).insert(pin_t)
        text = pya.Text("pin1", t)
        shape = self.cell.shapes(LayerPinRecN).insert(text)
        shape.text_size = 0.4/dbu

        # Create the device recognition layer -- make it 1 * wg_width away from the waveguides.
        points = [[-length_slab_x/2+a*2, 0], [length_slab_x/2-a, 0]]
        points = [Point(each[0], each[1]) for each in points]
        path = Path(points, length_slab_y)
        self.cell.shapes(LayerDevRecN).insert(path.simple_polygon())
        # positive tone additionally marks the device extent on the invert layer
        if positive:
            self.cell.shapes(LayerInvert).insert(path.simple_polygon())
| 39.919315
| 276
| 0.625458
| 16,924
| 111,814
| 3.962716
| 0.038052
| 0.03677
| 0.013286
| 0.013062
| 0.903139
| 0.890658
| 0.885395
| 0.879848
| 0.872825
| 0.868307
| 0
| 0.032852
| 0.230928
| 111,814
| 2,801
| 277
| 39.919315
| 0.747038
| 0.129268
| 0
| 0.854555
| 0
| 0
| 0.092041
| 0.002793
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047416
| false
| 0.00586
| 0.00959
| 0.014917
| 0.090037
| 0.002131
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2794a3f9f7b3f0033a4dfaf570abe7fe54600096
| 123
|
py
|
Python
|
inac8hr/levels/__init__.py
|
th-bunratta/8hr.insomniac
|
5173500a1ad7197096d513b38258aa65b035fcf3
|
[
"BSD-3-Clause"
] | null | null | null |
inac8hr/levels/__init__.py
|
th-bunratta/8hr.insomniac
|
5173500a1ad7197096d513b38258aa65b035fcf3
|
[
"BSD-3-Clause"
] | null | null | null |
inac8hr/levels/__init__.py
|
th-bunratta/8hr.insomniac
|
5173500a1ad7197096d513b38258aa65b035fcf3
|
[
"BSD-3-Clause"
] | null | null | null |
from inac8hr.levels.base import Level
from inac8hr.levels.lv1_ballot import LV1Level
from inac8hr.levels.tilemaps import *
| 30.75
| 46
| 0.845528
| 18
| 123
| 5.722222
| 0.555556
| 0.320388
| 0.495146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045045
| 0.097561
| 123
| 3
| 47
| 41
| 0.882883
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
279ada69975a3bfae2b2193fbbc223fd014ff481
| 76,803
|
py
|
Python
|
RDF/StitchingFilterCalculator.py
|
NJManganelli/FourTopNAOD
|
9743d5b49bdbad27a74abb7b2d5b7295f678a0e3
|
[
"Apache-2.0"
] | 1
|
2022-01-17T17:29:38.000Z
|
2022-01-17T17:29:38.000Z
|
RDF/StitchingFilterCalculator.py
|
NJManganelli/FourTopNAOD
|
9743d5b49bdbad27a74abb7b2d5b7295f678a0e3
|
[
"Apache-2.0"
] | null | null | null |
RDF/StitchingFilterCalculator.py
|
NJManganelli/FourTopNAOD
|
9743d5b49bdbad27a74abb7b2d5b7295f678a0e3
|
[
"Apache-2.0"
] | 1
|
2021-12-15T10:56:50.000Z
|
2021-12-15T10:56:50.000Z
|
from __future__ import print_function
import os, time
import ROOT
import collections
from IPython.display import Image, display, SVG
#import graphviz
useSpark = True
if useSpark:
import PyRDF
#PyRDF.use("spark", {'npartitions': '64'}) #was 32 in example
PyRDF.use("local")
RDF = PyRDF.RDataFrame
else:
ROOT.ROOT.EnableImplicitMT()
RS = ROOT.ROOT
RDF = RS.RDataFrame
#FIXME: Need filter efficiency calculated for single lepton generator filtered sample. First approximation will be from MCCM (0.15) but as seen before, it's not ideal.
#May need to recalculate using genWeight/sumWeights instead of sign(genWeight)/(nPositiveEvents - nNegativeEvents), confirm if there's any difference.
# Integrated luminosity per data-taking era (presumably in fb^-1 -- the 2017
# value matches the published 41.53 fb^-1).
# NOTE(review): the "2018" value of 1 looks like a placeholder, not a real
# luminosity -- confirm before running over the 2018 era.
lumi = {"2017": 41.53,
        "2018": 1}
# Era used for the rest of this script.
era = "2017"
# Plot/legend colors per process (ROOT color constants). "-GF" suffixes
# denote the generator-filtered variants of the ttbar dilepton (DL) and
# single-lepton (SL) samples.
leg_dict = {"ttbar_DL-GF": ROOT.kAzure-2,
            "ttbar_DL": ROOT.kRed,
            "ttbar_SL-GF": ROOT.kYellow,
            "ttbar_SL": ROOT.kCyan,
            }
source_DL_V2 = {
"tt_DL":{
"era": "2017",
"isData": False,
"nEvents": 69098644,
"nEventsPositive": 68818780,
"nEventsNegative": 279864,
"sumWeights": 4980769113.241218,
"sumWeights2": 364913493679.955078,
"isSignal": False,
"crossSection": 89.0482,
"color": leg_dict["ttbar_DL"],
"source": "/eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_SL-NOM_2017_v2.root",
"sourceSPARK": ["root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/2A0100C0-5A95-0145-B62F-0CA9D9639F68.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/4ADB829B-0293-0D48-8AEA-31AAFD1936B8.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/FF239AE9-D713-5147-BB2C-FAFF45770541.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/FEFFCB04-A0CD-2945-BB46-D0D9013CD4F4.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/FB37F4B8-4878-AC41-80AD-1AC7BCC96FBF.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/F86F8AAA-A400-7340-A1B2-1BEDDD5C634C.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/F61A315A-0C50-F545-9D27-5821F2A16665.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/E8AEC963-FB46-604E-BFCA-4BAD27E9C457.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/E5D51928-D702-3B4E-93FF-10B011657478.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/E08462B0-0C0E-E54F-BFC6-8B09D73ABD59.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/DE610AC6-52C8-F243-B726-266E986C67C7.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/DC5D2C4D-0FA1-9448-BDAD-8B3212A417AC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/DB9DB17D-00F1-C540-BF6A-0A3314CD31F7.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/DB2F7B58-0EFA-B241-B52A-8A14E3DC5356.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/DB097816-5864-3640-A472-37E4518131AD.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/D8B47A61-B47A-494C-B6B7-E2BE3F250C9E.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/D3582719-8222-9A48-8FD6-FE7CA90C10F2.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/CD496386-C278-0C4C-8F7E-BE62903ADD57.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/CAACE0A1-EA68-154E-8F4E-2D2298D087ED.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/C805AA80-6F12-E84C-B5F6-6AC7CDBDD568.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/C329E2BB-0C74-A640-9F7B-DFC5505DA4A9.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/C2DFBA51-FCE3-954D-B2FD-050DCF3BA2AE.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/C0CFCDB9-4C19-9243-B1F5-4CC8B34A5F53.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/B8C82709-DAFE-DE46-8207-ECB035DBE32C.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/B5A0D925-E6D2-964F-8FBF-B6DCF8311983.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/B40D0E48-C30F-144C-954D-C79F2E74BAC0.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/B1C73F9C-E932-E148-8178-AC2E912F77C7.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/B0CDDA70-01A8-DA47-A6DA-7E518609D349.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/A65E69A8-4F9A-6B4B-889E-787546455F50.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/A1ACAB6F-3CE4-8E4B-A148-5CFB78AAB153.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/9D587515-51E3-FB41-856E-41406CF1AA94.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/9C4ECF47-F241-E841-9017-524C3FE3F782.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/99E411C5-8086-3C41-B5E0-8356B93A62AE.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/9826D66E-2230-9A4E-AF59-99404C9CA0F8.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/96EA0AD0-7850-9742-800B-8732972FE897.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/96787088-4414-194C-9045-CB7B81923664.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/93648D0C-3759-5A4F-890B-5275C66BC423.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/8A267C63-CC35-3D49-9AA9-1D5E89C3FA8A.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/854ACC40-D83A-CB4D-8096-A3D5AB0CCEB7.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/8482B4BA-D619-CA42-92B8-D8AC7EE3E14A.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/824DD978-02EE-8540-84F4-45C81D901868.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/73C3619B-A47F-1D49-B4EC-E347B144C067.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/7027E474-2CF4-354C-928D-26A03AC64602.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/6974B155-E6FB-6046-8CF0-861DD75C65E9.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/67897CF3-3F11-AC48-9CB2-926BF1CF2088.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/66B78AE0-D4B0-A04D-B103-DB78ACC047E7.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/6530E34B-0886-6C41-A78A-74B945B9E23E.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/63BA2496-580D-CE4A-A9D5-FC81E299FEAC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/5BB4B096-AC3F-BD49-B599-D43E0176890F.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/5AEEEC98-7170-114E-B4FF-FA3F9BDC3217.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/5A8E9758-665D-264D-9ACF-A7C69D56523B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/55B03D69-AE5B-6142-80A6-1517F2B9F6CC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/54EC1465-9EDA-7B40-8042-1FD34081497A.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/51640F96-C070-694F-A3EA-59507F27FA3B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/33B12362-B2B9-2D46-ADBF-8BB30E9949B0.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/2C64BBC9-4082-424C-81F2-D0ED3406CBC9.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/52B5612E-888D-FA4C-8C83-C60104F70DD3.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/544DB558-0C6F-EC41-BFDC-B6EE46EC986F.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/46239DBE-8E5E-9744-B7F4-B72B44803619.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/460F269F-337C-7F4F-92FD-5A18525B33F8.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/42D378EC-A09E-C945-99C7-BC1F00E41D88.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/4179956C-6AC8-7041-AC46-DC2DE881F788.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/3992DEA8-A6A0-F946-9563-1FA0AACC9A0C.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/38E81C59-132D-6847-B0D8-77A1D0D5ED56.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/1663D9BD-F7FA-784B-A313-D540068B4BAB.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/14F0276E-7349-2741-9186-B5713E7EBEA8.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/0AA9D783-4D4C-924A-9B92-709702ED7915.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/03263E02-B201-3540-BE86-03A3DFF898F2.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/0010502D-08FD-9A45-9B8F-A2FB501C776D.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/1930F644-A4DF-9441-BCDE-48B2D6045607.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/1BE985E2-0F6A-7B4D-98D8-A5CC8BDF64C0.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/1C71F860-D8D2-E342-9FA3-A4815ABD60EA.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_new_pmx_102X_mc2017_realistic_v7-v1/110000/27A7A157-10B1-024C-AD51-53D05797FB47.root",
],
"stitch": {"mode": 'Flag', "condition": 'Fail', "channel": 'DL'}
},
"tt_DL-GF":{
"era": "2017",
"isData": False,
"nEvents": 8510388,
"nEventsPositive": 8467543,
"nEventsNegative": 42845,
"sumWeights": 612101836.284397,
"sumWeights2": 44925503249.097206,
"isSignal": False,
"crossSection": 89.0482, #1.4705, #After applying filter efficiency...
"color": leg_dict["ttbar_DL-GF"],
"source": "/eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_SL-NOM_2017_v2.root",
"sourceSPARK": ["root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/F600BA86-6C79-F14E-843F-A18E5A82DD01.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/536EBC00-F7F9-B140-AEDC-ADE2B39AC3FA.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/464A3326-28B7-EC4D-834A-F9B8B826CA0A.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/3E4CF0E5-D1E3-BD48-AA6B-5E4549BE0B1B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/3DFC9865-DFDA-6040-B0F8-009206A5D631.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/B28BA460-6A5A-8542-A237-BDA8F4B4AA51.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/7CE6ACA8-A74E-2342-8B4D-6CC1B26F4209.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/7AC648FD-8C1D-6647-BA0F-BAA05E97DA00.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/681BF9B5-1040-8547-A7E0-7CC780A404C5.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/4F512FE3-7DD1-9C46-A949-0FEFB217B957.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/2F732BBD-2C2C-5E4E-A5DC-48ECF57636EB.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/100000/CED06574-B2B8-F74A-9214-8FC0861D12DC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/100000/C86A7E85-E6A1-F847-BE52-99E73DC04808.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/100000/C309E308-2D85-3341-969B-DD8154E1E5C3.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/100000/8FAA6F0A-6A3C-1D4E-A554-B4151598282C.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTTo2L2Nu_HT500Njet7_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/100000/2201EDF5-C5FE-C148-AE83-27F9240FBF4A.root",
],
"stitch": {"mode": 'Flag', "condition": 'Pass', "channel": 'DL'}
},
}
source_SL_V2 = {
"tt_SL":{
"era": "2017",
"isData": False,
"nEvents": 20122010,
"nEventsPositive": 20040607,
"nEventsNegative": 81403,
"sumWeights": 6052480345.748356,
"sumWeights2": 1850350248120.376221,
"isSignal": False,
"crossSection": 366.2073,
"color": leg_dict["ttbar_SL"],
"source": "/eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_SL-NOM_2017_v2.root",
"sourceSPARK": ["root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/D686FCCA-429E-C044-8B98-B99D55C65859.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/D4EF446F-CF32-A445-B8D9-03FC868BCFBA.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/CC62B699-A9FF-394B-8C8C-4E3856FD98D2.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/C421BAF3-F52B-E74D-98B9-49B24650B835.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/BA76C3A0-5953-9D4B-8033-EECFFDA51A6A.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/A2A53855-CB90-AD46-B70C-604615947D35.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/94F03AF3-B18C-9A45-BD61-2DD6A82BCF4A.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/3DB84A5A-5479-FD46-A6F1-2F50490F7189.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/33C03093-04D1-1D49-97F4-BFFD2365B994.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/32A69C33-0262-4449-BF5D-AD1BE1A47C85.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/23636497-18C3-3842-90E7-9FB8C1402680.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/EF0B6094-19C2-9745-BFB5-234D8AD41332.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/E502CBAE-CFE6-9C46-8F24-724A0075DA19.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/D7909568-5273-3148-ABEA-F7CB0D1866CE.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/D394B8AA-7BB2-DC44-94CD-EE41978E6ACC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/C11F6A73-627C-F341-9AD1-E91B8A4A92EF.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/A6746E57-C754-D149-8AC8-6FE6DD73E0E5.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/A2D14FA4-A93C-4D43-AB87-13BDA4D1C7D6.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/A213145C-B8BB-3A4F-A308-83D4C63D1E00.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/A1DC2776-3A7D-4047-A0E4-1094D2D8FA40.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/9CD2FFBB-AA38-3C46-99E3-7641F7F78013.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/93AF8675-716E-4F46-8179-775D4492D567.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/92963635-0E8A-2D47-A164-F46B6C7F5C0E.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/8A67C2BD-4E12-7643-93E8-A82A90E0F96C.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/68BAB3CE-588B-CB4F-B83E-E0A97F41ABDF.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/67674532-76D0-3940-B196-F3135299B87C.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/672FCC65-B125-0D45-B10E-128DBF16B460.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/5C722357-ADDC-9B44-B160-6EC6F283C4D3.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/57C0579F-1A15-B74F-A7F6-81706D7CA364.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/5202C039-E420-DC4B-8D33-264889424EFD.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/41B39F60-7580-A24F-BFD1-73F2FABC8451.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/3C940579-D915-D44D-A9B9-1155EAC0CB4A.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/3845EC7C-6772-D442-A88D-7C183BD4BDEC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/37F6FD55-B43F-994E-B278-6E25EE225CFF.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/3368BAF5-1F1C-2E45-BAFF-715C3CFFCC20.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/2D39555F-38D8-854C-A59F-ED2F04833448.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/2CF31824-76F9-FE40-937A-A2ECB38B8AF1.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/1F01EA75-4A6F-2042-8CE2-5CA5823963AC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/05DF7E5F-93BC-D749-9ED0-D34609A086B3.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/60000/04449851-5D38-D345-91F4-71A53FF5256F.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/F449E769-0706-A34C-AD7C-F369864CA977.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/C64D9C81-8E64-5A47-9928-B56F107FF36B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/98ED7A34-73CE-3C47-B90E-E9BE11BC05EE.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/7749A2B3-BA70-FF48-8460-7207A7049E13.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/60DB4489-73FC-A04B-8549-5D71E7DA3C95.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/3615F999-6326-D047-A00E-F140CF0EA3D1.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/25AB49E0-1C50-1548-B3AF-D7FEF02EC935.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/0ECACD7C-4805-2F43-86CA-025F41E6D70D.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/044B0BC2-8CA4-2C4B-A766-8519EBE3DE7F.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/F6F9E82C-FD48-C144-8585-4D76DA758BEC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/EEC376C3-6572-2046-9E46-57FBC413428D.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/ECA31522-5FC7-4549-BAB8-93AB2120C94F.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/EB7998BC-1D8B-4545-BF96-857741C0B086.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/E643F0E5-5DDD-CE46-B5D8-305AC77D3E6B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/E1F00613-6B22-594A-AF4D-AC739D7408E9.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/DF4B8F12-536D-5741-9F18-2AC5CFB86F7D.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/DB90098A-A3F4-D041-8D43-970E7C120E69.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/D947E9AC-296E-7D42-814F-AEC22C308FF0.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/CD8A7E48-90B2-B14D-A43C-F1344CEE6237.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/C290373E-7127-3744-87BD-CA11CD1FF62C.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/C16B1661-518D-274A-848A-BE04951109D8.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/BBCE8E57-758F-2941-8F8A-4A6F5F35B2B7.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/AE46BBD3-5D20-DC4C-96DB-36FDA4060242.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/AA8DCC03-0EB6-DB4D-8ECA-3D07C23F44E1.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/A194E3B2-7853-C54E-AB5A-5ECCD2F72726.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/9F9008FA-6F7E-4548-A188-79AE98F871FA.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/935AA661-8BD7-0645-B431-5EBB37769B3B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/92D19833-5F3A-3B43-B402-385111B8666B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/8C9C4F6F-3E49-0F4C-B805-3B5D5F747E66.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/8437D94E-5F61-9149-8B3A-6B908DBBE95B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/7CCC70B7-0333-164C-8E4E-8979C4AAC3CD.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/778E67D2-6683-EE45-8FA3-4A852DF0A938.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/76F7EE18-97F1-194B-9346-13C734CD1C1C.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/6EB064B9-4337-414E-83BC-9BB2F82A2D67.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/5F9459CA-8505-C34B-879E-7C87578B7951.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/5D1A2264-B851-6A45-8340-82511047BA48.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/56D3202D-308D-984E-8B34-7F9DA0AB20CF.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/44C5336D-0BF7-5B4D-804F-5B5DE682C8C9.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/3345DAC7-48F8-1C43-8C3B-AA477027C578.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/3304ABCD-DD82-CC48-B1AF-A6DAE1CEC349.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/2D5E8874-07C9-724D-916C-18F8D56A4361.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/2A97FB56-AF3D-604F-B081-6489818B1EDC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/28A22CFC-E78B-FC4A-ACBA-22FA36DFE3E5.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/1FD04315-053C-8846-9401-AA5210544F88.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/159803B7-8A47-F844-A329-27639CFAC6A9.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/148539FB-8BFA-E343-8E40-EFEC5DBF7067.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/114960A1-1F78-744F-A828-00B3FB77831D.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/05F2E7D5-86BF-344C-A87E-EFA3DD283F9B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/01150437-2B0C-BF4B-A6EA-6ECD4719D911.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/FAEF2067-51FE-CF4E-AD20-C70DE01C137F.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/F751A8E8-B25E-9D49-AB19-38E9BD2AC04F.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/EEBE73AA-42D3-0A4E-B8C3-01D9F9660619.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/D34F052B-6717-E148-9A2C-3AE4D5C94636.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/D090CFDA-5B5B-A941-BB73-35F17D6EEB89.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/C5BE441F-5595-094B-A4D9-7F9AD6B50B6B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/C510D032-8263-1B43-9AB1-E9ECF623F676.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/B9F4AAAB-E750-7A47-B202-3DC7D88A21A0.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/B6CA8915-7D4B-8F4F-BE61-7F90C1DEDB49.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/ACFBB48F-D400-4D42-8D2C-7B7314E1DB06.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/A8F58206-1647-0943-85F0-63D6C199424D.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/A8510BE6-4689-FF46-BF79-C33C39817B8B.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/A55142EF-D15D-F74D-90B2-24FC3ECEA87C.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/93242D28-6E2E-A747-AC68-652871EB88C5.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/8F7736DF-8A39-DF4A-9BFA-476967FE21BA.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/8B8867F9-2577-5047-A983-02C244D1A2B9.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/7DD1CB31-0F41-AD42-A508-290B0A423487.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/722319BE-C2FC-7949-82AB-E9B28C302C4D.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/712A4396-C950-4B45-A612-977C3B3D3BD5.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/6F191C45-0C7C-874D-BD72-BEF32AE3F819.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/659F7FAC-15D6-7549-ACE7-AED40975CAE9.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/632BCD48-C17D-3F4F-BF66-10C33F1F5288.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/482573D0-CC42-3F43-BFD6-2E5ECB10D6DA.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/431557AD-7DB2-7E4B-8522-9150B23E8817.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/39635D4F-52C4-EA41-85D4-141BFFE2799A.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/38EAF19E-755F-2A49-A148-9F0738FED364.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/2D5CF857-B3AF-0149-B81A-80E092B846DC.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/177636D2-BAFA-7040-B65B-E013CC6EBB89.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/16C31B91-ADD0-8548-B34E-67366A60BA61.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/0F0E5869-C901-C14D-AEDD-B14D4A947E87.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLeptonic_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/110000/0D2EE97B-AC68-E24A-A451-1D5127322521.root",
],
"stitch": {"mode": 'Flag', "condition": 'Fail', "channel": 'SL'}
},
"tt_SL-GF":{
"era": "2017",
"isData": False,
"nEvents": 8836856,
"nEventsPositive": 8794464,
"nEventsNegative": 42392,
"sumWeights": 2653328498.476976,
"sumWeights2": 812201885978.209229,
"isSignal": False,
"crossSection": 366.2073, #???,
"color": leg_dict["ttbar_SL-GF"],
"source": "/eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_SL-GF_2017_v2.root",
"sourceSPARK": ["root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/0E27F419-ADDE-1E4C-AC0A-130DA36C1FA6.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/F5AEB3BB-5D35-5949-A0A3-2664AFFBAA94.root ",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/70000/4741AE94-855A-8344-A1DA-84AAD948D419.root ",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/F412C6EF-49E3-F94F-812D-14FCA6B78C51.root ",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/D2B7659C-9C1E-094A-B0E8-A264BB57EB67.root ",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/82FBCA1B-F11F-564B-9075-35B4486B45B6.root ",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/6AF680DD-7ED5-7046-9906-DF0A7174EA61.root ",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/68B095D2-20EC-1A4D-A93D-F89AF49BE9F6.root ",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/4E7674E4-536E-B048-B646-DB8012B29D50.root ",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/30000/3D2CE6BC-4EA6-834A-A036-F4E9D91D97F4.root ",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/DEB7211A-47D0-474B-A383-770775D86F01.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/CB0C5BE0-3698-8E4B-B49A-57F0A10F602F.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/5BA21135-FE65-E449-B954-2640B793FDA0.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/4A3A4BC6-ACC4-A642-AC3B-3B75E43E3ECE.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/28D3A741-5A66-AA4D-AB2C-4402F0224331.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/250000/1E1E3A21-1E31-D843-BCFB-50AE65A615C0.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/907839C8-A184-484C-9BED-44BEA845FDBB.root",
"root://cms-xrd-global.cern.ch//store/mc/RunIIFall17NanoAODv5/TTToSemiLepton_HT500Njet9_TuneCP5_PSweights_13TeV-powheg-pythia8/NANOAODSIM/PU2017_12Apr2018_Nano1June2019_102X_mc2017_realistic_v7-v1/120000/294AF708-5AAC-2045-A851-6E1676E295E8.root",],
"stitch": {"mode": 'Flag', "condition": 'Pass', "channel": 'SL'}
},
}
stitched_DL_V2 = {
"tt_DL":{
"era": "2017",
"isData": False,
"nEvents": 69098644,
"nEventsPositive": 68818780,
"nEventsNegative": 279864,
"sumWeights": 4980769113.241218,
"sumWeights2": 364913493679.955078,
"isSignal": False,
"crossSection": 89.0482,
"color": leg_dict["ttbar_DL"],
"source": "/eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-*_2017_v2.root",
"sourceSPARK": ["root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-1_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-2_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-3_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-4_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-5_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-6_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-7_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-8_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-9_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-10_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-11_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-12_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-13_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-NOM-14_2017_v2.root",],
"stitch": {"mode": 'Flag', "condition": 'Fail', "channel": 'DL'}
},
"tt_DL-GF":{
"era": "2017",
"isData": False,
"nEvents": 8510388,
"nEventsPositive": 8467543,
"nEventsNegative": 42845,
"sumWeights": 612101836.284397,
"sumWeights2": 44925503249.097206,
"isSignal": False,
"crossSection": 1.4705,
"color": leg_dict["ttbar_DL-GF"],
"source": "/eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-GF-*_2017_v2.root",
"sourceSPARK": ["root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-GF-1_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-GF-2_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-GF-3_2017_v2.root",
"root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_DL-GF-4_2017_v2.root",],
"stitch": {"mode": 'Flag', "condition": 'Pass', "channel": 'DL'}
},
}
# Post-processed ("stitched") 2017 semileptonic ttbar samples (v2 ntuples).
# Same schema as stitched_DL_V2: weighting bookkeeping, local and xrootd
# file locations, and the stitching rule.
stitched_SL_V2 = {
    # Inclusive semileptonic sample; 'Fail' drops the GF-covered phase space.
    "tt_SL": {
        "era": "2017",
        "isData": False,
        "nEvents": 20122010,
        "nEventsPositive": 20040607,
        "nEventsNegative": 81403,
        "sumWeights": 6052480345.748356,
        "sumWeights2": 1850350248120.376221,
        "isSignal": False,
        "crossSection": 366.2073,
        "color": leg_dict["ttbar_SL"],
        "source": "/eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_SL-NOM_2017_v2.root",
        "sourceSPARK": ["root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_SL-NOM_2017_v2.root",],
        "stitch": {"mode": 'Flag', "condition": 'Fail', "channel": 'SL'}
    },
    # GenHT/Njet-filtered (GF) semileptonic sample; 'Pass' keeps only the
    # filtered phase space.
    "tt_SL-GF": {
        "era": "2017",
        "isData": False,
        "nEvents": 8836856,
        "nEventsPositive": 8794464,
        "nEventsNegative": 42392,
        "sumWeights": 2653328498.476976,
        "sumWeights2": 812201885978.209229,
        "isSignal": False,
        # NOTE(review): 6 looks like a placeholder — the unstitched version of
        # this same sample uses 366.2073 (flagged "#???" there). Confirm the
        # intended GF cross section before using these weights.
        "crossSection": 6,
        "color": leg_dict["ttbar_SL-GF"],
        "source": "/eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_SL-GF_2017_v2.root",
        "sourceSPARK": ["root://eoshome-n.cern.ch//eos/user/n/nmangane/SWAN_projects/LogicChainRDF/FilesV2/tt_SL-GF_2017_v2.root"],
        "stitch": {"mode": 'Flag', "condition": 'Pass', "channel": 'SL'}
    },
}
def defineStitchVars(input_df, crossSection=0, sumWeights=-1, lumi=0,
                     nEvents=-1, nEventsPositive=2, nEventsNegative=1,
                     era="2017", verbose=False):
    """Book the per-event weight and generator-level stitching columns.

    Two weight expressions are defined (as C++ strings JIT-compiled by
    RDataFrame):
      * ``wgt_SUMW`` = (xs * lumi * 1000 * genWeight) / sumWeights
      * ``wgt_NUMW`` = (xs * lumi * 1000 * genWeight) /
                       (|genWeight| * (nEventsPositive - nEventsNegative))
    plus the masks and reduced variables (``stitch_nGenLep``,
    ``stitch_nGenJet``, ``stitch_GenHT``) used downstream to separate the
    phase space covered by dedicated filtered samples.

    Parameters
    ----------
    input_df : ROOT RDataFrame (or compatible node)
        Must expose ``Define(name, expression)`` returning a new node.
    crossSection : float
        Sample cross section (presumably pb; the factor 1000 converts the
        luminosity from /fb to /pb — confirm against the lumi table).
    sumWeights : float
        Sum of generator weights over the full sample.
    lumi : float
        Integrated luminosity, in /fb per the factor-1000 convention above.
    nEvents : int
        Unused; kept so existing keyword callers continue to work.
    nEventsPositive, nEventsNegative : int
        Counts of positive-/negative-genWeight events.
    era : str
        Unused here; kept for interface compatibility (the per-era stitch
        cuts are applied in fillStitchVars).
    verbose : bool
        If True, echo each Define() call before booking it.

    Returns
    -------
    The dataframe node with all columns defined.
    """
    # OrderedDict keeps the booking order deterministic: the *_mask columns
    # must be defined before the stitch_* columns that index with them.
    defines = collections.OrderedDict()
    defines["wgt_SUMW"] = "({xs:s} * {lumi:s} * 1000 * genWeight) / {sumw:s}"\
        .format(xs=str(crossSection), lumi=str(lumi), sumw=str(sumWeights))
    defines["wgt_NUMW"] = "({xs:s} * {lumi:s} * 1000 * genWeight) / (abs(genWeight) * ( {nevtp:s} - {nevtn:s} ) )"\
        .format(xs=str(crossSection), lumi=str(lumi),
                nevtp=str(nEventsPositive), nevtn=str(nEventsNegative))
    # Generator-level selections (pt presumably in GeV).
    defines["jet_mask"] = "GenJet_pt > 30"
    # NOTE(review): eta cut is one-sided; likely intended abs(GenJet_eta) < 2.4
    # — left as-is to preserve behavior, confirm with the sample producers.
    defines["HT_mask"] = "GenJet_pt > 30 && GenJet_eta < 2.4"
    # Charged leptons at LHE level: e (11), mu (13), tau (15).
    defines["lep_mask"] = "abs(LHEPart_pdgId) == 15 || abs(LHEPart_pdgId) == 13 || abs(LHEPart_pdgId) == 11"
    defines["stitch_nGenLep"] = "LHEPart_pdgId[lep_mask].size()"
    defines["stitch_nGenJet"] = "GenJet_pt[jet_mask].size()"
    defines["stitch_GenHT"] = "Sum(GenJet_pt[HT_mask])"
    rdf = input_df
    for colName, expr in defines.items():
        if verbose:
            print("Define(\"{}\", \"{}\")".format(colName, expr))
        rdf = rdf.Define(colName, expr)
    return rdf
def fillStitchVars(input_df, weights=("wgt_SUMW", "wgt_NUMW"), Cache=None,
                   HTBinWidth=50, desiredHTMin=200, desiredHTMax=800,
                   era="2017", channel="DL", source="Filtered", verbose=False):
    """Book stitching control histograms for each requested weight column.

    For every weight in ``weights`` this books lazy 1D histograms of
    ``stitch_nGenLep``, ``stitch_nGenJet`` and ``stitch_GenHT`` plus a 2D
    GenHT-vs-nGenJet histogram. The HT binning is built outward from the
    per-era/channel stitching cut value so that a bin edge lands exactly on
    the cut.

    Parameters
    ----------
    input_df : ROOT RDataFrame (or compatible node)
        Must expose ``Histo1D(model, column, weight)`` and
        ``Histo2D(model, xcolumn, ycolumn, weight)``.
    weights : iterable of str
        Weight column names (booked by defineStitchVars).
    Cache : dict or None
        Optional dict to book into; a new one is created when None.
    HTBinWidth, desiredHTMin, desiredHTMax : number
        HT axis bin width and the range the binning must at least cover.
    era, channel : str
        Select the stitching cut (GenHT) from the per-era table.
    source : str
        Unused; kept for interface compatibility.
    verbose : bool
        If True, print the computed HT binning.

    Returns
    -------
    dict
        Cache[weight][variable] -> lazy histogram handle.

    Raises
    ------
    ValueError
        If no GenHT stitching cut is defined for the era/channel (2016).
    """
    # Per-era, per-channel generator-level cuts defining the filtered samples.
    stitchDict = {'2016': {'SL': {'nGenJets': None,
                                  'nGenLeps': None,
                                  'GenHT': None},
                           'DL': {'nGenJets': None,
                                  'nGenLeps': None,
                                  'GenHT': None}
                           },
                  '2017': {'SL': {'nGenJets': 9,
                                  'nGenLeps': 1,
                                  'GenHT': 500},
                           'DL': {'nGenJets': 7,
                                  'nGenLeps': 2,
                                  'GenHT': 500}
                           },
                  '2018': {'SL': {'nGenJets': 9,
                                  'nGenLeps': 1,
                                  'GenHT': 500},
                           'DL': {'nGenJets': 7,
                                  'nGenLeps': 2,
                                  'GenHT': 500}
                           }
                  }
    cutValue = stitchDict[era][channel]['GenHT']
    if cutValue is None:
        # The original code would die with a TypeError in the binning loops
        # below; fail loudly with a clear message instead.
        raise ValueError("No GenHT stitching cut defined for era={} channel={}".format(era, channel))
    # Hard-coded multiplicity binning (little need to vary these).
    nGenJetMin = 2
    nGenJetMax = 20
    nGenJetBins = nGenJetMax - nGenJetMin
    nGenLepMin = 0
    nGenLepMax = 5
    nGenLepBins = nGenLepMax - nGenLepMin
    # Grow the HT axis outward from the cut value in HTBinWidth steps until
    # the desired range is covered, keeping an edge exactly at the cut.
    HTMin = cutValue
    HTMax = cutValue
    HTBins = 0
    while HTMin > desiredHTMin:
        HTMin -= HTBinWidth
        HTBins += 1
    while HTMax < desiredHTMax:
        HTMax += HTBinWidth
        HTBins += 1
    if verbose:
        print("For desiredHTMin={0:<.1f} and desiredHTMax={1:<.1f}, with HTBinWidth={2:<.1f}, the calculated HTMin={3:<.1f} and HTMax={4:<.1f} with HTBins={5:<d}".format(desiredHTMin, desiredHTMax, HTBinWidth, HTMin, HTMax, HTBins))
    rdf = input_df
    if Cache is None:
        Cache = {}
    for wgtVar in weights:
        Cache[wgtVar] = {}
        Cache[wgtVar]["nGenLep"] = rdf.Histo1D(("nGenLep[{}]".format(wgtVar), "nGenLep[{}]; nGenLep; Events".format(wgtVar), nGenLepBins, nGenLepMin, nGenLepMax), "stitch_nGenLep", wgtVar)
        Cache[wgtVar]["nGenJet"] = rdf.Histo1D(("nGenJet[{}]".format(wgtVar), "nGenJet[{}]; nGenJet; Events".format(wgtVar), nGenJetBins, nGenJetMin, nGenJetMax), "stitch_nGenJet", wgtVar)
        Cache[wgtVar]["GenHT"] = rdf.Histo1D(("GenHT[{}]".format(wgtVar), "GenHT[{}]; GenHT; Events".format(wgtVar), HTBins, HTMin, HTMax), "stitch_GenHT", wgtVar)
        # Fixed: the 2D model previously reused the 1D name "GenHT[...]",
        # which collides in ROOT's object registry.
        Cache[wgtVar]["GenHTvnGenJet"] = rdf.Histo2D(("GenHTvnGenJet[{}]".format(wgtVar),
                                                      "GenHTvnGenJet[{}]; GenHT; nGenJet".format(wgtVar),
                                                      HTBins, HTMin, HTMax, nGenJetBins, nGenJetMin, nGenJetMax),
                                                     "stitch_GenHT", "stitch_nGenJet", wgtVar)
    return Cache
##################################################
##################################################
### CHOOSE SAMPLE DICT AND CHANNEL TO ANALYZE ####
##################################################
##################################################
# Focus on a limited set of events at a time.
levels_of_interest = set(["baseline"])
# Choose the sample dictionary to run; exactly one should be uncommented.
theSampleDict = source_DL_V2 #Unprocessed NanoAODv5 samples
#theSampleDict = source_SL_V2
#theSampleDict = stitched_DL_V2
#theSampleDict = stitched_SL_V2
#Name the channel that's being analyzed for saving files, and the format (.C, .root, .pdf, .eps, .gif, .png, .jpeg, etc)
fileChannel = "StitchCalculation"
theFormat = ".pdf"
# Build one lazy RDataFrame per sample. The "nGenJet > 0" cut (labeled
# "trivial") anchors the filter chain; presumably a near-no-op — confirm.
filtered = {}
for name, vals in theSampleDict.items():
    #if name == "tttt_orig": continue
    print("Booking - {}".format(name))
    if useSpark == True:
        # Spark/distributed mode reads the per-file xrootd list.
        filtered[name] = RDF("Events", vals["sourceSPARK"]).Filter("nGenJet > 0", "trivial")#.Cache()
    else:
        # Local mode reads the EOS path/glob.
        filtered[name] = RDF("Events", vals["source"]).Filter("nGenJet > 0", "trivial")#.Filter(b[JMLOG], JMLOG)#.Cache()
samples = {}
counts = {}
histos = {}
the_df = {}
print("Starting loop for booking")
for name, vals in theSampleDict.items():
print("Booking - {}".format(name))
the_df[name] = filtered[name]
the_df[name] = defineStitchVars(the_df[name], crossSection=vals["crossSection"], sumWeights=vals["sumWeights"],
lumi=lumi[vals["era"]], nEvents=vals["nEvents"], nEventsPositive=vals["nEventsPositive"],
nEventsNegative=vals["nEventsNegative"],)
counts[name] = the_df[name].Count()
histos[name] = fillStitchVars(the_df[name], weights=["wgt_SUMW", "wgt_NUMW"], Cache=None,
HTBinWidth=50, desiredHTMin=200, desiredHTMax=800,
era=vals["era"], channel=vals["stitch"]["channel"],
)
print("Warning: if filtered[name] RDFs are not reset, then calling Define(*) on them will cause the error"\
" with 'program state reset' due to multiple definitions for the same variable")
loopcounter = 0
start = time.clock()
substart = {}
subfinish = {}
for name, cnt in counts.items():
print("Working...")
substart[name] = time.clock()
loopcounter += 1
print("{} = {}".format(name, str(cnt.GetValue())))
subfinish[name] = time.clock()
finish = time.clock()
print("Took {}s to process".format(finish - start))
for name, val in substart.items():
print("Took {}s to process sample {}".format(subfinish[name] - substart[name], name))
c = ROOT.TCanvas("c", "", 800, 600)
c.cd()
histos["tt_DL-GF"]["wgt_SUMW"]["GenHTvnGenJet"].Draw("COLZ TEXT")
c.Draw()
c.SaveAs("PyRDFTest.pdf")
| 132.418966
| 285
| 0.746533
| 8,976
| 76,803
| 6.11709
| 0.167558
| 0.026991
| 0.041343
| 0.066148
| 0.798113
| 0.79611
| 0.794216
| 0.794216
| 0.791411
| 0.789881
| 0
| 0.21352
| 0.13769
| 76,803
| 579
| 286
| 132.647668
| 0.61554
| 0.013294
| 0
| 0.288073
| 0
| 0.46422
| 0.786415
| 0.74725
| 0
| 0
| 0
| 0.001727
| 0
| 1
| 0.00367
| false
| 0.007339
| 0.011009
| 0
| 0.018349
| 0.020183
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
27b5f99ae07fbf8e8bde4ccc1515fd95951c78fd
| 34
|
py
|
Python
|
branding/views/__init__.py
|
MichiMolle/django-branding
|
6e5bb82244b66b188de0b3c9294bc73bb7406314
|
[
"MIT"
] | null | null | null |
branding/views/__init__.py
|
MichiMolle/django-branding
|
6e5bb82244b66b188de0b3c9294bc73bb7406314
|
[
"MIT"
] | null | null | null |
branding/views/__init__.py
|
MichiMolle/django-branding
|
6e5bb82244b66b188de0b3c9294bc73bb7406314
|
[
"MIT"
] | null | null | null |
from .style_view import style_view
| 34
| 34
| 0.882353
| 6
| 34
| 4.666667
| 0.666667
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
27f27d12d2d15bb4c41aca7c044be8161bad398a
| 13,752
|
py
|
Python
|
ironic_inspector/test/test_firewall.py
|
NaohiroTamura/ironic-inspector
|
7b7fba72de46806ce84d6d4758a2343b52b0c96d
|
[
"Apache-2.0"
] | null | null | null |
ironic_inspector/test/test_firewall.py
|
NaohiroTamura/ironic-inspector
|
7b7fba72de46806ce84d6d4758a2343b52b0c96d
|
[
"Apache-2.0"
] | null | null | null |
ironic_inspector/test/test_firewall.py
|
NaohiroTamura/ironic-inspector
|
7b7fba72de46806ce84d6d4758a2343b52b0c96d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 NEC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import subprocess
import mock
from oslo_config import cfg
from ironic_inspector.common import ironic as ir_utils
from ironic_inspector import firewall
from ironic_inspector import node_cache
from ironic_inspector.test import base as test_base
CONF = cfg.CONF
@mock.patch.object(firewall, '_iptables')
@mock.patch.object(ir_utils, 'get_client')
@mock.patch.object(subprocess, 'check_call')
class TestFirewall(test_base.NodeTest):
def test_update_filters_without_manage_firewall(self, mock_call,
mock_get_client,
mock_iptables):
CONF.set_override('manage_firewall', False, 'firewall')
firewall.update_filters()
self.assertEqual(0, mock_iptables.call_count)
def test_init_args(self, mock_call, mock_get_client, mock_iptables):
rootwrap_path = '/some/fake/path'
CONF.set_override('rootwrap_config', rootwrap_path)
firewall.init()
init_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport', '67',
'-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain)]
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(init_expected_args, call_args_list):
self.assertEqual(args, call[0])
expected = ('sudo', 'ironic-inspector-rootwrap', rootwrap_path,
'iptables', '-w')
self.assertEqual(expected, firewall.BASE_COMMAND)
def test_init_args_old_iptables(self, mock_call, mock_get_client,
mock_iptables):
rootwrap_path = '/some/fake/path'
CONF.set_override('rootwrap_config', rootwrap_path)
mock_call.side_effect = subprocess.CalledProcessError(2, '')
firewall.init()
init_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport', '67',
'-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain)]
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(init_expected_args, call_args_list):
self.assertEqual(args, call[0])
expected = ('sudo', 'ironic-inspector-rootwrap', rootwrap_path,
'iptables',)
self.assertEqual(expected, firewall.BASE_COMMAND)
def test_init_kwargs(self, mock_call, mock_get_client, mock_iptables):
firewall.init()
init_expected_kwargs = [
{'ignore': True},
{'ignore': True},
{'ignore': True}]
call_args_list = mock_iptables.call_args_list
for (kwargs, call) in zip(init_expected_kwargs, call_args_list):
self.assertEqual(kwargs, call[1])
def test_update_filters_args(self, mock_call, mock_get_client,
mock_iptables):
# Pretend that we have nodes on introspection
node_cache.add_node(self.node.uuid, bmc_address='1.2.3.4')
firewall.init()
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
firewall.update_filters()
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
def test_update_filters_kwargs(self, mock_call, mock_get_client,
mock_iptables):
firewall.init()
update_filters_expected_kwargs = [
{'ignore': True},
{'ignore': True},
{'ignore': True},
{},
{'ignore': True},
{'ignore': True},
{'ignore': True},
{},
{},
{},
{'ignore': True},
{'ignore': True},
{'ignore': True}
]
firewall.update_filters()
call_args_list = mock_iptables.call_args_list
for (kwargs, call) in zip(update_filters_expected_kwargs,
call_args_list):
self.assertEqual(kwargs, call[1])
def test_update_filters_with_blacklist(self, mock_call, mock_get_client,
mock_iptables):
active_macs = ['11:22:33:44:55:66', '66:55:44:33:22:11']
inactive_mac = ['AA:BB:CC:DD:EE:FF']
self.macs = active_macs + inactive_mac
self.ports = [mock.Mock(address=m) for m in self.macs]
mock_get_client.port.list.return_value = self.ports
node_cache.add_node(self.node.uuid, mac=active_macs,
bmc_address='1.2.3.4', foo=None)
firewall.init()
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
# Blacklist
('-A', firewall.NEW_CHAIN, '-m', 'mac', '--mac-source',
inactive_mac[0], '-j', 'DROP'),
('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
firewall.update_filters(mock_get_client)
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
# check caching
mock_iptables.reset_mock()
firewall.update_filters(mock_get_client)
self.assertFalse(mock_iptables.called)
def test_update_filters_clean_cache_on_error(self, mock_call,
mock_get_client,
mock_iptables):
active_macs = ['11:22:33:44:55:66', '66:55:44:33:22:11']
inactive_mac = ['AA:BB:CC:DD:EE:FF']
self.macs = active_macs + inactive_mac
self.ports = [mock.Mock(address=m) for m in self.macs]
mock_get_client.port.list.return_value = self.ports
node_cache.add_node(self.node.uuid, mac=active_macs,
bmc_address='1.2.3.4', foo=None)
firewall.init()
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
# Blacklist
('-A', firewall.NEW_CHAIN, '-m', 'mac', '--mac-source',
inactive_mac[0], '-j', 'DROP'),
('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
mock_iptables.side_effect = [None, None, RuntimeError()]
self.assertRaises(RuntimeError, firewall.update_filters,
mock_get_client)
# check caching
mock_iptables.reset_mock()
mock_iptables.side_effect = None
firewall.update_filters(mock_get_client)
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
def test_update_filters_args_node_not_found_hook(self, mock_call,
mock_get_client,
mock_iptables):
# DHCP should be always opened if node_not_found hook is set
CONF.set_override('node_not_found_hook', 'enroll', 'processing')
firewall.init()
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
firewall.update_filters()
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
def test_update_filters_args_no_introspection(self, mock_call,
mock_get_client,
mock_iptables):
firewall.init()
update_filters_expected_args = [
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-N', CONF.firewall.firewall_chain),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-F', firewall.NEW_CHAIN),
('-X', firewall.NEW_CHAIN),
('-N', firewall.NEW_CHAIN),
('-A', firewall.NEW_CHAIN, '-j', 'REJECT'),
('-I', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', firewall.NEW_CHAIN),
('-D', 'INPUT', '-i', 'br-ctlplane', '-p', 'udp', '--dport',
'67', '-j', CONF.firewall.firewall_chain),
('-F', CONF.firewall.firewall_chain),
('-X', CONF.firewall.firewall_chain),
('-E', firewall.NEW_CHAIN, CONF.firewall.firewall_chain)
]
firewall.update_filters()
call_args_list = mock_iptables.call_args_list
for (args, call) in zip(update_filters_expected_args,
call_args_list):
self.assertEqual(args, call[0])
# Check caching enabled flag
mock_iptables.reset_mock()
firewall.update_filters()
self.assertFalse(mock_iptables.called)
# Adding a node changes it back
node_cache.add_node(self.node.uuid, bmc_address='1.2.3.4')
mock_iptables.reset_mock()
firewall.update_filters()
mock_iptables.assert_any_call('-A', firewall.NEW_CHAIN, '-j', 'ACCEPT')
| 41.297297
| 79
| 0.546975
| 1,548
| 13,752
| 4.615633
| 0.133075
| 0.10077
| 0.123163
| 0.153954
| 0.812176
| 0.795101
| 0.779286
| 0.773408
| 0.743177
| 0.722043
| 0
| 0.013195
| 0.300102
| 13,752
| 332
| 80
| 41.421687
| 0.729143
| 0.056864
| 0
| 0.821012
| 0
| 0
| 0.107129
| 0.003862
| 0
| 0
| 0
| 0
| 0.062257
| 1
| 0.038911
| false
| 0
| 0.027237
| 0
| 0.070039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e347da6ce49bbd8f8ceb305b9906f220612e75a4
| 168
|
py
|
Python
|
webapp/tests/__init__.py
|
romanek-adam/graphite-web
|
f6d7d16551a6953a5d0a1c19978efbf93fd3f869
|
[
"Apache-2.0"
] | 4,281
|
2015-01-01T12:35:03.000Z
|
2022-03-31T20:06:59.000Z
|
webapp/tests/__init__.py
|
romanek-adam/graphite-web
|
f6d7d16551a6953a5d0a1c19978efbf93fd3f869
|
[
"Apache-2.0"
] | 1,809
|
2015-01-01T21:16:36.000Z
|
2022-03-31T21:25:13.000Z
|
webapp/tests/__init__.py
|
romanek-adam/graphite-web
|
f6d7d16551a6953a5d0a1c19978efbf93fd3f869
|
[
"Apache-2.0"
] | 970
|
2015-01-02T19:49:21.000Z
|
2022-03-27T09:48:44.000Z
|
import os
DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data'))
TEST_CONF_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'conf'))
| 33.6
| 80
| 0.744048
| 29
| 168
| 3.931034
| 0.37931
| 0.315789
| 0.157895
| 0.280702
| 0.754386
| 0.754386
| 0.754386
| 0.754386
| 0.754386
| 0.754386
| 0
| 0
| 0.065476
| 168
| 4
| 81
| 42
| 0.726115
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
e38687655962e048dbbac2d02185ba785d0efdce
| 8,319
|
py
|
Python
|
tests/test_macros.py
|
minatoyuichiro/Blueqat
|
1be0150ca48bf40527936561d1bf4687dbf435b4
|
[
"Apache-2.0"
] | 357
|
2019-02-24T07:21:03.000Z
|
2022-03-15T22:59:13.000Z
|
tests/test_macros.py
|
mdrft/blueqat
|
6c5f26b377bc3ce0d02adec8b9132d70870b3d95
|
[
"Apache-2.0"
] | 35
|
2019-03-29T02:13:09.000Z
|
2021-10-15T02:19:06.000Z
|
tests/test_macros.py
|
mdrft/blueqat
|
6c5f26b377bc3ce0d02adec8b9132d70870b3d95
|
[
"Apache-2.0"
] | 49
|
2019-03-09T13:19:40.000Z
|
2022-03-11T08:31:16.000Z
|
from math import pi
import math
import cmath
import random
from collections import Counter
import pytest
import numpy as np
from blueqat import Circuit
from blueqat.circuit_funcs import circuit_to_unitary
import blueqat.macros
PAULI_X = np.array([[0, 1], [1, 0]])
ANGLES = [0.0, pi / 4, pi / 2, pi, -pi / 2, 2 * pi, -7 * pi]
ANGLES_SMALL = [0.0, -pi / 2, pi, 2 * pi]
def test_c3z():
assert np.allclose(circuit_to_unitary(Circuit().c3z(0, 1, 2, 3)),
np.diag([1] * 15 + [-1]))
def test_c4z():
assert np.allclose(circuit_to_unitary(Circuit().c4z(0, 1, 2, 3, 4)),
np.diag([1] * 31 + [-1]))
def test_mcz_gray_0():
assert np.allclose(circuit_to_unitary(Circuit().mcz_gray([], 0)),
np.diag([1, -1]))
def test_mcz_gray_1():
assert np.allclose(circuit_to_unitary(Circuit().mcz_gray([0], 1)),
np.diag([1, 1, 1, -1]))
def test_mcz_gray_4():
assert np.allclose(circuit_to_unitary(Circuit().mcz_gray([0, 1, 2, 3], 4)),
np.diag([1] * 31 + [-1]))
def test_c3x():
expected = np.eye(16)
expected[7, 7] = expected[15, 15] = 0
expected[7, 15] = expected[15, 7] = 1
assert np.allclose(circuit_to_unitary(Circuit().c3x(0, 1, 2, 3)), expected)
def test_c4x():
expected = np.eye(32)
expected[15, 15] = expected[31, 31] = 0
expected[15, 31] = expected[31, 15] = 1
assert np.allclose(circuit_to_unitary(Circuit().c4x(0, 1, 2, 3, 4)),
expected)
def test_mcx_gray_0():
assert np.allclose(circuit_to_unitary(Circuit().mcx_gray([], 0)),
np.array([[0, 1], [1, 0]]))
def test_mcx_gray_1():
assert np.allclose(circuit_to_unitary(Circuit().mcx_gray([0], 1)),
circuit_to_unitary(Circuit().cx[0, 1]))
def test_mcx_gray_2():
assert np.allclose(circuit_to_unitary(Circuit().mcx_gray([0, 1], 2)),
circuit_to_unitary(Circuit().ccx[0, 1, 2]))
def test_mcx_gray_4():
expected = np.eye(32)
expected[15, 15] = expected[31, 31] = 0
expected[15, 31] = expected[31, 15] = 1
assert np.allclose(circuit_to_unitary(Circuit().mcx_gray([0, 1, 2, 3], 4)),
expected)
@pytest.mark.parametrize("theta", ANGLES)
@pytest.mark.parametrize("n", [0, 1, 2, 4])
def test_mcrx_gray_n(theta, n):
u = circuit_to_unitary(Circuit().rx(theta)[0])
expected = np.eye(2**(n + 1), dtype=complex)
expected[2**n - 1, 2**n - 1] = u[0, 0]
expected[2**(n + 1) - 1, 2**n - 1] = u[1, 0]
expected[2**n - 1, 2**(n + 1) - 1] = u[0, 1]
expected[2**(n + 1) - 1, 2**(n + 1) - 1] = u[1, 1]
assert np.allclose(
circuit_to_unitary(Circuit().mcrx_gray(theta, list(range(n)), n)),
expected)
@pytest.mark.parametrize("theta", ANGLES)
@pytest.mark.parametrize("n", [0, 1, 2, 4])
def test_mcry_gray_n(theta, n):
u = circuit_to_unitary(Circuit().ry(theta)[0])
expected = np.eye(2**(n + 1), dtype=complex)
expected[2**n - 1, 2**n - 1] = u[0, 0]
expected[2**(n + 1) - 1, 2**n - 1] = u[1, 0]
expected[2**n - 1, 2**(n + 1) - 1] = u[0, 1]
expected[2**(n + 1) - 1, 2**(n + 1) - 1] = u[1, 1]
assert np.allclose(
circuit_to_unitary(Circuit().mcry_gray(theta, list(range(n)), n)),
expected)
@pytest.mark.parametrize("theta", ANGLES)
@pytest.mark.parametrize("n", [0, 1, 2, 4])
def test_mcrz_gray_n(theta, n):
u = circuit_to_unitary(Circuit().rz(theta)[0])
expected = np.eye(2**(n + 1), dtype=complex)
expected[2**n - 1, 2**n - 1] = u[0, 0]
expected[2**(n + 1) - 1, 2**n - 1] = u[1, 0]
expected[2**n - 1, 2**(n + 1) - 1] = u[0, 1]
expected[2**(n + 1) - 1, 2**(n + 1) - 1] = u[1, 1]
assert np.allclose(
circuit_to_unitary(Circuit().mcrz_gray(theta, list(range(n)), n)),
expected)
@pytest.mark.parametrize("theta", ANGLES)
@pytest.mark.parametrize("n", [0, 1, 2, 4])
def test_mcrz_gray_n(theta, n):
u = circuit_to_unitary(Circuit().r(theta)[0])
expected = np.eye(2**(n + 1), dtype=complex)
expected[2**n - 1, 2**n - 1] = u[0, 0]
expected[2**(n + 1) - 1, 2**n - 1] = u[1, 0]
expected[2**n - 1, 2**(n + 1) - 1] = u[0, 1]
expected[2**(n + 1) - 1, 2**(n + 1) - 1] = u[1, 1]
assert np.allclose(
circuit_to_unitary(Circuit().mcr_gray(theta, list(range(n)), n)),
expected)
@pytest.mark.parametrize("theta", ANGLES_SMALL)
@pytest.mark.parametrize("phi", ANGLES_SMALL)
@pytest.mark.parametrize("lam", ANGLES_SMALL)
@pytest.mark.parametrize("gamma", ANGLES_SMALL)
@pytest.mark.parametrize("n", [0, 1, 2, 4])
def test_mcu_gray_n(theta, phi, lam, gamma, n):
u = circuit_to_unitary(Circuit().u(theta, phi, lam, gamma)[0])
expected = np.eye(2**(n + 1), dtype=complex)
expected[2**n - 1, 2**n - 1] = u[0, 0]
expected[2**(n + 1) - 1, 2**n - 1] = u[1, 0]
expected[2**n - 1, 2**(n + 1) - 1] = u[0, 1]
expected[2**(n + 1) - 1, 2**(n + 1) - 1] = u[1, 1]
assert np.allclose(
circuit_to_unitary(Circuit().mcu_gray(theta, phi, lam, gamma, list(range(n)), n)),
expected)
def test_mcx_with_ancilla_0():
assert np.allclose(
circuit_to_unitary(Circuit().mcx_with_ancilla([], 1, 0)),
circuit_to_unitary(Circuit().x[1])
)
def test_mcx_with_ancilla_1():
assert np.allclose(
circuit_to_unitary(Circuit().mcx_with_ancilla([1], 2, 0)),
circuit_to_unitary(Circuit().cx[1, 2])
)
def test_mcx_with_ancilla_2():
assert np.allclose(
circuit_to_unitary(Circuit().mcx_with_ancilla([1, 2], 3, 0)),
circuit_to_unitary(Circuit().ccx[1, 2, 3])
)
def test_mcx_with_ancilla_3():
assert np.allclose(
circuit_to_unitary(Circuit().mcx_with_ancilla([1, 2, 3], 4, 0)),
circuit_to_unitary(Circuit().c3x(1, 2, 3, 4))
)
def test_mcx_with_ancilla_4():
assert np.allclose(
circuit_to_unitary(Circuit().mcx_with_ancilla([1, 2, 3, 4], 5, 0)),
circuit_to_unitary(Circuit().c4x(1, 2, 3, 4, 5))
)
def test_mcx_with_ancilla_5():
assert np.allclose(
circuit_to_unitary(Circuit().mcx_with_ancilla([1, 2, 3, 4, 5], 6, 0)),
circuit_to_unitary(Circuit().mcx_gray([1, 2, 3, 4, 5], 6))
)
def test_mcx_with_ancilla_6():
assert np.allclose(
circuit_to_unitary(Circuit().mcx_with_ancilla([1, 2, 3, 4, 5, 6], 7, 0)),
circuit_to_unitary(Circuit().mcx_gray([1, 2, 3, 4, 5, 6], 7))
)
def test_mcz_with_ancilla_0():
assert np.allclose(
circuit_to_unitary(Circuit().mcz_with_ancilla([], 1, 0)),
circuit_to_unitary(Circuit().z[1])
)
def test_mcz_with_ancilla_1():
assert np.allclose(
circuit_to_unitary(Circuit().mcz_with_ancilla([1], 2, 0)),
circuit_to_unitary(Circuit().cz[1, 2])
)
def test_mcz_with_ancilla_2():
assert np.allclose(
circuit_to_unitary(Circuit().mcz_with_ancilla([1, 2], 3, 0)),
circuit_to_unitary(Circuit().ccz[1, 2, 3])
)
def test_mcz_with_ancilla_3():
assert np.allclose(
circuit_to_unitary(Circuit().mcz_with_ancilla([1, 2, 3], 4, 0)),
circuit_to_unitary(Circuit().c3z(1, 2, 3, 4))
)
def test_mcz_with_ancilla_4():
assert np.allclose(
circuit_to_unitary(Circuit().mcz_with_ancilla([1, 2, 3, 4], 5, 0)),
circuit_to_unitary(Circuit().mcz_gray([1, 2, 3, 4], 5))
)
def test_mcz_with_ancilla_5():
assert np.allclose(
circuit_to_unitary(Circuit().mcz_with_ancilla([1, 2, 3, 4, 5], 6, 0)),
circuit_to_unitary(Circuit().mcz_gray([1, 2, 3, 4, 5], 6))
)
def test_mcz_with_ancilla_6():
assert np.allclose(
circuit_to_unitary(Circuit().mcz_with_ancilla([1, 2, 3, 4, 5, 6], 7, 0)),
circuit_to_unitary(Circuit().mcz_gray([1, 2, 3, 4, 5, 6], 7))
)
def test_mcx_gray_12():
cnt = Circuit().x[:12].mcx_gray(range(12), 12).m[:].shots(10)
assert cnt == Counter({'1' * 13: 10})
def test_mcx_with_ancilla_12():
cnt = Circuit().x[:12].mcx_with_ancilla(range(12), 12, 13).m[:].shots(10)
assert cnt == Counter({'1' * 13 + '0': 10})
| 31.631179
| 90
| 0.576151
| 1,326
| 8,319
| 3.417044
| 0.062594
| 0.02516
| 0.183624
| 0.258883
| 0.870889
| 0.798279
| 0.765615
| 0.744207
| 0.712867
| 0.6791
| 0
| 0.079072
| 0.23837
| 8,319
| 262
| 91
| 31.751908
| 0.636048
| 0
| 0
| 0.362694
| 0
| 0
| 0.005289
| 0
| 0
| 0
| 0
| 0
| 0.165803
| 1
| 0.165803
| false
| 0
| 0.051813
| 0
| 0.217617
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8b63c12219e55ad642ead7828b643c8a57901f33
| 94
|
py
|
Python
|
is_core/context_processors/__init__.py
|
zzuzzy/django-is-core
|
3f87ec56a814738683c732dce5f07e0328c2300d
|
[
"BSD-3-Clause"
] | null | null | null |
is_core/context_processors/__init__.py
|
zzuzzy/django-is-core
|
3f87ec56a814738683c732dce5f07e0328c2300d
|
[
"BSD-3-Clause"
] | null | null | null |
is_core/context_processors/__init__.py
|
zzuzzy/django-is-core
|
3f87ec56a814738683c732dce5f07e0328c2300d
|
[
"BSD-3-Clause"
] | null | null | null |
from django.conf import settings
def is_js_dev(req):
return {'JS_DEV': settings.JS_DEV}
| 15.666667
| 38
| 0.734043
| 16
| 94
| 4.0625
| 0.6875
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159574
| 94
| 5
| 39
| 18.8
| 0.822785
| 0
| 0
| 0
| 0
| 0
| 0.06383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
8bde312d1cdf3106ec508880e7a6377dbe0e115b
| 11,023
|
py
|
Python
|
statics/pipe_templates.py
|
sesam-community/template-generator
|
790e0eaae82561509d9dbc3a92b136cb6c40639d
|
[
"Apache-2.0"
] | null | null | null |
statics/pipe_templates.py
|
sesam-community/template-generator
|
790e0eaae82561509d9dbc3a92b136cb6c40639d
|
[
"Apache-2.0"
] | null | null | null |
statics/pipe_templates.py
|
sesam-community/template-generator
|
790e0eaae82561509d9dbc3a92b136cb6c40639d
|
[
"Apache-2.0"
] | null | null | null |
def collect_pipe(system, pipeNameAndDatatype, config_group):
if config_group != "Default":
config = {
"_id": f"{config_group}-{pipeNameAndDatatype}-collect",
"type": "pipe",
"source": {
"type": f"{system['type']}",
"system": f"{system['_id']}"
},
"metadata": {
"$config-group": f"{config_group}"
},
"add_namespaces": False
}
else:
config = {
"_id": f"{pipeNameAndDatatype}-collect",
"type": "pipe",
"source": {
"type": f"{system['type']}",
"system": f"{system['_id']}"
},
"add_namespaces": False
}
return config
def enrich_pipe(pipeNameAndDatatype, config_group):
if config_group != "Default":
config = {
"_id": f"{config_group}-{pipeNameAndDatatype}-enrich",
"type": "pipe",
"source": {
"type": "dataset",
"dataset": f"{pipeNameAndDatatype}-collect"
},
"transform": {
"type": "dtl",
"rules": {
"default": [
["copy", "*"],
["comment", "*** convention here is to add namespaced identifiers ***"],
["make-ni", "system-datatype", "datatype"],
["comment", "*** convention here is to add the property rdf:type ***"],
["add", "rdf:type",
["ni", "template:Example"]
]
]
}
},
"metadata": {
"$config-group": f"{config_group}"
},
"add_namespaces": True,
"namespaces": {
"identity": f"{pipeNameAndDatatype}",
"property": f"{pipeNameAndDatatype}"
}
}
else:
config = {
"_id": f"{pipeNameAndDatatype}-enrich",
"type": "pipe",
"source": {
"type": "dataset",
"dataset": f"{pipeNameAndDatatype}-collect"
},
"transform": {
"type": "dtl",
"rules": {
"default": [
["copy", "*"],
["comment", "*** convention here is to add namespaced identifiers ***"],
["make-ni", "system-datatype", "datatype"],
["comment", "*** convention here is to add the property rdf:type ***"],
["add", "rdf:type",
["ni", "template:Example"]
]
]
}
},
"add_namespaces": True,
"namespaces": {
"identity": f"{pipeNameAndDatatype}",
"property": f"{pipeNameAndDatatype}"
}
}
return config
def global_pipe(pipeNameAndDatatype, config_group):
if config_group != "Default":
config = {
"_id": f"{config_group}-temporary",
"type": "pipe",
"source": {
"type": "merge",
"datasets": [f"{config_group}-{pipeNameAndDatatype}-enrich"],
"equality": [],
"identity": "first",
"strategy": "compact",
"version": 2
},
"metadata": {
"global": True,
"$config-group": f"{config_group}",
"tags": ["add your logical grouping here"]
}
}
else:
config = {
"_id": "global-temporary",
"type": "pipe",
"source": {
"type": "merge",
"datasets": [f"{pipeNameAndDatatype}-enrich"],
"equality": [],
"identity": "first",
"strategy": "compact",
"version": 2
},
"metadata": {
"global": True,
"tags": ["add your logical grouping here"]
}
}
return config
def transform_pipe(pipeNameAndDatatype, config_group):
if config_group != "Default":
config = {
"_id": f"{config_group}-{pipeNameAndDatatype}-transform",
"type": "pipe",
"source": {
"type": "dataset",
"dataset": "global-template"
},
"transform": {
"type": "dtl",
"rules": {
"default": [
["comment", "*** convention to filter data on rdf:type ***"],
["filter",
["in",
["ni", "template:Example"], "_S.rdf:type"]
],
["comment", "*** Add target system properties ***"],
["add", "someNameForTargetSystem",
"_S.pick_a_global_property"
]
]
}
},
"metadata": {
"$config-group": f"{config_group}"
},
"remove_namespaces": True
}
else:
config = {
"_id": f"{pipeNameAndDatatype}-transform",
"type": "pipe",
"source": {
"type": "dataset",
"dataset": "global-template"
},
"transform": {
"type": "dtl",
"rules": {
"default": [
["comment", "*** convention to filter data on rdf:type ***"],
["filter",
["in",
["ni", "template:Example"], "_S.rdf:type"]
],
["comment", "*** Add target system properties ***"],
["add", "someNameForTargetSystem",
"_S.pick_a_global_property"
]
]
}
},
"remove_namespaces": True
}
return config
def share_pipe(pipeNameAndDatatype, config_group):
    """Build a Sesam share pipe template ("<pipe>-share-operation").

    Reads the matching "-transform" dataset, fetches the current state from
    the target system (external "template" transform), and uses a hash
    comparison for optimistic locking: the payload is only exposed when the
    collected data matches what the system already holds.

    pipeNameAndDatatype -- base name (and datatype) of the dataflow pipe
    config_group        -- config group name; "Default" means no group

    Returns the pipe configuration as a dict.
    """
    # Non-default groups prefix pipe/dataset ids and add $config-group.
    prefix = "" if config_group == "Default" else f"{config_group}-"
    config = {
        "_id": f"{prefix}{pipeNameAndDatatype}-share-operation",
        "type": "pipe",
        "source": {
            "type": "dataset",
            "dataset": f"{prefix}{pipeNameAndDatatype}-transform"
        },
        "sink": {
            "type": "temporary",
            "system": "temporary",
            "operation": "temporary"
        },
        # Three-stage transform: curate -> fetch from system -> lock check.
        "transform": [{
            "type": "dtl",
            "rules": {
                "default": [
                    ["comment", "*** add discard or filter here to only expose curated data ***"],
                    ["discard",
                        ["is-not-empty", "_S.critial_property"]
                    ],
                    ["comment", "filter",
                        ["eq", "_S._deleted", False]
                    ],
                    ["copy", "*"]
                ]
            }
        }, {
            "type": "template",
            "system": "template",
            "operation": "get",
            "replace_entity": False
        }, {
            "type": "dtl",
            "rules": {
                "default": [
                    ["comment", "*** the above external transform is only required when checking for optimistic locking in updates ***"],
                    ["comment", "*** optimistic locking ***"],
                    ["add", "_old",
                        ["first",
                            ["hops", {
                                "datasets": ["you-collect-dataflow-pipe a"],
                                "where": [
                                    ["eq", "_S._id", "a._id"]
                                ]
                            }]
                        ]
                    ],
                    ["add", "_json_old",
                        ["json-transit",
                            ["apply", "remove-under", "_T._old"]
                        ]
                    ],
                    ["add", "_json_new",
                        ["first",
                            ["json-transit",
                                ["apply", "remove-under",
                                    ["first", "_S."]
                                ]
                            ]
                        ]
                    ],
                    ["add", "_hash_old",
                        ["hash128", "murmur3", "_T._json_old"]
                    ],
                    ["add", "_hash_new",
                        ["hash128", "murmur3", "_T._json_new"]
                    ],
                    ["if",
                        ["eq", "_T._hash_old", "_T._hash_new"],
                        [
                            ["comment", "*** same data in system as in sesam collect ***"],
                            ["comment", "*** expose your data ***"],
                            ["comment", "*** example for a rest system is provided below ***"],
                            ["add", "::payload",
                                ["apply", "remove-under", "_S."]
                            ],
                            ["add", "::properties",
                                ["dict", "url",
                                    ["concat", "your-endpoint-ressource/", "_S.entity.id"]
                                ]
                            ]
                        ],
                        [
                            ["comment", "**** different data in system than in sesam collect ****"],
                            ["discard"]
                        ]
                    ]
                ],
                "remove-under": [
                    ["copy", "*", "_*"]
                ]
            }
        }]
    }
    if config_group != "Default":
        config["metadata"] = {
            "$config-group": f"{config_group}"
        }
    # batch_size 1 so each entity is checked/posted individually.
    config["batch_size"] = 1
    return config
| 28.931759
| 129
| 0.410415
| 819
| 11,023
| 5.368742
| 0.150183
| 0.067546
| 0.03275
| 0.040937
| 0.950193
| 0.928133
| 0.900387
| 0.900387
| 0.86468
| 0.86468
| 0
| 0.003063
| 0.407693
| 11,023
| 381
| 130
| 28.931759
| 0.670394
| 0
| 0
| 0.740053
| 0
| 0
| 0.41074
| 0.074202
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013263
| false
| 0
| 0
| 0
| 0.026525
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
47a0d79842b3d57b909df12194cd5f38b868e4d5
| 3,288
|
py
|
Python
|
florinsim.py
|
alexwhittemore/florinsim
|
60dbfb649419c36c71e03295e92f6acf28be688a
|
[
"MIT"
] | null | null | null |
florinsim.py
|
alexwhittemore/florinsim
|
60dbfb649419c36c71e03295e92f6acf28be688a
|
[
"MIT"
] | null | null | null |
florinsim.py
|
alexwhittemore/florinsim
|
60dbfb649419c36c71e03295e92f6acf28be688a
|
[
"MIT"
] | null | null | null |
import random
def did_i_win(p=.5):
    """Return True with probability *p* (one Bernoulli trial).

    random.random() is uniform on [0, 1), so comparing against p wins with
    probability exactly p — p=1 always wins, p=0 never does.
    """
    return random.random() < p
# --- Simulation parameters --------------------------------------------------
tickets_to_spend = 10000
florins = 1000
florins_start = florins

print("Tickets spent: {}".format(tickets_to_spend))
print("Florins at start: {}".format(florins_start))


def _run_strategy(title, p, payout_start, increment=0, ticket_cost=0, show_purse=True):
    """Spend all tickets under one payout strategy and print the results.

    title        -- heading printed before the run
    p            -- win probability of a single ticket
    payout_start -- purse paid out on a win; the purse resets after a win
    increment    -- florins added to the purse after each losing ticket
    ticket_cost  -- florins paid per ticket before it is scratched
    show_purse   -- whether to print the "Initial Purse" line
    """
    purse = payout_start
    balance = florins_start
    print("")
    print(title)
    if show_purse:
        print("Initial Purse: {}".format(payout_start))
    for _ in range(tickets_to_spend):
        balance -= ticket_cost
        if did_i_win(p=p):
            balance += purse
            purse = payout_start
        else:
            purse += increment
    print("Final florins: {}".format(balance))
    print("Profit: {}".format(balance - florins_start))
    print("Profit per ticket: {}".format(float(balance - florins_start) / float(tickets_to_spend)))


# Strategy 1, the sure thing: fixed 10-florin payout at 100%.
_run_strategy("Strategy 1: 10 florins, 100%", 1, 10, show_purse=False)

# Strategy 2, 15% chance; purse grows by 10 per loss.
# Ultra-long-term, about 16.3 florins per ticket.
_run_strategy("Strategy 2: 50+ florins, 15%", .15, 50, increment=10)

# Strategy 2.5, 30% chance but each ticket costs 20 florins.
# This strategy sucks — run it and see for yourself.
_run_strategy("Strategy 2.5: 50+ florins, 30% with 20 florins", .3, 50, increment=10, ticket_cost=20)

# Strategy 3, 5% chance; purse grows by 15 per loss.
# Ultra-long-term, about 14.1 florins per ticket.
_run_strategy("Strategy 3: 100+ florins, 5%", .05, 100, increment=15)

# Strategy 3.5, 10% chance but each ticket costs 20 florins.
# This strategy sucks — run it and see for yourself.
_run_strategy("Strategy 3.5: 100+ florins, 10% + 20 florins", .1, 100, increment=15, ticket_cost=20)

# Sanity-check for did_i_win:
# answers = [did_i_win(p=1) for _ in range(100000)]
# print("N: {}".format(len(answers)))
# print("Average: {}".format(float(sum(answers)) / float(len(answers))))
| 29.357143
| 92
| 0.697689
| 471
| 3,288
| 4.715499
| 0.154989
| 0.091851
| 0.128321
| 0.025214
| 0.779829
| 0.754165
| 0.754165
| 0.74606
| 0.724899
| 0.724899
| 0
| 0.0406
| 0.168491
| 3,288
| 112
| 93
| 29.357143
| 0.771763
| 0.170316
| 0
| 0.746988
| 0
| 0
| 0.191372
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.012048
| null | null | 0.373494
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
47d0242eee1f4f472b3038216cdb9f4d37fa738d
| 218
|
py
|
Python
|
samcli/lib/utils/profile.py
|
torresxb1/aws-sam-cli
|
d307f2eb6e1a91a476a5e2ca6070f974b0c913f1
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 859
|
2020-08-25T03:53:17.000Z
|
2022-03-31T12:33:07.000Z
|
samcli/lib/utils/profile.py
|
torresxb1/aws-sam-cli
|
d307f2eb6e1a91a476a5e2ca6070f974b0c913f1
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1,369
|
2020-08-25T10:57:03.000Z
|
2022-03-31T23:00:25.000Z
|
samcli/lib/utils/profile.py
|
torresxb1/aws-sam-cli
|
d307f2eb6e1a91a476a5e2ca6070f974b0c913f1
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 275
|
2020-08-25T19:33:50.000Z
|
2022-03-26T08:32:52.000Z
|
"""
Module for aws profile related helpers
"""
from typing import List, cast
from botocore.session import Session
def list_available_profiles() -> List[str]:
    """Return the names of all AWS profiles configured for the current user."""
    profiles = Session().available_profiles
    return cast(List[str], profiles)
| 19.818182
| 56
| 0.752294
| 29
| 218
| 5.551724
| 0.62069
| 0.21118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142202
| 218
| 10
| 57
| 21.8
| 0.860963
| 0.174312
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
9a1c5702af2d659e76c22497835b4ddbca7d503f
| 49
|
py
|
Python
|
wifi/reset_device/manual_reset.py
|
swissbyte/symlite
|
e46e4d95986da5716e153a22c59b229004ac2a70
|
[
"MIT"
] | 6
|
2021-03-02T21:55:28.000Z
|
2022-01-15T23:20:48.000Z
|
wifi/reset_device/manual_reset.py
|
swissbyte/symlite
|
e46e4d95986da5716e153a22c59b229004ac2a70
|
[
"MIT"
] | 1
|
2021-03-03T03:34:06.000Z
|
2021-03-03T19:18:56.000Z
|
wifi/reset_device/manual_reset.py
|
swissbyte/symlite
|
e46e4d95986da5716e153a22c59b229004ac2a70
|
[
"MIT"
] | 2
|
2021-03-03T05:29:53.000Z
|
2021-03-07T20:07:09.000Z
|
import reset_lib
# Unconditionally switch the wifi device back into host (access-point) mode.
# NOTE(review): reset_lib is a project-local module; the actual reset behavior
# is defined there and is not visible from this file.
reset_lib.reset_to_host_mode()
| 12.25
| 30
| 0.857143
| 9
| 49
| 4.111111
| 0.666667
| 0.432432
| 0.702703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 3
| 31
| 16.333333
| 0.822222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
ef99a8dda8f03731f74060ee4561cbddde89594a
| 29,138
|
py
|
Python
|
qa327_test/frontend/registration/test_25.py
|
marywhetham11/3xCoolTech
|
582e00a4c16016e545fedcbb14a745d125db94e0
|
[
"MIT"
] | null | null | null |
qa327_test/frontend/registration/test_25.py
|
marywhetham11/3xCoolTech
|
582e00a4c16016e545fedcbb14a745d125db94e0
|
[
"MIT"
] | null | null | null |
qa327_test/frontend/registration/test_25.py
|
marywhetham11/3xCoolTech
|
582e00a4c16016e545fedcbb14a745d125db94e0
|
[
"MIT"
] | 1
|
2021-07-08T20:27:50.000Z
|
2021-07-08T20:27:50.000Z
|
import pytest
from seleniumbase import BaseCase
from qa327_test.conftest import base_url
from unittest.mock import patch
from qa327.models import db, User
from werkzeug.security import generate_password_hash, check_password_hash
"""
This file defines all requirement tests for R2.5.
R2.5 - Email, password, password2 all have to satisfy the same required as defined in R1
"""
# Mock a sample user shared by every registration test in this file.
# NOTE(review): `password` stores the *hash* of "test_frontend", and the tests
# below type test_user.password (i.e. the hash, not the plaintext) into the
# form fields — confirm this matches what the backend mocks expect.
test_user = User(
    email='test_frontend@test.com',
    name='test_frontend',
    password=generate_password_hash('test_frontend')
)
class FrontEndRegistrationR5(BaseCase):
    def test_emailEmpty(self, *_):
        """
        R2.5: registration must fail with an error message when the email
        input box is left empty; the user is returned to the login page.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "") # email: empty
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Cannot be empty", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailLocalTooLong(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because the local part exceeds
        64 characters.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "test_frontend012345678901234567890123456789012345678901234567890123456789@test.com") #email: local part is too long (>64 chars)
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailDomainTooLong(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because the domain part exceeds
        255 characters.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "test_frontend@test012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.com") #email: domain is too long (>255 chars)
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailPeriodFirstChar(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because a period is the first
        character of the local part.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", ".test_frontend@test.com") #email: period is first character of local
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailPeriodLastChar(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because a period is the last
        character of the local part.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "test_frontend.@test.com") #email: period is last character of local
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailConsecutivePeriods(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because the local part contains
        consecutive periods.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "test...frontend@test.com") #email: consecutive periods in local
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailHypenFirstChar(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because a hyphen is the first
        character of the domain.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "test_frontend@-test.com") #email: hyphen is first character of domain
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailHypenLastChar(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because a hyphen is the last
        character of the domain.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "test_frontend@test.com-") #email: hyphen is last character of domain
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailMultipleAtSymbols(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because it contains more than one
        @ symbol.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "test@frontend@test.com") #email: multiple @ symbols
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailSpecialCharDomain(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because the domain contains a
        special character.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "test_frontend@test#.com") #email: special character in domain
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_emailNonPrintableChar(self, *_):
        """
        R2.5: registration must fail with an error message when the email does
        not follow addr-spec (RFC 5322) because it contains disallowed
        characters (quoted local part).
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", "test_\"frontend\"@test.com") #email: contains disallowed quote characters
        self.type("#name", test_user.name)
        self.type("#password", test_user.password)
        self.type("#password2", test_user.password)
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Email format is incorrect: Not a valid email", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_passwordEmpty(self, *_):
        """
        R2.5: registration must fail with an error message when the password
        input box is left empty; the user is returned to the login page.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", test_user.email)
        self.type("#name", test_user.name)
        self.type("#password", "") # password: empty
        self.type("#password2", "")
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Password format is incorrect: Cannot be empty", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
    def test_passwordTooShort(self, *_):
        """
        R2.5: registration must fail with an error message when the password
        is shorter than 6 characters.
        """
        # open logout page
        self.open(base_url + '/logout')
        # open register page
        self.open(base_url + '/register')
        # fill email, user name and password
        self.type("#email", test_user.email)
        self.type("#name", test_user.name)
        self.type("#password", "Test@") # password: too short (5 chars)
        self.type("#password2", "Test@")
        # click enter button
        self.click('input[type="submit"]')
        # test if the login page loads correctly
        # test if the login title loads correctly
        self.assert_element("h1")
        self.assert_text("Log In", "h1")
        # test if the error message loads correctly
        self.assert_element("#message")
        self.assert_text("Password format is incorrect: Cannot be less than 6 characters", "#message")
        # test if the login form loads correctly
        self.assert_element("form")
        # test if the email element loads correctly
        self.assert_element('form div label[for="email"]')
        self.assert_text("Email", 'form div label[for="email"]')
        self.assert_element("form div #email")
        # test if the password element loads correctly
        self.assert_element('form div label[for="password"]')
        self.assert_text("Password", 'form div label[for="password"]')
        self.assert_element("form div #password")
        # test if the login button loads correctly
        self.assert_element('form div input[type="submit"]')
def test_passwordNoUppercase(self, *_):
    """Registration must fail when the password lacks an uppercase letter.

    Submits the register form with an all-lowercase password and verifies the
    user is returned to the login page with the expected validation message.
    """
    # Ensure no user is logged in, then load the registration form.
    self.open(base_url + '/logout')
    self.open(base_url + '/register')
    # Fill in email, user name, and a password with no uppercase letter.
    self.type("#email", test_user.email)
    self.type("#name", test_user.name)
    self.type("#password", "test_password@")  # password: no uppercase letter
    self.type("#password2", "test_password@")
    # Submit the registration form.
    self.click('input[type="submit"]')
    # Registration should be rejected: the login page is shown again.
    self.assert_element("h1")
    self.assert_text("Log In", "h1")
    # The uppercase-validation error message must be displayed.
    self.assert_element("#message")
    self.assert_text("Password format is incorrect: Does not contain an uppercase character", "#message")
    # The login form and all of its fields must render correctly.
    self.assert_element("form")
    # Email label and input.
    self.assert_element('form div label[for="email"]')
    self.assert_text("Email", 'form div label[for="email"]')
    self.assert_element("form div #email")
    # Password label and input.
    self.assert_element('form div label[for="password"]')
    self.assert_text("Password", 'form div label[for="password"]')
    self.assert_element("form div #password")
    # Submit button.
    self.assert_element('form div input[type="submit"]')
def test_passwordNoLowercase(self, *_):
    """Registration must fail when the password lacks a lowercase letter.

    Submits the register form with an all-uppercase password and verifies the
    user is returned to the login page with the expected validation message.
    """
    # Ensure no user is logged in, then load the registration form.
    self.open(base_url + '/logout')
    self.open(base_url + '/register')
    # Fill in email, user name, and a password with no lowercase letter.
    self.type("#email", test_user.email)
    self.type("#name", test_user.name)
    self.type("#password", "TEST_PASSWORD@")  # password: no lowercase letter
    self.type("#password2", "TEST_PASSWORD@")
    # Submit the registration form.
    self.click('input[type="submit"]')
    # Registration should be rejected: the login page is shown again.
    self.assert_element("h1")
    self.assert_text("Log In", "h1")
    # The lowercase-validation error message must be displayed.
    self.assert_element("#message")
    self.assert_text("Password format is incorrect: Does not contain a lowercase character", "#message")
    # The login form and all of its fields must render correctly.
    self.assert_element("form")
    # Email label and input.
    self.assert_element('form div label[for="email"]')
    self.assert_text("Email", 'form div label[for="email"]')
    self.assert_element("form div #email")
    # Password label and input.
    self.assert_element('form div label[for="password"]')
    self.assert_text("Password", 'form div label[for="password"]')
    self.assert_element("form div #password")
    # Submit button.
    self.assert_element('form div input[type="submit"]')
def test_passwordNoSpecialCharacter(self, *_):
    """Registration must fail when the password lacks a special character.

    Submits the register form with an alphanumeric-only password and verifies
    the user is returned to the login page with the expected validation
    message.
    """
    # Ensure no user is logged in, then load the registration form.
    self.open(base_url + '/logout')
    self.open(base_url + '/register')
    # Fill in email, user name, and a password with no special characters.
    self.type("#email", test_user.email)
    self.type("#name", test_user.name)
    self.type("#password", "Testpassword")  # password: no special characters
    self.type("#password2", "Testpassword")
    # Submit the registration form.
    self.click('input[type="submit"]')
    # Registration should be rejected: the login page is shown again.
    self.assert_element("h1")
    self.assert_text("Log In", "h1")
    # The special-character validation error message must be displayed.
    self.assert_element("#message")
    self.assert_text("Password format is incorrect: Does not contain a special character", "#message")
    # The login form and all of its fields must render correctly.
    self.assert_element("form")
    # Email label and input.
    self.assert_element('form div label[for="email"]')
    self.assert_text("Email", 'form div label[for="email"]')
    self.assert_element("form div #email")
    # Password label and input.
    self.assert_element('form div label[for="password"]')
    self.assert_text("Password", 'form div label[for="password"]')
    self.assert_element("form div #password")
    # Submit button.
    self.assert_element('form div input[type="submit"]')
| 46.770465
| 380
| 0.644691
| 3,698
| 29,138
| 4.992428
| 0.041915
| 0.103997
| 0.117864
| 0.124797
| 0.913985
| 0.911711
| 0.911711
| 0.911711
| 0.911711
| 0.911711
| 0
| 0.021441
| 0.250909
| 29,138
| 623
| 381
| 46.770465
| 0.824392
| 0.322191
| 0
| 0.834337
| 1
| 0
| 0.322255
| 0.06684
| 0
| 0
| 0
| 0
| 0.578313
| 1
| 0.048193
| false
| 0.277108
| 0.018072
| 0
| 0.069277
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
efdafae6844f230c124a36135faffbd53c46f486
| 24
|
py
|
Python
|
test/run/t513.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t513.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t513.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Exercise printing of a list of floats.
x = [1.0, 2.0]
# Use the print() call form: for a single argument it produces identical
# output on Python 2 and Python 3, whereas the original `print x`
# statement is a SyntaxError on Python 3.
print(x)
| 6
| 14
| 0.458333
| 7
| 24
| 1.571429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 0.291667
| 24
| 3
| 15
| 8
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.5
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
efeb83cde9190d7754f8501fabfe16ceddca8732
| 157,841
|
py
|
Python
|
dlkit/abstract_osid/configuration/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/configuration/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/configuration/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of configuration abstract base class sessions."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class ValueRetrievalSession(metaclass=abc.ABCMeta):
    """This session is used to retrieve active configuration values.

    Two views of the configuration data are defined:

      * federated: parameters defined in configurations that are a
        parent of this configuration in the configuration hierarchy are
        included
      * isolated: parameters are contained to within this configuration
      * conditional: values are filtered that do not pass any defined
        conditions, whether or not they are explicitly passed into the
        lookup methods of this session
      * unconditional: values are filtered only for the conditions that
        are explicitly passed as parameters. Any conditions defined for
        the value that do not require explicit data for retrieval are
        ignored.

    This session assumes an active view.

    Values are not OSID objects and are obtained using a reference to a
    Parameter.

    """
    # Fix: the original ``__metaclass__ = abc.ABCMeta`` is Python 2 syntax
    # and is silently ignored on Python 3, so the abstract methods were not
    # enforced.  Declaring the metaclass in the class header enforces them.

    @abc.abstractmethod
    def get_configuration_id(self):
        """Gets the ``Configuration`` ``Id`` associated with this session.

        :return: the ``Configuration`` ``Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    configuration_id = property(fget=get_configuration_id)

    @abc.abstractmethod
    def get_configuration(self):
        """Gets the ``Configuration`` associated with this session.

        :return: the ``Configuration`` associated with this session
        :rtype: ``osid.configuration.Configuration``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Configuration

    configuration = property(fget=get_configuration)

    @abc.abstractmethod
    def can_lookup_values(self):
        """Tests if this user can perform ``Value`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_comparative_value_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_plenary_value_view(self):
        """A complete view of the ``Value`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_federated_configuration_view(self):
        """Federates the view for methods in this session.

        A federated view will include values from parent configurations
        in the configuration hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_configuration_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this configuration only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_conditional_view(self):
        """Returns only values that pass the defined parameter condition.

        Some parameter conditions do not require explicit conditional
        data to be passed and the ``Values`` returned from any method in
        this session are filtered on an implicit condition.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_unconditional_view(self):
        """Values that are filtered based on an implicit condition are not filtered out from methods in this session.

        Methods that take an explicit condition as a parameter are
        filtered on only those conditions that are specified.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_value_by_parameter(self, parameter_id):
        """Gets a ``Value`` for the given parameter ``Id``.

        If more than one value exists for the given parameter, the most
        preferred value is returned. This method can be used as a
        convenience when only one value is expected.
        ``get_values_by_parameters()`` should be used for getting all
        the active values.

        :param parameter_id: the ``Id`` of the ``Parameter`` to retrieve
        :type parameter_id: ``osid.id.Id``
        :return: the value
        :rtype: ``osid.configuration.Value``
        :raise: ``NotFound`` -- the ``parameter_id`` not found or no value available
        :raise: ``NullArgument`` -- the ``parameter_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Value

    @abc.abstractmethod
    def get_values_by_parameter(self, parameter_id):
        """Gets all the ``Values`` for the given parameter ``Id``.

        :param parameter_id: the ``Id`` of the ``Parameter`` to retrieve
        :type parameter_id: ``osid.id.Id``
        :return: the value list
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NotFound`` -- the ``parameter_id`` not found
        :raise: ``NullArgument`` -- the ``parameter_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList

    @abc.abstractmethod
    def get_values_by_parameters(self, parameter_ids):
        """Gets the ``Values`` for the given parameter ``Ids``.

        :param parameter_ids: the ``Id`` of the ``Parameter`` to retrieve
        :type parameter_ids: ``osid.id.IdList``
        :return: the value list
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NotFound`` -- a parameter ``Id`` is not found
        :raise: ``NullArgument`` -- ``parameter_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList

    @abc.abstractmethod
    def get_value_condition(self, parameter_id):
        """Gets a value condition for the given parameter.

        :param parameter_id: the ``Id`` of a ``Parameter``
        :type parameter_id: ``osid.id.Id``
        :return: a value condition
        :rtype: ``osid.configuration.ValueCondition``
        :raise: ``NullArgument`` -- ``parameter_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueCondition

    @abc.abstractmethod
    def get_value_by_parameter_on_condition(self, parameter_id, value_condition):
        """Gets a value in this configuration based on a condition.

        If multiple values are available the most preferred one is
        returned. The condition specified is applied to any or all
        parameters in this configuration as applicable.

        :param parameter_id: the ``Id`` of a ``Parameter``
        :type parameter_id: ``osid.id.Id``
        :param value_condition: the condition
        :type value_condition: ``osid.configuration.ValueCondition``
        :return: the value
        :rtype: ``osid.configuration.Value``
        :raise: ``NotFound`` -- parameter ``Id`` is not found
        :raise: ``NullArgument`` -- ``parameter_id`` or ``value_condition`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``value_condition`` not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Value

    @abc.abstractmethod
    def get_values_by_parameter_on_condition(self, parameter_id, value_condition):
        """Gets all the values for a parameter based on a condition.

        In plenary mode, all values are returned or an error results. In
        comparative mode, inaccessible values may be omitted.

        :param parameter_id: the ``Id`` of a ``Parameter``
        :type parameter_id: ``osid.id.Id``
        :param value_condition: the condition
        :type value_condition: ``osid.configuration.ValueCondition``
        :return: the value list
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NotFound`` -- parameter ``Id`` is not found
        :raise: ``NullArgument`` -- ``parameter_id`` or ``value_condition`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``value_condition`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList

    @abc.abstractmethod
    def get_values_by_parameters_on_condition(self, parameter_ids, value_condition):
        """Gets the values for parameters based on a condition.

        The specified condition is applied to any or all of the
        parameters as applicable. In plenary mode, all values are
        returned or an error results. In comparative mode, inaccessible
        values may be omitted.

        :param parameter_ids: the ``Id`` of a ``Parameter``
        :type parameter_ids: ``osid.id.IdList``
        :param value_condition: the condition
        :type value_condition: ``osid.configuration.ValueCondition``
        :return: the value list
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NotFound`` -- a parameter ``Id`` is not found
        :raise: ``NullArgument`` -- ``parameter_ids`` or ``value_condition`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``value_condition`` not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList
class ValueLookupSession(metaclass=abc.ABCMeta):
    """This session is used to retrieve configuration values.

    Two views of the configuration data are defined:

      * federated: parameters defined in configurations that are a
        parent of this configuration in the configuration hierarchy are
        included
      * isolated: parameters are contained to within this configuration
      * conditional: values are filtered that do not pass any defined
        conditions, whether or not they are explicitly passed into the
        lookup methods of this session
      * unconditional: values are filtered only for the conditions that
        are explicitly passed as parameters. Any conditions defined for
        the value that do not require explicit data for retrieval are
        ignored.
      * active value view: All value lookup methods return active
        values.
      * any status value view: Values of any active or inactive status
        are returned from methods.

    Values are not OSID objects and are obtained using a reference to a
    Parameter.

    """
    # Fix: ``__metaclass__ = abc.ABCMeta`` (Python 2 syntax) is ignored on
    # Python 3; declare the metaclass in the class header so abstract
    # methods are enforced.

    @abc.abstractmethod
    def use_active_value_view(self):
        """Only active values are returned by methods in this session.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_any_status_value_view(self):
        """All active and inactive values are returned by methods in this session.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_value(self, value_id):
        """Gets the ``Value`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``Value`` may have a different
        ``Id`` than requested, such as the case where a duplicate ``Id``
        was assigned to a ``Value`` and retained for compatibility.

        :param value_id: the ``Id`` of the ``Value`` to retrieve
        :type value_id: ``osid.id.Id``
        :return: the returned ``Value``
        :rtype: ``osid.configuration.Value``
        :raise: ``NotFound`` -- no ``Value`` found with the given ``Id``
        :raise: ``NullArgument`` -- ``value_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Value

    @abc.abstractmethod
    def get_values_by_ids(self, value_ids):
        """Gets a ``ValueList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the values
        specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
        supplied list is not found or inaccessible. Otherwise,
        inaccessible ``Values`` may be omitted from the list and may
        present the elements in any order including returning a unique
        set.

        :param value_ids: the list of ``Ids`` to retrieve
        :type value_ids: ``osid.id.IdList``
        :return: the returned ``Value`` list
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NotFound`` -- an ``Id was`` not found
        :raise: ``NullArgument`` -- ``value_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList

    @abc.abstractmethod
    def get_values_by_genus_type(self, value_genus_type):
        """Gets a ``ValueList`` corresponding to the given value genus ``Type`` which does not include values of genus types derived from the specified ``Type``.

        :param value_genus_type: a value genus type
        :type value_genus_type: ``osid.type.Type``
        :return: the returned ``Value list``
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NullArgument`` -- ``value_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList

    @abc.abstractmethod
    def get_values_by_parent_genus_type(self, value_genus_type):
        """Gets a ``ValueList`` corresponding to the given value genus ``Type`` and include any additional values with genus types derived from the specified ``Type``.

        :param value_genus_type: a value genus type
        :type value_genus_type: ``osid.type.Type``
        :return: the returned ``Value list``
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NullArgument`` -- ``value_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList

    @abc.abstractmethod
    def get_values_by_record_type(self, value_record_type):
        """Gets a ``ValueList`` corresponding to the given value record ``Type`` which does not include values of record types derived from the specified ``Type``.

        :param value_record_type: a value type
        :type value_record_type: ``osid.type.Type``
        :return: the returned ``Value`` list
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NullArgument`` -- ``value_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList

    @abc.abstractmethod
    def get_values(self):
        """Gets all the values in this configuration.

        In plenary mode, all values are returned or an error results. In
        comparative mode, inaccessible values may be omitted.
        In plenary mode, the returned list contains all known values or
        an error results. Otherwise, the returned list may contain only
        those values that are accessible through this session.
        In active mode, values are returned that are currently active.
        In any status mode, active and inactive values are returned.

        :return: the value list
        :rtype: ``osid.configuration.ValueList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList

    values = property(fget=get_values)

    @abc.abstractmethod
    def get_values_on_condition(self, value_condition):
        """Gets the values in this configuration based on a condition.

        The condition specified is applied to any or all parameters in
        this configuration as applicable. In plenary mode, all values
        are returned or an error results. In comparative mode,
        inaccessible values may be omitted.

        :param value_condition: a value condition
        :type value_condition: ``osid.configuration.ValueCondition``
        :return: the value list
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NullArgument`` -- ``value_condition`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``value_condition`` not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList
class ValueQuerySession(metaclass=abc.ABCMeta):
    """This session provides methods for searching ``Value`` objects.

    The search query is constructed using the ``ValueQuery``. The
    parameter ``Type`` also specifies the record for the value query.

    Two views of the configuration data are defined;

      * federated: values defined in configurations that are a parent of
        this configuration in the configuration hierarchy are included
      * isolated: values are contained to within this configuration

    """
    # Fix: ``__metaclass__ = abc.ABCMeta`` (Python 2 syntax) is ignored on
    # Python 3; declare the metaclass in the class header so abstract
    # methods are enforced.

    @abc.abstractmethod
    def get_configuration_id(self):
        """Gets the ``Configuration`` ``Id`` associated with this session.

        :return: the ``Configuration`` ``Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    configuration_id = property(fget=get_configuration_id)

    @abc.abstractmethod
    def get_configuration(self):
        """Gets the ``Configuration`` associated with this session.

        :return: the ``Configuration`` associated with this session
        :rtype: ``osid.configuration.Configuration``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Configuration

    configuration = property(fget=get_configuration)

    @abc.abstractmethod
    def can_search_values(self):
        """Tests if this user can perform ``Value`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_federated_configuration_view(self):
        """Federates the view for methods in this session.

        A federated view will include values from parent configurations
        in the configuration hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_configuration_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts searches to this configuration only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_value_query(self):
        """Gets a value query.

        :return: the value query
        :rtype: ``osid.configuration.ValueQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueQuery

    value_query = property(fget=get_value_query)

    @abc.abstractmethod
    def get_values_by_query(self, value_query):
        """Gets a list of ``Values`` matching the given value query.

        :param value_query: the value query
        :type value_query: ``osid.configuration.ValueQuery``
        :return: the returned ``ValueList``
        :rtype: ``osid.configuration.ValueList``
        :raise: ``NullArgument`` -- ``value_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- a query form is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueList
class ValueSearchSession(metaclass=abc.ABCMeta):
    """This session provides methods for searching ``Value`` objects.

    The search query is constructed using the ``ValueQuery``. The
    parameter ``Type`` also specifies the record for the value query.

    ``get_values_by_query()`` is the basic search method and returns a
    list of ``Values``. A more advanced search may be performed with
    ``getValuesBySearch()``. It accepts a ``ValueSearch`` in addition to
    the query for the purpose of specifying additional options affecting
    the entire search, such as ordering. ``get_values_by_search()``
    returns a ``ValueSearchResults`` that can be used to access the
    resulting ``ValueList`` or be used to perform a search within the
    result set through ``ValueSearch``.

    Two views of the configuration data are defined;

      * federated: values defined in configurations that are a parent of
        this configuration in the configuration hierarchy are included
      * isolated: values are contained to within this configuration

    """
    # Fix: ``__metaclass__ = abc.ABCMeta`` (Python 2 syntax) is ignored on
    # Python 3; declare the metaclass in the class header so abstract
    # methods are enforced.

    @abc.abstractmethod
    def get_value_search(self):
        """Gets a value search.

        :return: the value search
        :rtype: ``osid.configuration.ValueSearch``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueSearch

    value_search = property(fget=get_value_search)

    @abc.abstractmethod
    def get_value_search_order(self):
        """Gets a value search order.

        The ``ValueSearchOrder`` is supplied to a ``ValueSearch`` to
        specify the ordering of results.

        :return: the value search order
        :rtype: ``osid.configuration.ValueSearchOrder``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueSearchOrder

    value_search_order = property(fget=get_value_search_order)

    @abc.abstractmethod
    def get_values_by_search(self, value_query, value_search):
        """Gets a list of ``Values`` matching the given search query using the given search.

        :param value_query: the value query
        :type value_query: ``osid.configuration.ValueQuery``
        :param value_search: the value search
        :type value_search: ``osid.configuration.ValueSearch``
        :return: the search results
        :rtype: ``osid.configuration.ValueSearchResults``
        :raise: ``NullArgument`` -- ``value_query`` or ``value_search`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``value_query`` or ``value_search`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueSearchResults

    @abc.abstractmethod
    def get_value_query_from_inspector(self, value_query_inspector):
        """Gets a value query from an inspector.

        The inspector is available from a ``ValueSearchResults``.

        :param value_query_inspector: a value query inspector
        :type value_query_inspector: ``osid.configuration.ValueQueryInspector``
        :return: the value query
        :rtype: ``osid.configuration.ValueQuery``
        :raise: ``NullArgument`` -- ``value_query_inspector`` is ``null``
        :raise: ``Unsupported`` -- ``value_query_inspector`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueQuery
class ValueAdminSession:
    """This session creates, updates, and deletes ``Values``.

    The data for create and update is provided by the consumer via the
    form object. ``OsidForms`` are requested for each create or update
    and may not be reused.

    Create and update operations differ in their usage. To create a
    ``Value,`` a ``ValueForm`` is requested using
    ``get_value_form_for_create()`` specifying the desired parameter and
    record ``Types`` or none if no record ``Types`` are needed. The
    returned ``ValueForm`` will indicate that it is to be used with a
    create operation and can be used to examine metadata or validate
    data prior to creation. Once the ``ValueForm`` is submitted to a
    create operation, it cannot be reused with another create operation
    unless the first operation was unsuccessful. Each ``ValueForm``
    corresponds to an attempted transaction.

    For updates, ``ValueForms`` are requested to the ``Value`` ``Id``
    that is to be updated using ``getValueFormForUpdate()``. Similarly,
    the ``ValueForm`` has metadata about the data that can be updated
    and it can perform validation before submitting the update. The
    ``ValueForm`` can only be used once for a successful update and
    cannot be reused.

    This session includes an ``Id`` aliasing mechanism to assign an
    external ``Id`` to an internally assigned Id.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_configuration_id(self):
        """Gets the ``Configuration`` ``Id`` associated with this session.

        :return: the ``Configuration`` ``Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    configuration_id = property(fget=get_configuration_id)

    @abc.abstractmethod
    def get_configuration(self):
        """Gets the ``Configuration`` associated with this session.

        :return: the ``Configuration`` associated with this session
        :rtype: ``osid.configuration.Configuration``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Configuration

    configuration = property(fget=get_configuration)

    @abc.abstractmethod
    def support_value_conditions(self):
        """Tests if applying conditions to values is supported.

        :return: ``true`` if ``Value`` conditions are supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def can_create_values(self):
        """Tests if this user can create ``Values``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a ``Value``
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer create
        operations to an unauthorized user.

        :return: ``false`` if ``Value`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def can_create_value_with_record_types(self, value_record_types):
        """Tests if this user can create a single ``Value`` using the desired record types.

        While ``ConfigurationManager.getValueRecordTypes()`` can be used
        to examine which records are supported, this method tests which
        record(s) are required for creating a specific ``Value``.
        Providing an empty array tests if a ``Value`` can be created
        with no records.

        :param value_record_types: array of value record types
        :type value_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``Value`` creation using the specified record ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``value_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_value_form_for_create(self, parameter_id, value_record_types):
        """Gets the form for creating new values.

        A new form should be requested for each create transaction.

        :param parameter_id: the parameter
        :type parameter_id: ``osid.id.Id``
        :param value_record_types: array of value record types
        :type value_record_types: ``osid.type.Type[]``
        :return: the value form
        :rtype: ``osid.configuration.ValueForm``
        :raise: ``NotFound`` -- ``parameter_id`` is not found
        :raise: ``NullArgument`` -- ``parameter_id`` or ``value_record_types`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form for requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueForm

    @abc.abstractmethod
    def create_value(self, value_form):
        """Creates a value.

        :param value_form: the form
        :type value_form: ``osid.configuration.ValueForm``
        :return: the value
        :rtype: ``osid.configuration.Value``
        :raise: ``IllegalState`` -- ``value_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- one or more of the form elements is invalid
        :raise: ``NullArgument`` -- ``value_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``value_form`` did not originate from ``get_value_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Value

    @abc.abstractmethod
    def can_update_values(self):
        """Tests if this user can update ``Values``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known updating a ``Value``
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer update
        operations to an unauthorized user.

        :return: ``false`` if ``Value`` modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_value_form_for_update(self, value_id):
        """Gets the value form for updating an existing value.

        A new value form should be requested for each update
        transaction.

        :param value_id: the ``Id`` of the ``Value``
        :type value_id: ``osid.id.Id``
        :return: the value form
        :rtype: ``osid.configuration.ValueForm``
        :raise: ``NotFound`` -- the value is not found
        :raise: ``NullArgument`` -- ``value_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ValueForm

    @abc.abstractmethod
    def update_value(self, value_form):
        """Updates an existing value.

        :param value_form: the form containing the elements to be updated
        :type value_form: ``osid.configuration.ValueForm``
        :raise: ``IllegalState`` -- ``value_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``value_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``value_form`` did not originate from ``get_value_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_delete_values(self):
        """Tests if this user can delete ``Values``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a ``Value``
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer delete
        operations to an unauthorized user.

        :return: ``false`` if ``Value`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def delete_value(self, value_id):
        """Deletes the specified value.

        :param value_id: the ``Id`` of the ``Value`` to delete
        :type value_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``value_id`` is not found
        :raise: ``NullArgument`` -- ``value_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_manage_value_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``Values``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        :return: ``false`` if ``Value`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def alias_value(self, value_id, alias_id):
        """Adds an ``Id`` to a ``Value`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Value`` is determined by the
        provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another value it is
        reassigned to the given value ``Id``.

        :param value_id: the ``Id`` of a ``Value``
        :type value_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``value_id`` not found
        :raise: ``NullArgument`` -- ``value_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ValueNotificationSession:
    """This session defines methods to receive notifications on changes to ``Values``.

    A particular value in a set may have been added or deleted, but all
    changes appear as a change to a parameter. Once a change
    notification is received, the new value list can be obtained through
    the ``ValueLookupSession``. This session is intended for adapters
    and providers needing to synchronize their state with this service
    without the use of polling. Notifications are cancelled when this
    session is closed.

    Two views are defined;

      * federated: parameters defined in configurations that are a
        parent of this configuration in the configuration hierarchy are
        included for notifications
      * isolated: notifications are restricted to parameters defined
        within this configuration

    The methods ``use_federated_configuration_view()`` and
    ``use_isolated_configuration_view()`` behave as a radio group and
    one should be selected before invoking any lookup methods.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_configuration_id(self):
        """Gets the ``Configuration`` ``Id`` associated with this session.

        :return: the ``Configuration`` ``Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    configuration_id = property(fget=get_configuration_id)

    @abc.abstractmethod
    def get_configuration(self):
        """Gets the ``Configuration`` associated with this session.

        :return: the ``Configuration`` associated with this session
        :rtype: ``osid.configuration.Configuration``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Configuration

    configuration = property(fget=get_configuration)

    @abc.abstractmethod
    def can_register_for_value_notifications(self):
        """Tests if this user can register for ``Value`` notifications.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer
        notification operations.

        :return: ``false`` if notification methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_federated_configuration_view(self):
        """Federates the view for methods in this session.

        A federated view will include parameters in configurations of
        which this configuration is a child in the configuration
        hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_configuration_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts notifications for parameter values to
        this configuration only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def reliable_value_notifications(self):
        """Reliable notifications are desired.

        In reliable mode, notifications are to be acknowledged using
        ``acknowledge_value_notification()`` .

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def unreliable_value_notifications(self):
        """Unreliable notifications are desired.

        In unreliable mode, notifications do not need to be
        acknowledged.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def acknowledge_value_notification(self, notification_id):
        """Acknowledge a value notification.

        :param notification_id: the ``Id`` of the notification
        :type notification_id: ``osid.id.Id``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_new_values(self):
        """Assigns a callback for notifications of new values.

        ``ValueReceiver.newValues()`` is invoked when a new ``Value`` is
        added to this configuration.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_new_values_for_parameter(self, parameter_id):
        """Assigns a callback for notifications of new values for the given parameter.

        ``ValueReceiver.newValues()`` is invoked when a new ``Value`` is
        added to this configuration.

        :param parameter_id: the ``Id`` of the ``Parameter`` to monitor
        :type parameter_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``parameter_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_values(self):
        """Assigns a callback for notification of updated parameter values in this configuration.

        ``ValueReceiver.changedValues()`` is invoked when a ``Value`` is
        changed in this configuration.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_values_for_parameter(self, parameter_id):
        """Assigns a callback for notifications of changed values for the given parameter.

        ``ValueReceiver.changedValues()`` is invoked when a ``Value`` is
        updated to this configuration.

        :param parameter_id: the ``Id`` of the ``Parameter`` to monitor
        :type parameter_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``parameter_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_value(self, value_id):
        """Assigns a callback for notifications of an update to a value in this configuration.

        ``ValueReceiver.changedValues()`` is invoked when the specified
        ``Value`` is updated in this configuration.

        :param value_id: the ``Id`` of the ``Value`` to monitor
        :type value_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``value_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_values(self):
        """Assigns a callback for notification of deleted values in this configuration.

        ``ValueReceiver.deletedValues()`` is invoked when a ``Value`` is
        removed from this configuration.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_values_for_parameter(self, parameter_id):
        """Assigns a callback for notifications of deleted values for the given parameter.

        ``ValueReceiver.deletedValues()`` is invoked when a ``Value`` is
        removed from this configuration.

        :param parameter_id: the ``Id`` of the ``Parameter`` to monitor
        :type parameter_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``parameter_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_value(self, value_id):
        """Assigns a callback for notifications of a deleted value in this configuration.

        ``ValueReceiver.deletedValues()`` is invoked when the specified
        ``Value`` is removed from this configuration.

        :param value_id: the ``Id`` of the ``Value`` to monitor
        :type value_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``value_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ParameterLookupSession:
    """This session is used to retrieve parameters from a configuration registry of parameters."""
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_configuration_id(self):
        """Gets the ``Configuration`` ``Id`` associated with this session.

        :return: the ``Configuration Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    configuration_id = property(fget=get_configuration_id)

    @abc.abstractmethod
    def get_configuration(self):
        """Gets the ``Configuration`` associated with this session.

        :return: the ``Configuration`` associated with this session
        :rtype: ``osid.configuration.Configuration``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Configuration

    configuration = property(fget=get_configuration)

    @abc.abstractmethod
    def can_lookup_parameters(self):
        """Tests if this user can perform ``Parameter`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_comparative_parameter_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_plenary_parameter_view(self):
        """A complete view of the ``Parameter`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_federated_configuration_view(self):
        """Federates the view for methods in this session.

        A federated view will include parameters from parent
        configurations in the configuration hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_configuration_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this configuration only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_active_parameter_view(self):
        """Only active parameters are returned by methods in this session.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_any_status_parameter_view(self):
        """All active and inactive parameters are returned by methods in this session.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_parameter(self, parameter_id):
        """Gets the ``Parameter`` specified by its ``Id``.

        :param parameter_id: the ``Id`` of the ``Parameter`` to retrieve
        :type parameter_id: ``osid.id.Id``
        :return: the returned ``Parameter``
        :rtype: ``osid.configuration.Parameter``
        :raise: ``NotFound`` -- no ``Parameter`` found with the given ``Id``
        :raise: ``NullArgument`` -- ``parameter_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Parameter

    @abc.abstractmethod
    def get_parameters_by_ids(self, parameter_ids):
        """Gets a ``ParameterList`` corresponding to the given ``IdList``.

        :param parameter_ids: the list of ``Ids`` to retrieve
        :type parameter_ids: ``osid.id.IdList``
        :return: the returned ``Parameter`` list
        :rtype: ``osid.configuration.ParameterList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``parameter_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterList

    @abc.abstractmethod
    def get_parameters_by_genus_type(self, parameter_genus_type):
        """Gets a ``ParameterList`` corresponding to the given parameter genus ``Type`` which does not include parameters of genus types derived from the specified ``Type``.

        :param parameter_genus_type: a parameter genus type
        :type parameter_genus_type: ``osid.type.Type``
        :return: the returned ``Parameter list``
        :rtype: ``osid.configuration.ParameterList``
        :raise: ``NullArgument`` -- ``parameter_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterList

    @abc.abstractmethod
    def get_parameters_by_parent_genus_type(self, parameter_genus_type):
        """Gets a ``ParameterList`` corresponding to the given parameters genus ``Type`` and include any additional parameters with genus types derived from the specified ``Type``.

        :param parameter_genus_type: a parameter genus type
        :type parameter_genus_type: ``osid.type.Type``
        :return: the returned ``Parameter list``
        :rtype: ``osid.configuration.ParameterList``
        :raise: ``NullArgument`` -- ``parameter_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterList

    @abc.abstractmethod
    def get_parameters_by_record_type(self, parameter_record_type):
        """Gets a ``ParameterList`` corresponding to the given parameter record ``Type`` which does not include parameters of record types derived from the specified ``Type``.

        :param parameter_record_type: a parameter type
        :type parameter_record_type: ``osid.type.Type``
        :return: the returned ``Parameter`` list
        :rtype: ``osid.configuration.ParameterList``
        :raise: ``NullArgument`` -- ``parameter_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterList

    @abc.abstractmethod
    def get_parameters(self):
        """Gets all ``Parameters``.

        :return: a list of ``Parameters``
        :rtype: ``osid.configuration.ParameterList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterList

    parameters = property(fget=get_parameters)
class ParameterQuerySession:
    """This session provides methods for searching ``Parameter`` objects.

    The search query is constructed using the ``ParameterQuery``.

    Two views of the configuration data are defined;

      * federated: parameters defined in configurations that are a
        parent of this configuration in the configuration hierarchy are
        included
      * isolated: parameters are contained to within this configuration

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_configuration_id(self):
        """Gets the ``Configuration`` ``Id`` associated with this session.

        :return: the ``Configuration`` ``Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    configuration_id = property(fget=get_configuration_id)

    @abc.abstractmethod
    def get_configuration(self):
        """Gets the ``Configuration`` associated with this session.

        :return: the ``Configuration`` associated with this session
        :rtype: ``osid.configuration.Configuration``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Configuration

    configuration = property(fget=get_configuration)

    @abc.abstractmethod
    def can_search_parameters(self):
        """Tests if this user can perform ``Parameter`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_federated_configuration_view(self):
        """Federates the view for methods in this session.

        A federated view will include parameters from parent
        configurations in the configuration hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_configuration_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts searches to this configuration only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_parameter_query(self):
        """Gets a parameter query.

        :return: the parameter query
        :rtype: ``osid.configuration.ParameterQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterQuery

    parameter_query = property(fget=get_parameter_query)

    @abc.abstractmethod
    def get_parameters_by_query(self, parameter_query):
        """Gets a list of ``Parameters`` matching the given query.

        :param parameter_query: the parameter query
        :type parameter_query: ``osid.configuration.ParameterQuery``
        :return: the returned ``ParameterList``
        :rtype: ``osid.configuration.ParameterList``
        :raise: ``NullArgument`` -- ``parameter_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- a query form is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterList
class ParameterSearchSession:
    """This session provides methods for searching ``Parameter`` objects.

    The search query is constructed using the ``ParameterQuery``.
    ``get_parameters_by_query()`` is the basic search method and returns
    a list of ``Parameters``. A more advanced search may be performed
    with ``getParametersBySearch()``. It accepts a ``ParameterSearch``
    in addition to the query for the purpose of specifying additional
    options affecting the entire search, such as ordering.
    ``get_parameters_by_search()`` returns a ``ParameterSearchResults``
    that can be used to access the resulting ``ParameterList`` or be
    used to perform a search within the result set through
    ``ParameterSearch``.

    Two views of the configuration data are defined;

      * federated: parameters defined in configurations that are a
        parent of this configuration in the configuration hierarchy are
        included
      * isolated: parameters are contained to within this configuration

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_parameter_search(self):
        """Gets a parameter search.

        :return: the parameter search
        :rtype: ``osid.configuration.ParameterSearch``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterSearch

    parameter_search = property(fget=get_parameter_search)

    @abc.abstractmethod
    def get_parameter_search_order(self):
        """Gets a parameter search order.

        The ``ParameterSearchOrder`` is supplied to a
        ``ParameterSearch`` to specify the ordering of results.

        :return: the parameter search order
        :rtype: ``osid.configuration.ParameterSearchOrder``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterSearchOrder

    parameter_search_order = property(fget=get_parameter_search_order)

    @abc.abstractmethod
    def get_parameters_by_search(self, parameter_query, parameter_search):
        """Gets a list of ``Parameters`` matching the given search query using the given search.

        :param parameter_query: the parameter query
        :type parameter_query: ``osid.configuration.ParameterQuery``
        :param parameter_search: the parameter search
        :type parameter_search: ``osid.configuration.ParameterSearch``
        :return: the parameter search results
        :rtype: ``osid.configuration.ParameterSearchResults``
        :raise: ``NullArgument`` -- ``parameter_query`` or ``parameter_search`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``parameter_query`` or ``parameter_search`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterSearchResults

    @abc.abstractmethod
    def get_parameter_query_from_inspector(self, parameter_query_inspector):
        """Gets a parameter query from an inspector.

        The inspector is available from a ``ParameterSearchResults``.

        :param parameter_query_inspector: a parameter query inspector
        :type parameter_query_inspector: ``osid.configuration.ParameterQueryInspector``
        :return: the parameter query
        :rtype: ``osid.configuration.ParameterQuery``
        :raise: ``NullArgument`` -- ``parameter_query_inspector`` is ``null``
        :raise: ``Unsupported`` -- ``parameter_query_inspector`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ParameterQuery
class ParameterAdminSession:
"""This session creates, updates, and deletes ``Parameters``.
The data for create and update is provided by the consumer via the
form object. ``OsidForms`` are requested for each create or update
and may not be reused.
Create and update operations differ in their usage. To create a
``Parameter,`` a ``ParameterForm`` is requested using
``get_parameter_form_for_create()`` specifying the desired
relationship peers and record ``Types`` or none if no record
``Types`` are needed. The returned ``ParameterForm`` will indicate
that it is to be used with a create operation and can be used to
examine metdata or validate data prior to creation. Once the
``ParameterForm`` is submiited to a create operation, it cannot be
reused with another create operation unless the first operation was
unsuccessful. Each ``ParameterForm`` corresponds to an attempted
transaction.
For updates, ``ParameterForms`` are requested to the ``Parameter``
``Id`` that is to be updated using ``getParameterFormForUpdate()``.
Similarly, the ``ParameterForm`` has metadata about the data that
can be updated and it can perform validation before submitting the
update. The ``ParameterForm`` can only be used once for a successful
update and cannot be reused.
The delete operations delete ``Parameters``. To unmap a
``Parameter`` from the current ``Configuration,`` the
``ParameterConfigurationAssignmentSession`` should be used. These
delete operations attempt to remove the ``Parameter`` itself thus
removing it from all known ``Configuration`` catalogs.
This session includes an ``Id`` aliasing mechanism to assign an
external ``Id`` to an internally assigned Id.
"""
__metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def get_configuration_id(self):
        """Gets the ``Configuration`` ``Id`` associated with this session.

        :return: the ``Configuration Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    configuration_id = property(fget=get_configuration_id)
    @abc.abstractmethod
    def get_configuration(self):
        """Gets the ``Configuration`` associated with this session.

        :return: the ``Configuration`` associated with this session
        :rtype: ``osid.configuration.Configuration``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Configuration

    configuration = property(fget=get_configuration)
@abc.abstractmethod
def can_create_parameters(self):
"""Tests if this user can create ``Parameters``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a
``Parameter`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
create operations to an unauthorized user.
:return: ``false`` if ``Parameter`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def can_create_parameter_with_record_types(self, parameter_record_types):
"""Tests if this user can create a single ``Parameter`` using the desired record types.
While ``ConfigurationManager.getParameterRecordTypes()`` can be
used to examine which records are supported, this method tests
which record(s) are required for creating a specific
``Parameter``. Providing an empty array tests if a ``Parameter``
can be created with no records.
:param parameter_record_types: array of parameter record types
:type parameter_record_types: ``osid.type.Type[]``
:return: ``true`` if ``Parameter`` creation using the specified record ``Types`` is supported, ``false``
otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``parameter_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_parameter_form_for_create(self, parameter_record_types):
"""Gets the paramater form for creating new parameters.
:param parameter_record_types: array of parameter record types
:type parameter_record_types: ``osid.type.Type[]``
:return: the parameter form
:rtype: ``osid.configuration.ParameterForm``
:raise: ``NullArgument`` -- ``configuration_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ParameterForm
@abc.abstractmethod
def create_parameter(self, parameter_form):
"""Creates a new ``Parameter``.
:param parameter_form: the form for this ``Parameter``
:type parameter_form: ``osid.configuration.ParameterForm``
:return: the new ``Parameter``
:rtype: ``osid.configuration.Parameter``
:raise: ``IllegalState`` -- ``parameter_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``parameter_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``parameter_form`` did not originate from ``get_parameter_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.Parameter
@abc.abstractmethod
def can_update_parameters(self):
"""Tests if this user can update ``Parameters``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``Parameter`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
update operations to an unauthorized user.
:return: ``false`` if ``Parameter`` modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_parameter_form_for_update(self, parameter_id):
"""Gets the parameter form for updating an existing parameters.
:param parameter_id: the ``Id`` of the ``Parameter``
:type parameter_id: ``osid.id.Id``
:return: the parameter form
:rtype: ``osid.configuration.ParameterForm``
:raise: ``NotFound`` -- ``parameter_id`` is not found
:raise: ``NullArgument`` -- ``parameter_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ParameterForm
@abc.abstractmethod
def update_parameter(self, parameter_form):
"""Updates an existing parameter.
:param parameter_form: the form containing the elements to be updated
:type parameter_form: ``osid.configuration.ParameterForm``
:raise: ``IllegalState`` -- ``parameter_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``parameter_id`` or ``parameter_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``parameter_form`` did not originate from ``get_parameter_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def can_delete_parameters(self):
"""Tests if this user can delete ``Parameters``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``Parameter`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
delete operations to an unauthorized user.
:return: ``false`` if ``Parameter`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def delete_parameter(self, parameter_id):
"""Deletes a ``Parameter``.
:param parameter_id: the ``Id`` of the ``Parameter`` to remove
:type parameter_id: ``osid.id.Id``
:raise: ``NotFound`` -- ``parameter_id`` not found
:raise: ``NullArgument`` -- ``parameter_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def can_manage_parameter_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``Parameters``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``Parameter`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def alias_parameter(self, parameter_id, alias_id):
"""Adds an ``Id`` to a ``Parameter`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Parameter`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another parameter it is
reassigned to the given parameter ``Id``.
:param parameter_id: the ``Id`` of a ``Parameter``
:type parameter_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``parameter_id`` not found
:raise: ``NullArgument`` -- ``parameter_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
class ParameterNotificationSession:
"""This session defines methods to receive notifications on adds/changes to ``Configurations`` and their properties.
This session is intended for adapters and providers needing to
synchronize their state with this service without the use of
polling. Notifications are cancelled when this session is closed.
Two views are defined;
* federated: parameters defined in configurations that are a
parent of this configuration in the configuration hierarchy are
included for notifications
* isolated: notifications are restricted to parameters are defined
to within this configuration
The methods ``federate_parameter_view()`` and
``isolate_parameter_view()`` behave as a radio group and one should
be selected before invoking any lookup methods.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_configuration_id(self):
"""Gets the ``Configuration`` ``Id`` associated with this session.
:return: the ``Configuration Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
configuration_id = property(fget=get_configuration_id)
@abc.abstractmethod
def get_configuration(self):
"""Gets the ``Configuration`` associated with this session.
:return: the ``Configuration`` associated with this session
:rtype: ``osid.configuration.Configuration``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.Configuration
configuration = property(fget=get_configuration)
@abc.abstractmethod
def can_register_for_parameter_notifications(self):
"""Tests if this user can register for ``Parameter`` notifications.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer
notification operations.
:return: ``false`` if notification methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def use_federated_configuration_view(self):
"""Federates the view for methods in this session.
A federated view will include parameters in configurations of
which this registries is a child in the configuration hierarchy.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def use_isolated_configuration_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts notifications for parameter values to
this configuration only.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def reliable_parameter_notifications(self):
"""Reliable notifications are desired.
In reliable mode, notifications are to be acknowledged using
``acknowledge_parameter_notification()`` .
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def unreliable_parameter_notifications(self):
"""Unreliable notifications are desired.
In unreliable mode, notifications do not need to be
acknowledged.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def acknowledge_parameter_notification(self, notification_id):
"""Acknowledge a parameter notification.
:param notification_id: the ``Id`` of the notification
:type notification_id: ``osid.id.Id``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_new_parameters(self):
"""Assigns a callback for notifications of new parameters.
``ParameterReceiver.newParameters()`` is invoked when a new
``Parameter`` is added to this configuration.
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_changed_parameters(self):
"""Assigns a callback for notification of updated parameters.
``ParameterReceiver.changedParameters()`` is invoked when a
``Parameter`` is changed in this configuration.
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_changed_parameter(self, parameter_id):
"""Assigns a callback for notifications of an update to a parameter.
``ParamaterReceiver.changedParameters()`` is invoked when the
specified ``Parameter`` is changed in this configuration.
:param parameter_id: the ``Id`` of the ``Parameter`` to monitor
:type parameter_id: ``osid.id.Id``
:raise: ``NullArgument`` -- ``parameter_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_deleted_parameters(self):
"""Assigns a callback for notification of deleted parameters.
``ParameterReceiver.deletedParamaters()`` is invoked when a
``Parameter`` is deleted or removed from this configuration.
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_deleted_parameter(self, parameter_id):
"""Assigns a callback for notifications of a deleted parameter.
``ParameterReceiver.deletedParameters()`` is invoked when the
specified ``Parameter`` is deleted or removed from this
configuration.
:param parameter_id: the ``Id`` of the ``Parameter`` to monitor
:type parameter_id: ``osid.id.Id``
:raise: ``NullArgument`` -- ``parameter_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
class ParameterConfigurationSession:
"""This session defines methods for accessing the configurations of a parameter.
A ``Parameter`` may appear in multiple ``Configurations``. Each
``Configuration`` may have its own authorizations governing who is
allowed to look at it.
This lookup session defines two views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def can_lookup_parameter_configurations(self):
"""Tests if this user can perform lookups on configurations of parameters.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
:return: ``false`` if lookups are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def use_comparative_parameter_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def use_plenary_parameter_view(self):
"""A complete view of the ``Parameter`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def get_parameter_ids_by_configuration(self, configuration_id):
"""Gets the list of ``Parameter`` ``Ids`` associated with a ``Configuration``.
:param configuration_id: ``Id`` of the ``Configuration``
:type configuration_id: ``osid.id.Id``
:return: list of matching parameter ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NotFound`` -- ``configuration_id`` is not found
:raise: ``NullArgument`` -- ``configuration_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def get_parameters_by_configuration(self, configuration_id):
"""Gets the list of ``Parameters`` associated with a ``Configuration``.
:param configuration_id: ``Id`` of the ``Configuration``
:type configuration_id: ``osid.id.Id``
:return: list of matching parameters
:rtype: ``osid.configuration.ParameterList``
:raise: ``NotFound`` -- ``configuration_id`` is not found
:raise: ``NullArgument`` -- ``configuration_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ParameterList
@abc.abstractmethod
def get_parameter_ids_by_configurations(self, configuration_ids):
"""Gets the list of ``Parameter Ids`` associated with a list of ``Configurations``.
:param configuration_ids: list of configurations
:type configuration_ids: ``osid.id.IdList``
:return: list of parameter ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NullArgument`` -- ``configuration_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def get_parameters_by_configurations(self, configuration_ids):
"""Gets the list of ``Parameters`` associated with a list of ``Configurations``.
:param configuration_ids: list of configurations
:type configuration_ids: ``osid.id.IdList``
:return: list of parameters
:rtype: ``osid.configuration.ParameterList``
:raise: ``NullArgument`` -- ``configuration_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ParameterList
@abc.abstractmethod
def get_configuration_ids_by_parameter(self, parameter_id):
"""Gets the ``Configuration Ids`` mapped to a ``Parameter``.
:param parameter_id: ``Id`` of a ``Parameter``
:type parameter_id: ``osid.id.Id``
:return: list of configuration ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NotFound`` -- ``parameter_id`` is not found
:raise: ``NullArgument`` -- ``parameter_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def get_configurations_by_parameter(self, parameter_id):
"""Gets the ``Configurations`` mapped to a ``Parameter``.
:param parameter_id: ``Id`` of a ``Parameter``
:type parameter_id: ``osid.id.Id``
:return: list of configurations
:rtype: ``osid.configuration.ConfigurationList``
:raise: ``NotFound`` -- ``parameter_id`` is not found
:raise: ``NullArgument`` -- ``parameter_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ConfigurationList
class ParameterConfigurationAssignmentSession:
"""This session provides methods to re-assign ``Parameters`` to ``Configurations``.
A ``Parameter`` may appear in multiple ``Configurations`` and
removing the last reference to a ``Parameter`` is the equivalent of
deleting it which may or may not be permitted. Each
``Configuration`` may have its own authorizations as to who is
allowed to operate on it.
Moving or adding a reference of a ``Parameter`` to another
``Configuration`` is not a copy operation (eg: does not change its
``Id`` ).
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def can_assign_parameter_configurations(self):
"""Tests if this user can change parameter configuration mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may not wish to offer
assignment operations.
:return: ``false`` if parameter configuration assignment is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def can_assign_parameters_to_configuration(self, configuration_id):
"""Tests if this user can alter parameter/configuration parameters.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known parameter methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
lookup operations to unauthorized users.
:param configuration_id: the ``Id`` of the ``Configuration``
:type configuration_id: ``osid.id.Id``
:return: ``false`` if configuration is not authorized, ``true`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``configuration_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_assignable_configuration_ids(self, configuration_id):
"""Gets a list of configurations including and under the given configuration node in which any parameter can be
assigned.
:param configuration_id: the ``Id`` of the ``Configuration``
:type configuration_id: ``osid.id.Id``
:return: list of assignable configuration ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NullArgument`` -- ``configuration_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def get_assignable_configuration_ids_for_parameter(self, configuration_id, parameter_id):
"""Gets a list of configurations including and under the given configuration node in which a specific parameter
can be assigned.
:param configuration_id: the ``Id`` of the ``Configuration``
:type configuration_id: ``osid.id.Id``
:param parameter_id: the ``Id`` of the ``Parameter``
:type parameter_id: ``osid.id.Id``
:return: list of assignable configuration ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NullArgument`` -- ``configuration_id`` or ``parameter_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def assign_parameter_to_configuration(self, parameter_id, configuration_id):
"""Adds an existing ``Parameter`` to a ``Configuration``.
:param parameter_id: the ``Id`` of the ``Parameter``
:type parameter_id: ``osid.id.Id``
:param configuration_id: the ``Id`` of the ``Configuration``
:type configuration_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``parameter_id`` and ``configuration_id`` already mapped
:raise: ``NotFound`` -- ``parameter_id`` or ``configuration_id`` not found
:raise: ``NullArgument`` -- ``parameter_id`` or ``configuration_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def unassign_parameter_from_configuration(self, parameter_id, configuration_id):
"""Removes a ``Parameter`` from a ``Configuration``.
:param parameter_id: the Id of the ``Parameter``
:type parameter_id: ``osid.id.Id``
:param configuration_id: the Id of the ``Configuration``
:type configuration_id: ``osid.id.Id``
:raise: ``NotFound`` -- ``parameter_id`` or ``configuration_id`` not found or is not mapped
:raise: ``NullArgument`` -- ``parameter_id`` or ``configuration_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def reassign_parameter_to_configuration(self, parameter_id, from_configuration_id, to_configuration_id):
"""Moves a ``Parameter`` from one ``Configuration`` to another.
Mappings to other ``Configurations`` are unaffected.
:param parameter_id: the ``Id`` of the ``Parameter``
:type parameter_id: ``osid.id.Id``
:param from_configuration_id: the ``Id`` of the current ``Configuration``
:type from_configuration_id: ``osid.id.Id``
:param to_configuration_id: the ``Id`` of the destination ``Configuration``
:type to_configuration_id: ``osid.id.Id``
:raise: ``NotFound`` -- ``parameter_id from_configuration_id,`` or ``to_configuration_id`` not found or
``credit_id`` not mapped to ``from_configuration_id``
:raise: ``NullArgument`` -- ``parameter_id, from_configuration_id,`` or ``to_configuration_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
class ParameterSmartConfigurationSession:
"""This session manages queries and sequencing to create "smart" dynamic catalogs.
A ``ParameterQuery`` can be retrieved from this session and mapped
to this ``Configuration`` to create a virtual collection of
``Parameters``. The parameters may be sequenced using the
``ParameterSearchOrder`` from this session.
This ``Configuration`` has a default query that matches any
parameter and a default search order that specifies no sequencing.
The queries may be examined using a ``ParameterQueryInspector``. The
query may be modified by converting the inspector back to a
``ParameterQuery``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_configuration_id(self):
"""Gets the ``Configuration`` ``Id`` associated with this session.
:return: the ``Configuration Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
configuration_id = property(fget=get_configuration_id)
@abc.abstractmethod
def get_configuration(self):
"""Gets the ``Configuration`` associated with this session.
:return: the ``Configuration`` associated with this session
:rtype: ``osid.configuration.Configuration``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.Configuration
configuration = property(fget=get_configuration)
@abc.abstractmethod
def can_manage_smart_configurations(self):
"""Tests if this user can manage smart configurations.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer operations
to unauthorized users.
:return: ``false`` if smart cobfiguration management is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_parameter_query(self):
"""Gets a parameter query.
:return: the parameter query
:rtype: ``osid.configuration.ParameterQuery``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ParameterQuery
parameter_query = property(fget=get_parameter_query)
@abc.abstractmethod
def get_parameter_search_order(self):
"""Gets a parameter search order.
:return: the parameter search order
:rtype: ``osid.configuration.ParameterSearchOrder``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ParameterSearchOrder
parameter_search_order = property(fget=get_parameter_search_order)
@abc.abstractmethod
def apply_parameter_query(self, parameter_query):
"""Applies a parameter query to this configuration.
:param parameter_query: the parameter query
:type parameter_query: ``osid.configuration.ParameterQuery``
:raise: ``NullArgument`` -- ``parameter_query`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure occurred
:raise: ``Unsupported`` -- ``parameter_query`` not of this service
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def inspec_parameter_query(self):
"""Gets a parameter query inspector for this configuration.
:return: the parameter query inspector
:rtype: ``osid.configuration.ParameterQueryInspector``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ParameterQueryInspector
@abc.abstractmethod
def apply_parameter_sequencing(self, parameter_search_order):
"""Applies a parameter search order to this configuration.
:param parameter_search_order: the parameter search order
:type parameter_search_order: ``osid.configuration.ParameterSearchOrder``
:raise: ``NullArgument`` -- ``parameter_search_order`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure occurred
:raise: ``Unsupported`` -- ``parameter_search_order`` not of this service
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def get_parameter_query_from_inspector(self, parameter_query_inspector):
"""Gets a parameter query from an inspector.
:param parameter_query_inspector: a parameter query inspector
:type parameter_query_inspector: ``osid.configuration.ParameterQueryInspector``
:return: the parameter query
:rtype: ``osid.configuration.ParameterQuery``
:raise: ``NullArgument`` -- ``parameter_query_inspector`` is ``null``
:raise: ``Unsupported`` -- ``parameter_query_inspector`` is not of this service
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ParameterQuery
class ConfigurationLookupSession:
"""This session provides methods for retrieving ``Configuration`` objects.
The ``Configuration`` represents a collection of parameter values.
This session defines views that offer differing behaviors when
retrieving multiple objects.
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete set or is an error condition
Generally, the comparative view should be used for most applications
as it permits operation even if there is data that cannot be
accessed. For example, a browsing application may only need to
examine the ``Configurations`` it can access, without breaking
execution. However, an assessment may only be useful if all
``Configurations`` referenced by it are available, and a test-taking
application may sacrifice some interoperability for the sake of
precision.
Configurations may have an additional interface indicated by their
respective types. The interface extension is accessed via the
``Configuration``. The returns may not be cast directly from the
returns in the lookup methods.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def can_lookup_configurations(self):
"""Tests if this user can perform ``Configuration`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
:return: ``false`` if lookup methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def use_comparative_configuration_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def use_plenary_configuration_view(self):
"""A complete view of the ``Configuration`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def get_configuration(self, configuration_id):
"""Gets the ``Configuration`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Configuration`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``Configuration`` and
retained for compatibility.
:param configuration_id: the ``Id`` of the ``Configuration`` to retrieve
:type configuration_id: ``osid.id.Id``
:return: the ``Configuration``
:rtype: ``osid.configuration.Configuration``
:raise: ``NotFound`` -- no ``Configuration`` found with the given ``Id``
:raise: ``NullArgument`` -- ``configuration_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.Configuration
@abc.abstractmethod
def get_configurations_by_ids(self, configuration_ids):
"""Gets a ``ConfigurationList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the
configurations specified in the ``Id`` list, in the order of the
list, including duplicates, or an error results if an ``Id`` in
the supplied list is not found or inaccessible. Otherwise,
inaccessible ``Configurations`` may be omitted from the list and
may present the elements in any order including returning a
unique set.
:param configuration_ids: the list of ``Ids`` to retrieve
:type configuration_ids: ``osid.id.IdList``
:return: the returned ``Configuration`` list
:rtype: ``osid.configuration.ConfigurationList``
:raise: ``NotFound`` -- an ``Id was`` not found
:raise: ``NullArgument`` -- ``configuration_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ConfigurationList
@abc.abstractmethod
def get_configurations_by_genus_type(self, configuration_genus_type):
"""Gets an ``ConfigurationList`` corresponding to the given configuration genus ``Type`` which does not include
configuration types derived from the specified ``Type``.
In plenary mode, the returned list contains all known
configurations or an error results. Otherwise, the returned list
may contain only those configurations that are accessible
through this session.
:param configuration_genus_type: a configuration genus type
:type configuration_genus_type: ``osid.type.Type``
:return: the returned ``Configuration`` list
:rtype: ``osid.configuration.ConfigurationList``
:raise: ``NullArgument`` -- ``configuration_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ConfigurationList
@abc.abstractmethod
def get_configurations_by_parent_genus_type(self, configuration_genus_type):
"""Gets an ``ConfigurationList`` corresponding to the given configuration genus ``Type`` and include any
additional configurations with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known
configurations or an error results. Otherwise, the returned list
may contain only those configurations that are accessible
through this session.
:param configuration_genus_type: a configuration genus type
:type configuration_genus_type: ``osid.type.Type``
:return: the returned ``Configuration`` list
:rtype: ``osid.configuration.ConfigurationList``
:raise: ``NullArgument`` -- ``configuration_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ConfigurationList
@abc.abstractmethod
def get_configurations_by_record_type(self, configuration_record_type):
"""Gets a ``ConfigurationList`` containing the given configuration record ``Type``.
In plenary mode, the returned list contains all known
configurations or an error results. Otherwise, the returned list
may contain only those configurations that are accessible
through this session.
:param configuration_record_type: a configuration record type
:type configuration_record_type: ``osid.type.Type``
:return: the returned ``Configuration`` list
:rtype: ``osid.configuration.ConfigurationList``
:raise: ``NullArgument`` -- ``configuration_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ConfigurationList
@abc.abstractmethod
def get_configurations_by_provider(self, resource_id):
"""Gets a ``ConfigurationList`` from the given provider ````.
In plenary mode, the returned list contains all known
configurations or an error results. Otherwise, the returned list
may contain only those configurations that are accessible
through this session.
:param resource_id: a resource ``Id``
:type resource_id: ``osid.id.Id``
:return: the returned ``Configuration`` list
:rtype: ``osid.configuration.ConfigurationList``
:raise: ``NullArgument`` -- ``resource_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ConfigurationList
@abc.abstractmethod
def get_configurations(self):
"""Gets all ``Configurations,`` In plenary mode, the returned list contains all known configurations or an error
results.
Otherwise, the returned list may contain only those
configurations that are accessible through this session.
:return: a list of ``Configurations``
:rtype: ``osid.configuration.ConfigurationList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.configuration.ConfigurationList
configurations = property(fget=get_configurations)
class ConfigurationQuerySession:
    """This session provides methods for searching among ``Configuration`` objects.

    The search query is constructed using the ``ConfigurationQuery``.

    Configurations may have a query record indicated by their respective
    record types. The query record is accessed via the
    ``ConfigurationQuery``. The returns in this session may not be cast
    directly to these interfaces.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def can_search_configurations(self):
        """Tests if this user can perform ``Configuration`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if search methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_configuration_query(self):
        """Gets a configuration query.

        :return: the configuration query
        :rtype: ``osid.configuration.ConfigurationQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ConfigurationQuery

    configuration_query = property(fget=get_configuration_query)

    @abc.abstractmethod
    def get_configurations_by_query(self, configuration_query):
        """Gets a list of ``Configurations`` matching the given search.

        :param configuration_query: the configuration query
        :type configuration_query: ``osid.configuration.ConfigurationQuery``
        :return: the returned ``ConfigurationList``
        :rtype: ``osid.configuration.ConfigurationList``
        :raise: ``NullArgument`` -- ``configuration_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``configuration_query`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ConfigurationList
class ConfigurationSearchSession:
    """This session provides methods for searching among ``Configuration`` objects.

    The search query is constructed using the ``ConfigurationQuery``.

    ``get_configurations_by_query()`` is the basic search method and
    returns a list of ``Configuration`` objects. A more advanced search
    may be performed with ``getConfigurationsBySearch()``. It accepts a
    ``ConfigurationSearch`` in addition to the query for the purpose of
    specifying additional options affecting the entire search, such as
    ordering. ``get_configurations_by_search()`` returns a
    ``ConfigurationSearchResults`` that can be used to access the
    resulting ``ConfigurationList`` or be used to perform a search
    within the result set through ``ConfigurationSearch``.

    Configurations may have a query record indicated by their respective
    record types. The query record is accessed via the
    ``ConfigurationQuery``. The returns in this session may not be cast
    directly to these interfaces.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_configuration_search(self):
        """Gets a configuration search.

        :return: the configuration search
        :rtype: ``osid.configuration.ConfigurationSearch``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ConfigurationSearch

    configuration_search = property(fget=get_configuration_search)

    @abc.abstractmethod
    def get_configuration_search_order(self):
        """Gets a configuration search order.

        The ``ConfigurationSearchOrder`` is supplied to a
        ``ConfigurationSearch`` to specify the ordering of results.

        :return: the configuration search order
        :rtype: ``osid.configuration.ConfigurationSearchOrder``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ConfigurationSearchOrder

    configuration_search_order = property(fget=get_configuration_search_order)

    @abc.abstractmethod
    def get_configurations_by_search(self, configuration_query, configuration_search):
        """Gets a list of ``Configurations`` matching the given search.

        Each element in the array is OR'd.

        :param configuration_query: the configuration query
        :type configuration_query: ``osid.configuration.ConfigurationQuery``
        :param configuration_search: the configuration search
        :type configuration_search: ``osid.configuration.ConfigurationSearch``
        :return: the configuration search results
        :rtype: ``osid.configuration.ConfigurationSearchResults``
        :raise: ``NullArgument`` -- ``configuration_query`` or ``configuration_search`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``configuration_query`` or ``configuration_search`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ConfigurationSearchResults

    @abc.abstractmethod
    def get_configuration_query_from_inspector(self, configuration_query_inspector):
        """Gets a configuration query from an inspector.

        The inspector is available from a
        ``ConfigurationSearchResults``.

        :param configuration_query_inspector: a configuration query inspector
        :type configuration_query_inspector: ``osid.configuration.ConfigurationQueryInspector``
        :return: the configuration query
        :rtype: ``osid.configuration.ConfigurationQuery``
        :raise: ``NullArgument`` -- ``configuration_query_inspector`` is ``null``
        :raise: ``Unsupported`` -- ``configuration_query_inspector`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ConfigurationQuery
class ConfigurationAdminSession:
    """This session creates, updates, and deletes ``Configurations``.

    The data for create and update is provided by the consumer via the
    form object. ``OsidForms`` are requested for each create or update
    and may not be reused.

    Create and update operations differ in their usage. To create a
    ``Configuration,`` a ``ConfigurationForm`` is requested using
    ``get_configuration_form_for_create()`` specifying the desired
    record ``Types`` or none if no record ``Types`` are needed. The
    returned ``ConfigurationForm`` will indicate that it is to be used
    with a create operation and can be used to examine metadata or
    validate data prior to creation. Once the ``ConfigurationForm`` is
    submitted to a create operation, it cannot be reused with another
    create operation unless the first operation was unsuccessful. Each
    ``ConfigurationForm`` corresponds to an attempted transaction.

    For updates, ``ConfigurationForms`` are requested to the
    ``Configuration`` ``Id`` that is to be updated using
    ``getConfigurationFormForUpdate()``. Similarly, the
    ``ConfigurationForm`` has metadata about the data that can be
    updated and it can perform validation before submitting the update.
    The ``ConfigurationForm`` can only be used once for a successful
    update and cannot be reused.

    The delete operations delete ``Configurations``.

    This session includes an ``Id`` aliasing mechanism to assign an
    external ``Id`` to an internally assigned Id.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def can_create_configurations(self):
        """Tests if this user can create ``Configurations``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a
        ``Configuration`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        create operations to an unauthorized user.

        :return: ``false`` if ``Configuration`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def can_create_configuration_with_record_types(self, configuration_record_types):
        """Tests if this user can create a single ``Configuration`` using the desired record types.

        While ``ConfigurationManager.getConfigurationRecordTypes()`` can
        be used to examine which records are supported, this method
        tests which record(s) are required for creating a specific
        ``Configuration``. Providing an empty array tests if a
        ``Configuration`` can be created with no records.

        :param configuration_record_types: array of configuration record types
        :type configuration_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``Configuration`` creation using the specified record ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``configuration_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_configuration_form_for_create(self, configuration_record_types):
        """Gets the configuration form for creating new configurations.

        A new form should be requested for each create transaction.

        :param configuration_record_types: array of configuration record types
        :type configuration_record_types: ``osid.type.Type[]``
        :return: the configuration form
        :rtype: ``osid.configuration.ConfigurationForm``
        :raise: ``NullArgument`` -- ``configuration_record_types`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form for requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ConfigurationForm

    @abc.abstractmethod
    def create_configuration(self, configuration_form):
        """Creates a new ``Configuration``.

        :param configuration_form: the configuration form
        :type configuration_form: ``osid.configuration.ConfigurationForm``
        :return: the new ``Configuration``
        :rtype: ``osid.configuration.Configuration``
        :raise: ``IllegalState`` -- ``configuration_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``configuration_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``configuration_form`` did not originate from ``get_configuration_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.Configuration

    @abc.abstractmethod
    def can_update_configurations(self):
        """Tests if this user can update ``Configurations``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known updating a
        ``Configuration`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        update operations to an unauthorized user.

        :return: ``false`` if ``Configuration`` modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_configuration_form_for_update(self, configuration_id):
        """Gets the configuration form for updating existing configurations.

        A new configuration form should be requested for each update
        transaction.

        :param configuration_id: ``Id`` of a ``Configuration``
        :type configuration_id: ``osid.id.Id``
        :return: the configuration form
        :rtype: ``osid.configuration.ConfigurationForm``
        :raise: ``NotFound`` -- ``configuration_id`` is not found
        :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.configuration.ConfigurationForm

    @abc.abstractmethod
    def update_configuration(self, configuration_form):
        """Updates an existing ``Configuration``.

        :param configuration_form: the configuration form
        :type configuration_form: ``osid.configuration.ConfigurationForm``
        :raise: ``IllegalState`` -- ``configuration_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``configuration_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``configuration_form`` did not originate from ``get_configuration_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_delete_configurations(self):
        """Tests if this user can delete ``Configurations``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a
        ``Configuration`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        delete operations to an unauthorized user.

        :return: ``false`` if ``Configuration`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def delete_configuration(self, configuration_id):
        """Deletes a ``Configuration``.

        :param configuration_id: the ``Id`` of the ``Configuration`` to delete
        :type configuration_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``configuration_id`` not found
        :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_manage_configuration_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``Configurations``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        :return: ``false`` if ``Configuration`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def alias_configuration(self, configuration_id, alias_id):
        """Adds an ``Id`` to a ``Configuration`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Configuration`` is determined by the
        provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another configuration it is
        reassigned to the given configuration ``Id``.

        :param configuration_id: the ``Id`` of a ``Configuration``
        :type configuration_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``configuration_id`` not found
        :raise: ``NullArgument`` -- ``configuration_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ConfigurationNotificationSession:
    """This session defines methods to receive notifications on adds/changes to ``Configurations``.

    Notifications related to adding or removing of parameters are
    handled through the ``ValueNotificationSession``. This session is
    intended for adapters and providers needing to synchronize their
    state with this service without the use of polling. Notifications
    are cancelled when this session is closed.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def can_register_for_configuration_notifications(self):
        """Tests if this user can register for ``Configuration`` notifications.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer
        notification operations.

        :return: ``false`` if notification methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def reliable_configuration_notifications(self):
        """Reliable notifications are desired.

        In reliable mode, notifications are to be acknowledged using
        ``acknowledge_configuration_notification()``.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def unreliable_configuration_notifications(self):
        """Unreliable notifications are desired.

        In unreliable mode, notifications do not need to be
        acknowledged.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def acknowledge_configuration_notification(self, notification_id):
        """Acknowledge a configuration notification.

        :param notification_id: the ``Id`` of the notification
        :type notification_id: ``osid.id.Id``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_new_configurations(self):
        """Registers for notifications of new configurations.

        ``ConfigurationReceiver.newConfigurations()`` is invoked when a
        new ``Configuration`` is created.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_configurations(self):
        """Registers for notification of updated configurations.

        ``ConfigurationReceiver.changedConfigurations()`` is invoked
        when a configuration is changed.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_configuration(self, configuration_id):
        """Registers for notifications of an update to a configuration.

        ``ConfigurationReceiver.changedConfigurations()`` is invoked
        when the specified ``Configuration`` is changed.

        :param configuration_id: the ``Id`` of the ``Configuration`` to monitor
        :type configuration_id: ``osid.id.Id``
        :raise: ``NotFound`` -- a ``Configuration`` was not found identified by the given ``Id``
        :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_configurations(self):
        """Registers for notification of deleted configurations.

        ``ConfigurationReceiver.deletedConfigurations()`` is invoked
        when a ``Configuration`` is deleted.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_configuration(self, configuration_id):
        """Registers for notifications of a deleted configuration.

        ``ConfigurationReceiver.deletedConfigurations()`` is invoked
        when the specified configuration is deleted.

        :param configuration_id: the ``Id`` of the ``Configuration`` to monitor
        :type configuration_id: ``osid.id.Id``
        :raise: ``NotFound`` -- a ``Configuration`` was not found identified by the given ``Id``
        :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_configuration_hierarchy(self):
        """Registers for notification of an updated configuration hierarchy structure.

        ``ConfigurationReceiver.changedChildOfConfigurations()`` is
        invoked when a node experiences a change in its children.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_configuration_hierarchy_for_ancestors(self, configuration_id):
        """Registers for notification of an updated configuration hierarchy structure.

        ``ConfigurationReceiver.changedChildOfConfigurations()`` is
        invoked when the specified node or any of its ancestors
        experiences a change in its children.

        :param configuration_id: the ``Id`` of the ``Configuration`` node to monitor
        :type configuration_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_configuration_hierarchy_for_descendants(self, configuration_id):
        """Registers for notification of an updated configuration hierarchy structure.

        ``ConfigurationReceiver.changedChildOfConfigurations()`` is
        invoked when the specified node or any of its descendants
        experiences a change in its children.

        :param configuration_id: the ``Id`` of the ``Configuration`` node to monitor
        :type configuration_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ConfigurationHierarchySession:
"""This session defines methods for traversing a hierarchy of ``Configuration`` objects.
Each node in the hierarchy is a unique ``Configuration``. The
hierarchy may be traversed recursively to establish the tree
structure through ``get_parent_configurations()`` and
``getChildConfigurations()``. To relate these ``Ids`` to another
OSID, ``get_configuration_nodes()`` can be used for retrievals that
can be used for bulk lookups in other OSIDs. Any ``Configuration``
available in the Configuration OSID is known to this hierarchy but
does not appear in the hierarchy traversal until added as a root
node or a child of another node.
A user may not be authorized to traverse the entire hierarchy. Parts
of the hierarchy may be made invisible through omission from the
returns of ``get_parent_configurationss()`` or
``get_child_configurations()`` in lieu of a ``PermissionDenied``
error that may disrupt the traversal through authorized pathways.
This session defines views that offer differing behaviors when
retrieving multiple objects.
* comparative view: configuration elements may be silently omitted
or re-ordered
* plenary view: provides a complete set or is an error condition
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_configuration_hierarchy_id(self):
"""Gets the hierarchy ``Id`` associated with this session.
:return: the configuration ``Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
configuration_hierarchy_id = property(fget=get_configuration_hierarchy_id)
@abc.abstractmethod
def get_configuration_hierarchy(self):
"""Gets the hierarchy associated with this session.
:return: the hierarchy associated with this session
:rtype: ``osid.hierarchy.Hierarchy``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.hierarchy.Hierarchy
configuration_hierarchy = property(fget=get_configuration_hierarchy)
@abc.abstractmethod
def can_access_configuration_hierarchy(self):
"""Tests if this user can perform hierarchy queries.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
:return: ``false`` if hierarchy traversal methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def use_comparative_configuration_view(self):
"""The returns from the configuration methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def use_plenary_configuration_view(self):
"""A complete view of the ``Configuration`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def get_root_configuration_ids(self):
"""Gets the root configuration ``Ids`` in this hierarchy.
:return: the root configuration ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
root_configuration_ids = property(fget=get_root_configuration_ids)
@abc.abstractmethod
def get_root_configurations(self):
    """Gets the root configurations in the configuration hierarchy.

    A node with no parents is an orphan. While all configuration
    ``Ids`` are known to the hierarchy, an orphan does not appear in
    the hierarchy unless explicitly added as a root node or child of
    another node.

    :return: the root configurations
    :rtype: ``osid.configuration.ConfigurationList``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.configuration.ConfigurationList

root_configurations = property(fget=get_root_configurations)
@abc.abstractmethod
def has_parent_configurations(self, configuration_id):
    """Tests if the ``Configuration`` has any parents.

    :param configuration_id: a configuration Id
    :type configuration_id: ``osid.id.Id``
    :return: ``true`` if the configuration has parents, ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``NotFound`` -- ``configuration_id`` is not found
    :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # boolean
@abc.abstractmethod
def is_parent_of_configuration(self, id_, configuration_id):
    """Tests if an ``Id`` is a direct parent of configuration.

    :param id: an ``Id``
    :type id: ``osid.id.Id``
    :param configuration_id: a configuration Id
    :type configuration_id: ``osid.id.Id``
    :return: ``true`` if this ``id`` is a parent of ``configuration_id,`` ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``NotFound`` -- ``configuration_id`` is not found
    :raise: ``NullArgument`` -- ``id`` or ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*
    *implementation notes*: If ``id`` not found return ``false``.

    """
    return  # boolean
@abc.abstractmethod
def get_parent_configuration_ids(self, configuration_id):
    """Gets the parent ``Ids`` of the given configuration.

    :param configuration_id: a configuration Id
    :type configuration_id: ``osid.id.Id``
    :return: the parent Ids of the configuration
    :rtype: ``osid.id.IdList``
    :raise: ``NotFound`` -- ``configuration_id`` is not found
    :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.id.IdList
@abc.abstractmethod
def get_parent_configurations(self, configuration_id):
    """Gets the parents of the given configuration.

    :param configuration_id: the ``Id`` to query
    :type configuration_id: ``osid.id.Id``
    :return: the parents of the configuration
    :rtype: ``osid.configuration.ConfigurationList``
    :raise: ``NotFound`` -- ``configuration_id`` not found
    :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.configuration.ConfigurationList
@abc.abstractmethod
def is_ancestor_of_configuration(self, id_, configuration_id):
    """Tests if an Id is an ancestor of a configuration.

    :param id: an ``Id``
    :type id: ``osid.id.Id``
    :param configuration_id: the ``Id`` of a configuration
    :type configuration_id: ``osid.id.Id``
    :return: ``true`` if this ``id`` is an ancestor of ``configuration_id,`` ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``NotFound`` -- ``configuration_id`` is not found
    :raise: ``NullArgument`` -- ``id`` or ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*
    *implementation notes*: If ``id`` not found return ``false``.

    """
    return  # boolean
@abc.abstractmethod
def has_child_configurations(self, configuration_id):
    """Tests if a configuration has any children.

    :param configuration_id: a ``configuration_id``
    :type configuration_id: ``osid.id.Id``
    :return: ``true`` if the ``configuration_id`` has children, ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``NotFound`` -- ``configuration_id`` not found
    :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # boolean
@abc.abstractmethod
def is_child_of_configuration(self, id_, configuration_id):
    """Tests if a node is a direct child of another.

    :param id: an ``Id``
    :type id: ``osid.id.Id``
    :param configuration_id: the ``Id`` of a configuration
    :type configuration_id: ``osid.id.Id``
    :return: ``true`` if the ``id`` is a child of ``configuration_id,`` ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``NotFound`` -- ``configuration_id`` not found
    :raise: ``NullArgument`` -- ``id`` or ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*
    *implementation notes*: If ``id`` not found return ``false``.

    """
    return  # boolean
@abc.abstractmethod
def get_child_configuration_ids(self, configuration_id):
    """Gets the child ``Ids`` of the given configuration.

    :param configuration_id: the ``Id`` to query
    :type configuration_id: ``osid.id.Id``
    :return: the child ``Ids`` of the configuration
    :rtype: ``osid.id.IdList``
    :raise: ``NotFound`` -- ``configuration_id`` is not found
    :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.id.IdList
@abc.abstractmethod
def get_child_configurations(self, configuration_id):
    """Gets the children of the given configuration.

    :param configuration_id: the ``Id`` to query
    :type configuration_id: ``osid.id.Id``
    :return: the children of the configuration
    :rtype: ``osid.configuration.ConfigurationList``
    :raise: ``NotFound`` -- ``configuration_id`` not found
    :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.configuration.ConfigurationList
@abc.abstractmethod
def is_descendant_of_configuration(self, id_, configuration_id):
    """Tests if an ``Id`` is a descendant of a configuration.

    :param id: an ``Id``
    :type id: ``osid.id.Id``
    :param configuration_id: the ``Id`` of a configuration
    :type configuration_id: ``osid.id.Id``
    :return: ``true`` if the ``id`` is a descendant of the ``configuration_id,`` ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``NotFound`` -- ``configuration_id`` is not found
    :raise: ``NullArgument`` -- ``id`` or ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*
    *implementation notes*: If ``id`` is not found return ``false``.

    """
    return  # boolean
@abc.abstractmethod
def get_configuration_node_ids(self, configuration_id, ancestor_levels, descendant_levels, include_siblings):
    """Gets a portion of the hierarchy for the given configuration.

    :param configuration_id: the ``Id`` to query
    :type configuration_id: ``osid.id.Id``
    :param ancestor_levels: the maximum number of ancestor levels to include. A value of 0 returns no parents in the
        node.
    :type ancestor_levels: ``cardinal``
    :param descendant_levels: the maximum number of descendant levels to include. A value of 0 returns no children
        in the node.
    :type descendant_levels: ``cardinal``
    :param include_siblings: ``true`` to include the siblings of the given node, ``false`` to omit the siblings
    :type include_siblings: ``boolean``
    :return: a configuration node
    :rtype: ``osid.hierarchy.Node``
    :raise: ``NotFound`` -- ``configuration_id`` not found
    :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.hierarchy.Node
@abc.abstractmethod
def get_configuration_nodes(self, configuration_id, ancestor_levels, descendant_levels, include_siblings):
    """Gets a portion of the hierarchy for the given configuration.

    :param configuration_id: the ``Id`` to query
    :type configuration_id: ``osid.id.Id``
    :param ancestor_levels: the maximum number of ancestor levels to include. A value of 0 returns no parents in the
        node.
    :type ancestor_levels: ``cardinal``
    :param descendant_levels: the maximum number of descendant levels to include. A value of 0 returns no children
        in the node.
    :type descendant_levels: ``cardinal``
    :param include_siblings: ``true`` to include the siblings of the given node, ``false`` to omit the siblings
    :type include_siblings: ``boolean``
    :return: a configuration node
    :rtype: ``osid.configuration.ConfigurationNode``
    :raise: ``NotFound`` -- ``configuration_id`` not found
    :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.configuration.ConfigurationNode
class ConfigurationHierarchyDesignSession:
    """This session defines methods for managing a hierarchy of ``Configuration`` objects.

    Each node in the hierarchy is a unique ``Configuration``.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_configuration_hierarchy_id(self):
        """Gets the hierarchy ``Id`` associated with this session.

        :return: the hierarchy ``Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    configuration_hierarchy_id = property(fget=get_configuration_hierarchy_id)

    @abc.abstractmethod
    def get_configuration_hierarchy(self):
        """Gets the hierarchy associated with this session.

        :return: the hierarchy associated with this session
        :rtype: ``osid.hierarchy.Hierarchy``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.hierarchy.Hierarchy

    configuration_hierarchy = property(fget=get_configuration_hierarchy)

    @abc.abstractmethod
    def can_modify_configuration_hierarchy(self):
        """Tests if this user can change the hierarchy.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known performing any update
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer these
        operations to an unauthorized user.

        :return: ``false`` if changing this hierarchy is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def add_root_configuration(self, configuration_id):
        """Adds a root configuration.

        :param configuration_id: the ``Id`` of a configuration
        :type configuration_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``configuration_id`` is already in hierarchy
        :raise: ``NotFound`` -- ``configuration_id`` not found
        :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def remove_root_configuration(self, configuration_id):
        """Removes a root configuration.

        :param configuration_id: the ``Id`` of a configuration
        :type configuration_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``configuration_id`` not a root
        :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def add_child_configuration(self, configuration_id, child_id):
        """Adds a child to a configuration.

        :param configuration_id: the ``Id`` of a configuration
        :type configuration_id: ``osid.id.Id``
        :param child_id: the ``Id`` of the new child
        :type child_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``configuration_id`` is already a parent of ``child_id``
        :raise: ``NotFound`` -- ``configuration_id`` or ``child_id`` not found
        :raise: ``NullArgument`` -- ``configuration_id`` or ``child_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def remove_child_configuration(self, configuration_id, child_id):
        """Removes a child from a configuration.

        :param configuration_id: the ``Id`` of a configuration
        :type configuration_id: ``osid.id.Id``
        :param child_id: the ``Id`` of the new child
        :type child_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``configuration_id`` not a parent of ``child_id``
        :raise: ``NullArgument`` -- ``configuration_id`` or ``child_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def remove_child_configurations(self, configuration_id):
        """Removes all children from a configuration.

        :param configuration_id: the ``Id`` of a configuration
        :type configuration_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``configuration_id`` is not in hierarchy
        :raise: ``NullArgument`` -- ``configuration_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
| 37.661895
| 127
| 0.662527
| 17,393
| 157,841
| 5.925142
| 0.037199
| 0.032458
| 0.040755
| 0.059094
| 0.874224
| 0.85537
| 0.825124
| 0.796974
| 0.776238
| 0.754687
| 0
| 0.000033
| 0.240014
| 157,841
| 4,190
| 128
| 37.670883
| 0.859075
| 0.732351
| 0
| 0.724332
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.295359
| false
| 0.108298
| 0.001406
| 0
| 0.596343
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
4be5f442145325a5ebcd3382b70d4263cbac0071
| 163
|
py
|
Python
|
src/SME_UnB/tests/test_SME_UnB.py
|
fga-gpp-mds/2016.2-Time07
|
44d78ce4f36b7cb535b9c775027b8a93972ba5e3
|
[
"MIT"
] | null | null | null |
src/SME_UnB/tests/test_SME_UnB.py
|
fga-gpp-mds/2016.2-Time07
|
44d78ce4f36b7cb535b9c775027b8a93972ba5e3
|
[
"MIT"
] | null | null | null |
src/SME_UnB/tests/test_SME_UnB.py
|
fga-gpp-mds/2016.2-Time07
|
44d78ce4f36b7cb535b9c775027b8a93972ba5e3
|
[
"MIT"
] | null | null | null |
import pytest
import SME_UnB
def test_project_defines_author_and_version():
    """Smoke-test that the SME_UnB package exposes its metadata dunders."""
    assert hasattr(SME_UnB, '__author__')
    assert hasattr(SME_UnB, '__version__')
| 20.375
| 46
| 0.785276
| 22
| 163
| 5.090909
| 0.590909
| 0.160714
| 0.285714
| 0.339286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134969
| 163
| 7
| 47
| 23.285714
| 0.794326
| 0
| 0
| 0
| 0
| 0
| 0.128834
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4be807cb4a9fcc8376068377417a9afe8bc19c7f
| 21,011
|
py
|
Python
|
authnzerver/actions/passcheck.py
|
waqasbhatti/authnzerver
|
d40fa38601f4f11e966fc52e11ad6fe1116bb145
|
[
"MIT"
] | 3
|
2019-06-02T12:57:08.000Z
|
2020-04-01T14:00:12.000Z
|
authnzerver/actions/passcheck.py
|
waqasbhatti/authnzerver
|
d40fa38601f4f11e966fc52e11ad6fe1116bb145
|
[
"MIT"
] | 7
|
2020-03-17T21:55:41.000Z
|
2020-07-07T22:58:48.000Z
|
authnzerver/actions/passcheck.py
|
waqasbhatti/authnzerver
|
d40fa38601f4f11e966fc52e11ad6fe1116bb145
|
[
"MIT"
] | 2
|
2020-03-04T06:56:27.000Z
|
2020-03-24T08:39:11.000Z
|
# -*- coding: utf-8 -*-
# actions_session.py - Waqas Bhatti (wbhatti@astro.princeton.edu) - Aug 2018
# License: MIT - see the LICENSE file for the full text.
"""This contains functions to drive session-related auth actions.
"""
#############
## LOGGING ##
#############
import logging
# get a logger
LOGGER = logging.getLogger(__name__)
#############
## IMPORTS ##
#############
# Obtain a UTC tzinfo object: prefer the stdlib datetime.timezone.utc
# (available on Python 3.2+); fall back to a minimal hand-written tzinfo
# when the import fails on very old interpreters.
try:
    from datetime import timezone, timedelta
    utc = timezone.utc
except Exception:
    from datetime import timedelta, tzinfo

    # Zero offset shared by the fallback UTC class below.
    ZERO = timedelta(0)

    class UTC(tzinfo):
        """UTC"""

        def utcoffset(self, dt):
            # UTC is always at zero offset from itself.
            return ZERO

        def tzname(self, dt):
            return "UTC"

        def dst(self, dt):
            # UTC never observes daylight saving time.
            return ZERO

    utc = UTC()
import multiprocessing as mp
from sqlalchemy import select
from argon2 import PasswordHasher
from .. import authdb
from ..permissions import pii_hash
from .session import auth_session_exists
############################
## PASSWORD HASHER OBJECT ##
############################

# Module-level Argon2 hasher shared by every password check in this module.
pass_hasher = PasswordHasher()
############################################
## USER PASSWORD CHECK HANDLING FUNCTIONS ##
############################################
def auth_password_check(payload,
                        override_authdb_path=None,
                        raiseonfail=False,
                        config=None):
    """This runs a password check given a session token and password.

    Used to gate high-security areas or operations that require re-verification
    of the password for a user's existing session.

    Parameters
    ----------

    payload : dict
        This is a dict containing the following items:

        - session_token
        - password

        In addition to these items received from an authnzerver client, the
        payload must also include the following keys (usually added in by a
        wrapping function):

        - reqid: int or str
        - pii_salt: str

    override_authdb_path : str or None
        The SQLAlchemy database URL to use if not using the default auth DB.

    raiseonfail : bool
        If True, and something goes wrong, this will raise an Exception instead
        of returning normally with a failure condition.

    config : SimpleNamespace object or None
        An object containing systemwide config variables as attributes. This is
        useful when the wrapping function needs to pass in some settings
        directly from environment variables.

    Returns
    -------

    dict
        Returns a dict containing the result of the password verification check.

    """

    # these keys are required for logging and PII censoring; fail early
    # if the wrapping function did not add them
    for key in ('reqid', 'pii_salt'):
        if key not in payload:
            LOGGER.error(
                "Missing %s in payload dict. Can't process this request." % key
            )
            return {
                'success': False,
                'failure_reason': (
                    "invalid request: missing '%s' in request" % key
                ),
                'user_id': None,
                'messages': ["Invalid password check request."],
            }

    # check broken request
    request_ok = all(
        item in payload for item in ('password', 'session_token')
    )

    # this checks if the database connection is live
    currproc = mp.current_process()
    engine = getattr(currproc, 'authdb_engine', None)

    if override_authdb_path:
        currproc.auth_db_path = override_authdb_path

    if not engine:
        currproc.authdb_engine, currproc.authdb_conn, currproc.authdb_meta = (
            authdb.get_auth_db(
                currproc.auth_db_path,
                echo=raiseonfail
            )
        )

    users = currproc.authdb_meta.tables['users']

    def _dummy_password_verify():
        """Fetches the dummy user's password hash from the DB and runs one
        (always-failing) Argon2 verification against it.

        NOTE(review): this appears to equalize the work done on failure
        paths with the work done on the success path -- confirm intent.
        Returns the dummy password hash for reuse.
        """
        dummy_sel = select([
            users.c.password
        ]).select_from(users).where(users.c.user_id == 3)
        dummy_results = currproc.authdb_conn.execute(dummy_sel)
        dummy_password = dummy_results.fetchone()['password']
        dummy_results.close()
        try:
            pass_hasher.verify(dummy_password, 'nope')
        except Exception:
            pass
        return dummy_password

    #
    # check if the request is OK
    #

    # if it isn't, then hash the dummy user's password twice
    if not request_ok:

        # dummy session request
        auth_session_exists(
            {'session_token': 'nope',
             'reqid': payload['reqid'],
             'pii_salt': payload['pii_salt']},
            raiseonfail=raiseonfail,
            override_authdb_path=override_authdb_path
        )

        # always get the dummy user's password from the DB (twice, matching
        # the successful path's amount of work)
        _dummy_password_verify()
        _dummy_password_verify()

        # BUGFIX: use payload.get() here -- 'session_token' may be exactly
        # the key that is missing, and payload['session_token'] would raise
        # a KeyError instead of returning the failure dict below.
        LOGGER.error(
            '[%s] Password check failed for session_token: %s. '
            'Missing request items.' %
            (payload['reqid'],
             pii_hash(payload.get('session_token'), payload['pii_salt']))
        )
        return {
            'success': False,
            'failure_reason': (
                "invalid request: missing either 'password' or 'session_token'"
            ),
            'user_id': None,
            'messages': ['Invalid password verification request.']
        }

    # otherwise, now we'll check if the session exists
    session_info = auth_session_exists(
        {'session_token': payload['session_token'],
         'reqid': payload['reqid'],
         'pii_salt': payload['pii_salt']},
        raiseonfail=raiseonfail,
        override_authdb_path=override_authdb_path
    )

    # if it doesn't, hash the dummy password twice
    if not session_info['success']:

        _dummy_password_verify()
        _dummy_password_verify()

        LOGGER.error(
            '[%s] Password check failed for session_token: %s. '
            'The session token provided does not exist.' %
            (payload['reqid'],
             pii_hash(payload['session_token'], payload['pii_salt']))
        )
        return {
            'success': False,
            'failure_reason': (
                "session does not exist"
            ),
            'user_id': None,
            'messages': ['No session token provided.']
        }

    # if the session token does exist, we'll proceed to checking the
    # password for the provided email
    dummy_password = _dummy_password_verify()

    # look up the provided user
    user_sel = select([
        users.c.user_id,
        users.c.password,
        users.c.is_active,
        users.c.user_role,
    ]).select_from(
        users
    ).where(users.c.user_id == session_info['session_info']['user_id'])
    user_results = currproc.authdb_conn.execute(user_sel)
    user_info = user_results.fetchone()
    user_results.close()

    pass_ok = False

    if user_info:

        try:
            pass_ok = pass_hasher.verify(
                user_info['password'],
                payload['password'][:256],
            )
        except Exception as e:
            LOGGER.error(
                '[%s] Password check failed for session_token: %s. '
                'The password provided does not match the one on '
                'record for user_id: %s. Exception was: %r' %
                (payload['reqid'],
                 pii_hash(payload['session_token'],
                          payload['pii_salt']),
                 pii_hash(user_info['user_id'],
                          payload['pii_salt']),
                 e)
            )
            pass_ok = False

    else:

        # unknown user: burn one more dummy verification so this path does
        # comparable work to the known-user path
        try:
            pass_hasher.verify(dummy_password, 'nope')
        except Exception:
            pass
        pass_ok = False

    if not pass_ok:
        return {
            'success': False,
            'failure_reason': (
                "user does not exist or password doesn't match"
            ),
            'user_id': None,
            'messages': ["Sorry, that user ID and "
                         "password combination didn't work."]
        }

    # if password verification succeeded, check if the user can actually log
    # in (i.e. their account is not locked or is not inactive)

    # if the user account is active and unlocked, proceed.
    # the frontend will take this user_id and ask for a new session
    # token with it.
    if (user_info['is_active'] and
            user_info['user_role'] != 'locked'):

        LOGGER.info(
            '[%s] Password check successful for session_token: %s. '
            'Matched user with user_id: %s. ' %
            (payload['reqid'],
             pii_hash(payload['session_token'],
                      payload['pii_salt']),
             pii_hash(user_info['user_id'],
                      payload['pii_salt']))
        )
        return {
            'success': True,
            'user_id': user_info['user_id'],
            'user_role': user_info['user_role'],
            'messages': ["Verification successful."]
        }

    # if the user account is locked, return a failure
    else:

        LOGGER.error(
            '[%s] Password check failed for session_token: %s. '
            'Matched user with user_id: %s is not active '
            'or is locked.' %
            (payload['reqid'],
             pii_hash(payload['session_token'],
                      payload['pii_salt']),
             pii_hash(user_info['user_id'],
                      payload['pii_salt']))
        )
        return {
            'success': False,
            'failure_reason': (
                "user exists but is inactive"
            ),
            'user_id': user_info['user_id'],
            'messages': ["Sorry, that user ID and "
                         "password combination didn't work."]
        }
def auth_password_check_nosession(payload,
                                  override_authdb_path=None,
                                  raiseonfail=False,
                                  config=None):
    """This runs a password check given an email address and password.

    Used to gate high-security areas or operations that require re-verification
    of the password for a user, without checking if they have a session.

    Useful for APIs, where the 'password' is some API token.

    Parameters
    ----------

    payload : dict
        This is a dict containing the following items:

        - email
        - password

        In addition to these items received from an authnzerver client, the
        payload must also include the following keys (usually added in by a
        wrapping function):

        - reqid: int or str
        - pii_salt: str

    override_authdb_path : str or None
        The SQLAlchemy database URL to use if not using the default auth DB.

    raiseonfail : bool
        If True, and something goes wrong, this will raise an Exception instead
        of returning normally with a failure condition.

    config : SimpleNamespace object or None
        An object containing systemwide config variables as attributes. This is
        useful when the wrapping function needs to pass in some settings
        directly from environment variables.

    Returns
    -------

    dict
        Returns a dict containing the result of the password verification check.

    """

    # these keys are required for logging and PII censoring; fail early
    # if the wrapping function did not add them
    for key in ('reqid', 'pii_salt'):
        if key not in payload:
            LOGGER.error(
                "Missing %s in payload dict. Can't process this request." % key
            )
            return {
                'success': False,
                'failure_reason': (
                    "invalid request: missing '%s' in request" % key
                ),
                'user_id': None,
                'messages': ["Invalid password check request."],
            }

    # check broken request
    request_ok = all(item in payload for item in ('password', 'email'))

    # this checks if the database connection is live
    currproc = mp.current_process()
    engine = getattr(currproc, 'authdb_engine', None)

    if override_authdb_path:
        currproc.auth_db_path = override_authdb_path

    if not engine:
        currproc.authdb_engine, currproc.authdb_conn, currproc.authdb_meta = (
            authdb.get_auth_db(
                currproc.auth_db_path,
                echo=raiseonfail
            )
        )

    users = currproc.authdb_meta.tables['users']

    def _dummy_password_verify():
        """Fetches the dummy user's password hash from the DB and runs one
        (always-failing) Argon2 verification against it.

        NOTE(review): this appears to equalize the work done on failure
        paths with the work done on the success path -- confirm intent.
        Returns the dummy password hash for reuse.
        """
        dummy_sel = select([
            users.c.password
        ]).select_from(users).where(users.c.user_id == 3)
        dummy_results = currproc.authdb_conn.execute(dummy_sel)
        dummy_password = dummy_results.fetchone()['password']
        dummy_results.close()
        try:
            pass_hasher.verify(dummy_password, 'nope')
        except Exception:
            pass
        return dummy_password

    #
    # check if the request is OK
    #

    # if it isn't, then hash the dummy user's password twice
    if not request_ok:

        # always get the dummy user's password from the DB (twice, matching
        # the successful path's amount of work)
        _dummy_password_verify()
        _dummy_password_verify()

        # BUGFIX: use payload.get() here -- 'email' may be exactly the key
        # that is missing, and payload['email'] would raise a KeyError
        # instead of returning the failure dict below.
        LOGGER.error(
            '[%s] Password check failed for email: %s. '
            'Missing request items.' %
            (payload['reqid'],
             pii_hash(payload.get('email'), payload['pii_salt']))
        )
        return {
            'success': False,
            'failure_reason': (
                "invalid request: missing 'email' or 'password' in request"
            ),
            'user_id': None,
            'messages': ['Invalid password verification request.']
        }

    # otherwise, now we'll check if the user exists and the password is correct
    dummy_password = _dummy_password_verify()

    # look up the provided user
    user_sel = select([
        users.c.user_id,
        users.c.password,
        users.c.is_active,
        users.c.user_role,
    ]).select_from(
        users
    ).where(users.c.email == payload['email'])
    user_results = currproc.authdb_conn.execute(user_sel)
    user_info = user_results.fetchone()
    user_results.close()

    pass_ok = False

    if user_info:

        try:
            pass_ok = pass_hasher.verify(
                user_info['password'],
                payload['password'][:256],
            )
        except Exception as e:
            LOGGER.error(
                '[%s] Password check failed for email: %s. '
                'The password provided does not match the one on '
                'record for user_id: %s. Exception was: %r' %
                (payload['reqid'],
                 pii_hash(payload['email'],
                          payload['pii_salt']),
                 pii_hash(user_info['user_id'],
                          payload['pii_salt']),
                 e)
            )
            pass_ok = False

    # if the user doesn't exist, do a dummy pass hash
    else:

        try:
            pass_hasher.verify(dummy_password, 'nope')
        except Exception:
            pass
        pass_ok = False

    if not pass_ok:
        return {
            'success': False,
            'failure_reason': (
                "user does not exist or password doesn't match"
            ),
            'user_id': None,
            'messages': ["Sorry, that user ID and "
                         "password combination didn't work."]
        }

    # if password verification succeeded, check if the user can actually log
    # in (i.e. their account is not locked or is not inactive)

    # if the user account is active and unlocked, proceed.
    # the frontend will take this user_id and ask for a new session
    # token with it.
    if (user_info['is_active'] and
            user_info['user_role'] != 'locked'):

        LOGGER.info(
            '[%s] Password check successful for email: %s. '
            'Matched user with user_id: %s. ' %
            (payload['reqid'],
             pii_hash(payload['email'],
                      payload['pii_salt']),
             pii_hash(user_info['user_id'],
                      payload['pii_salt']))
        )
        return {
            'success': True,
            'user_id': user_info['user_id'],
            'user_role': user_info['user_role'],
            'messages': ["Verification successful."]
        }

    # if the user account is locked, return a failure
    else:

        LOGGER.error(
            '[%s] Password check failed for email: %s. '
            'Matched user with user_id: %s is not active '
            'or is locked.' %
            (payload['reqid'],
             pii_hash(payload['email'],
                      payload['pii_salt']),
             pii_hash(user_info['user_id'],
                      payload['pii_salt']))
        )
        return {
            'success': False,
            'failure_reason': (
                "user exists but is inactive"
            ),
            'user_id': user_info['user_id'],
            'messages': ["Sorry, that user ID and "
                         "password combination didn't work."]
        }
| 31.883156
| 80
| 0.510304
| 2,148
| 21,011
| 4.841248
| 0.121508
| 0.025964
| 0.022887
| 0.012694
| 0.870564
| 0.864795
| 0.864795
| 0.864795
| 0.864218
| 0.855563
| 0
| 0.001657
| 0.396697
| 21,011
| 658
| 81
| 31.931611
| 0.818713
| 0.204655
| 0
| 0.830846
| 0
| 0
| 0.179748
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012438
| false
| 0.199005
| 0.022388
| 0.007463
| 0.072139
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ef1024c5a0fbac670954b3a3dd38fdffad5b9740
| 3,233
|
py
|
Python
|
tests/thumbor_project/my_app/models.py
|
Starou/django-thumborstorage
|
9c2089f87b27a2d1562019168d8fab1f5679600d
|
[
"MIT"
] | 10
|
2015-03-10T00:43:19.000Z
|
2021-05-25T01:00:29.000Z
|
tests/thumbor_project/my_app/models.py
|
Starou/django-thumborstorage
|
9c2089f87b27a2d1562019168d8fab1f5679600d
|
[
"MIT"
] | 18
|
2015-07-16T14:46:33.000Z
|
2022-03-24T07:26:30.000Z
|
tests/thumbor_project/my_app/models.py
|
Starou/django-thumborstorage
|
9c2089f87b27a2d1562019168d8fab1f5679600d
|
[
"MIT"
] | 5
|
2015-03-26T23:14:30.000Z
|
2018-09-24T21:59:53.000Z
|
from django.db import models
from django_thumborstorage.storages import ThumborStorage, ThumborMigrationStorage
class PersonManager(models.Manager):
    """Manager that resolves rows by their (first_name, last_name) natural key."""

    def get_by_natural_key(self, first_name, last_name):
        # The natural key mirrors the unique_together constraint declared
        # on the models that use this manager.
        lookup = {'first_name': first_name, 'last_name': last_name}
        return self.get(**lookup)
class Person(models.Model):
    """A model that used to store images on the file-system and has been moved to Thumbor.

    Uses ThumborMigrationStorage so both legacy on-disk files and
    migrated Thumbor-hosted files keep working.
    """

    objects = PersonManager()

    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)

    def upload_path(instance, filename):
        # Django invokes upload_to callables as (instance, filename);
        # there is deliberately no ``self`` parameter here.
        return 'people/%s' % filename

    photo = models.ImageField('image',
                              upload_to=upload_path,
                              height_field='photo_height',
                              width_field='photo_width',
                              storage=ThumborMigrationStorage())
    # Populated automatically by Django via height_field / width_field.
    photo_height = models.IntegerField(blank=True, null=True)
    photo_width = models.IntegerField(blank=True, null=True)

    class Meta:
        unique_together = (('first_name', 'last_name'),)

    def __unicode__(self):
        full_name = u"%s %s" % (self.first_name, self.last_name)
        return full_name

    def natural_key(self):
        return self.first_name, self.last_name

    def get_full_name(self):
        full_name = u"%s %s" % (self.first_name, self.last_name)
        return full_name
class PersonNew(models.Model):
    """A model that always stored images on Thumbor.

    Unlike Person, this one never had file-system files, so it uses the
    plain ThumborStorage backend.
    """

    objects = PersonManager()

    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)

    def upload_path(instance, filename):
        # Django invokes upload_to callables as (instance, filename);
        # there is deliberately no ``self`` parameter here.
        return 'people/new/%s' % filename

    photo = models.ImageField('image',
                              upload_to=upload_path,
                              height_field='photo_height',
                              width_field='photo_width',
                              storage=ThumborStorage())
    # Populated automatically by Django via height_field / width_field.
    photo_height = models.IntegerField(blank=True, null=True)
    photo_width = models.IntegerField(blank=True, null=True)

    class Meta:
        unique_together = (('first_name', 'last_name'),)

    def __unicode__(self):
        full_name = u"%s %s" % (self.first_name, self.last_name)
        return full_name

    def natural_key(self):
        return self.first_name, self.last_name

    def get_full_name(self):
        full_name = u"%s %s" % (self.first_name, self.last_name)
        return full_name
class PersonFileSystem(models.Model):
    """A model that still store images on the file-system.

    No explicit storage argument, so Django's default file-system
    storage applies.
    """

    objects = PersonManager()

    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)

    def upload_path(instance, filename):
        # Django invokes upload_to callables as (instance, filename);
        # there is deliberately no ``self`` parameter here.
        return 'people/fs/%s' % filename

    photo = models.ImageField('image',
                              upload_to=upload_path,
                              height_field='photo_height',
                              width_field='photo_width')
    # Populated automatically by Django via height_field / width_field.
    photo_height = models.IntegerField(blank=True, null=True)
    photo_width = models.IntegerField(blank=True, null=True)

    class Meta:
        unique_together = (('first_name', 'last_name'),)

    def __unicode__(self):
        full_name = u"%s %s" % (self.first_name, self.last_name)
        return full_name

    def natural_key(self):
        return self.first_name, self.last_name

    def get_full_name(self):
        full_name = u"%s %s" % (self.first_name, self.last_name)
        return full_name
| 36.738636
| 93
| 0.652645
| 402
| 3,233
| 5.007463
| 0.181592
| 0.080477
| 0.06458
| 0.076006
| 0.830104
| 0.798808
| 0.772976
| 0.772976
| 0.772976
| 0.772976
| 0
| 0.007296
| 0.236932
| 3,233
| 87
| 94
| 37.16092
| 0.808675
| 0.055985
| 0
| 0.796875
| 0
| 0
| 0.067523
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.203125
| false
| 0
| 0.03125
| 0.203125
| 0.828125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
ef2a5bce020ad1582c50e3b0844616b8d0c76328
| 48
|
py
|
Python
|
Python/Books/Learning-Programming-with-Python.Tamim-Shahriar-Subeen/chapter-008/pg-8.2-how-print-where-in-single-cotation.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
Python/Books/Learning-Programming-with-Python.Tamim-Shahriar-Subeen/chapter-008/pg-8.2-how-print-where-in-single-cotation.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
Python/Books/Learning-Programming-with-Python.Tamim-Shahriar-Subeen/chapter-008/pg-8.2-how-print-where-in-single-cotation.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
# Demonstrates two equivalent ways to embed an apostrophe in a string literal.
s = "Dimik's"  # double-quoted: the apostrophe needs no escaping
print(s)
s = 'Dimik\'s'  # single-quoted: the apostrophe must be backslash-escaped
print(s)
| 8
| 14
| 0.541667
| 10
| 48
| 2.6
| 0.3
| 0.461538
| 0.538462
| 0.923077
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 48
| 5
| 15
| 9.6
| 0.666667
| 0
| 0
| 0.5
| 0
| 0
| 0.270833
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
ef2e506f62a1666c45f49b154af5b590094533b4
| 4,347
|
py
|
Python
|
pingdomexport/tests/load/test_checks_postgres.py
|
mattboston/pingdomexport
|
1cd7acbf813abee0b9a7865b9cd4a1e166d55c37
|
[
"MIT"
] | 4
|
2018-01-25T09:18:38.000Z
|
2021-02-12T18:36:08.000Z
|
pingdomexport/tests/load/test_checks_postgres.py
|
mattboston/pingdomexport
|
1cd7acbf813abee0b9a7865b9cd4a1e166d55c37
|
[
"MIT"
] | 1
|
2018-12-04T18:42:06.000Z
|
2021-05-25T14:03:32.000Z
|
pingdomexport/tests/load/test_checks_postgres.py
|
mattboston/pingdomexport
|
1cd7acbf813abee0b9a7865b9cd4a1e166d55c37
|
[
"MIT"
] | 3
|
2019-04-30T11:52:14.000Z
|
2021-03-24T20:58:04.000Z
|
import records
from pingdomexport.load import checks_postgres
from unittest.mock import Mock, call
class TestPostgres:
    """Verify that Postgres.load() issues the expected SELECT then
    INSERT (new rows) or UPDATE (existing rows) queries, in order."""

    SELECT_SQL = 'SELECT id FROM pingdom_check WHERE id = :id'
    INSERT_SQL = ('INSERT INTO pingdom_check (id, name, created_at, status, '
                  'hostname, type) VALUES (:id, :name, '
                  'to_timestamp(:created_at), :status, :hostname, :type)')
    UPDATE_SQL = ('UPDATE pingdom_check SET name=:name, '
                  'created_at=to_timestamp(:created_at), status=:status, '
                  'hostname=:hostname, type=:type WHERE id=:id')

    @staticmethod
    def _check_payloads():
        """Two pingdom check dicts, shaped like the API layer supplies them."""
        return [
            {
                'hostname': 'www.a.com',
                'use_legacy_notifications': True,
                'lastresponsetime': 411,
                'ipv6': False,
                'type': 'http',
                'name': 'A',
                'resolution': 1,
                'created': 1458372620,
                'lasttesttime': 1459005934,
                'status': 'up',
                'id': 2057736,
            },
            {
                'lasterrortime': 1458938840,
                'type': 'http',
                'hostname': 'b.a.com',
                'lastresponsetime': 827,
                'created': 1458398619,
                'lasttesttime': 1459005943,
                'status': 'up',
                'ipv6': False,
                'use_legacy_notifications': True,
                'resolution': 1,
                'name': 'B',
                'id': 2057910,
            },
        ]

    @staticmethod
    def _mock_db(existing_rows):
        """Mock records-style db: every query's .all() yields *existing_rows*."""
        db = Mock()
        query = Mock()
        query.all.return_value = existing_rows
        db.query.return_value = query
        return db

    def test_load_new(self):
        # No pre-existing rows -> loader must SELECT then INSERT per check.
        db = self._mock_db([])
        checks_postgres.Postgres(db).load(self._check_payloads())

        assert 4 == db.query.call_count
        expected = [
            call(self.SELECT_SQL, id=2057736),
            call(self.INSERT_SQL, id=2057736, name='A',
                 created_at=1458372620, status='up',
                 hostname='www.a.com', type='http'),
            call(self.SELECT_SQL, id=2057910),
            call(self.INSERT_SQL, id=2057910, name='B',
                 created_at=1458398619, status='up',
                 hostname='b.a.com', type='http'),
        ]
        assert db.query.call_args_list == expected

    def test_load_existent(self):
        # SELECT returns a matching row -> loader must UPDATE instead.
        db = self._mock_db([records.Record(['id'], [2057736])])
        checks_postgres.Postgres(db).load(self._check_payloads())

        assert 4 == db.query.call_count
        expected = [
            call(self.SELECT_SQL, id=2057736),
            call(self.UPDATE_SQL, id=2057736, name='A',
                 created_at=1458372620, status='up',
                 hostname='www.a.com', type='http'),
            call(self.SELECT_SQL, id=2057910),
            call(self.UPDATE_SQL, id=2057910, name='B',
                 created_at=1458398619, status='up',
                 hostname='b.a.com', type='http'),
        ]
        assert db.query.call_args_list == expected
| 45.757895
| 288
| 0.47504
| 410
| 4,347
| 4.917073
| 0.185366
| 0.02381
| 0.026786
| 0.032738
| 0.922619
| 0.922619
| 0.922619
| 0.922619
| 0.922619
| 0.884921
| 0
| 0.103026
| 0.399356
| 4,347
| 94
| 289
| 46.244681
| 0.669092
| 0
| 0
| 0.711111
| 0
| 0.044444
| 0.31792
| 0.05107
| 0
| 0
| 0
| 0
| 0.044444
| 1
| 0.022222
| false
| 0
| 0.033333
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
324c8a1bed0b3c876fcbecfb7287d97dae1943e6
| 275
|
py
|
Python
|
Semester 5 (PIP)/assignment1/prog2.py
|
MartyMiniac/ITER-Assignment
|
a7b355f40cc52a337ad90bb8328e54c4a9534530
|
[
"MIT"
] | 14
|
2020-11-11T08:48:58.000Z
|
2022-02-26T03:59:05.000Z
|
Semester 5 (PIP)/assignment1/prog2.py
|
SKSTCODE42/ITER-Assignment
|
a7b355f40cc52a337ad90bb8328e54c4a9534530
|
[
"MIT"
] | 4
|
2020-11-12T13:31:14.000Z
|
2021-06-21T05:41:34.000Z
|
Semester 5 (PIP)/assignment1/prog2.py
|
SKSTCODE42/ITER-Assignment
|
a7b355f40cc52a337ad90bb8328e54c4a9534530
|
[
"MIT"
] | 10
|
2020-11-07T15:09:20.000Z
|
2022-02-26T03:56:50.000Z
|
# Prints the letters "R" and "V" as an ASCII-art banner.
# NOTE(review): leading whitespace inside this triple-quoted string appears
# to have been lost in extraction (the columns do not align as a banner
# should) -- verify the literal against the original file before reuse.
print('''
RRRRRRRRRR V V
R R V V
R R V V
R R V V
RRRRRRRRRR V V
R R V V
R R V V
R R V V
R R V
''')
| 25
| 32
| 0.203636
| 34
| 275
| 1.647059
| 0.117647
| 0.285714
| 0.375
| 0.5
| 0.910714
| 0.910714
| 0.910714
| 0.910714
| 0.910714
| 0.910714
| 0
| 0
| 0.767273
| 275
| 11
| 33
| 25
| 0.875
| 0
| 0
| 0.727273
| 0
| 0
| 0.949275
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.