id
stringlengths 3
8
| content
stringlengths 100
981k
|
|---|---|
11503024
|
from inspect import isclass
from _dependencies.exceptions import DependencyError
from _dependencies.injectable import _function_args
from _dependencies.spec import _Spec
class Value:
    """Evaluate given function during dependency injection.

    The decorated function is called while the injector resolves
    dependencies, and whatever it returns becomes the value of the
    dependency.

    Used as function decorator.
    """

    def __init__(self, function):
        # Classes are rejected up front -- only plain callables may be
        # decorated with ``@value``.
        _check_class(function)
        self.__function__ = function
value = Value
def _is_value(name, dependency):
    # Injector predicate; ``name`` is part of the shared predicate
    # signature and intentionally unused here.
    return isinstance(dependency, Value)
def _build_value_spec(name, dependency):
    """Build the injector spec for a ``Value``-decorated function."""
    function = dependency.__function__
    # The spec is keyed by the wrapped function's own name, not the
    # attribute name it was registered under.
    func_name = function.__name__
    owner = f"{func_name!r} value"
    args, required, optional = _function_args(function, func_name, owner)
    _check_method(args)
    return _Spec(function, args, required, optional, lambda: "'value'")
def _check_class(function):
if isclass(function):
raise DependencyError("'value' decorator can not be used on classes")
def _check_method(arguments):
if "self" in arguments:
raise DependencyError("'value' decorator can not be used on methods")
|
11503041
|
from dataclasses import dataclass, field
from typing import List, Union, Dict, Any
from pyball.models.draft.round import Round
@dataclass
class Draft:
    """A draft for a single year.

    Attributes:
        draftYear: Year of the draft (``None`` when unknown).
        rounds: Draft rounds; raw dicts from the API are coerced to
            ``Round`` objects, pre-built ``Round`` instances pass through.
    """
    draftYear: int = None
    rounds: List[Union[Round, Dict[str, Any]]] = field(default_factory=list)

    def __post_init__(self):
        # The declared type admits both Round objects and raw dicts, but the
        # original did ``Round(**r)`` unconditionally, which crashes when a
        # Round instance is supplied. Only coerce the dicts.
        self.rounds = [
            r if isinstance(r, Round) else Round(**r)
            for r in self.rounds
        ]
|
11503045
|
from __future__ import annotations
import numpy as np
import re
from .._types import Slices
__all__ = ["str_to_slice", "key_repr", "axis_targeted_slicing"]
def _range_to_list(v:str) -> list[int]:
    """
    Expand a single comma-free slicing token into explicit indices.

    "5" -> [5]
    "4:6" -> [4, 5]   (end-exclusive, like Python's range/slice)

    Called per comma-separated token by ``str_to_slice``; this function
    itself never sees commas.
    """
    if ":" in v:
        s, e = v.split(":")
        return list(range(int(s), int(e)))
    else:
        return [int(v)]
def int_or_None(v:str) -> int|None:
    """Parse *v* as an int; an empty string maps to ``None``."""
    return int(v) if v else None
def str_to_slice(v:str) -> list[int]|slice|int:
    """Convert a slicing string such as "1,3", "2:8" or "4" into an index.

    Comma lists become explicit index lists, colon forms become ``slice``
    objects, and a bare number becomes an int. Spaces are ignored.
    """
    v = v.replace(" ", "")
    if "," in v:
        indices: list[int] = []
        for token in v.split(","):
            indices.extend(_range_to_list(token))
        return indices
    if ":" in v:
        return slice(*map(int_or_None, v.split(":")))
    return int(v)
def key_repr(key):
    """Return a short, human-readable representation of an ndarray key.

    Slices/lists/arrays render as "*", ``None`` as "new", ``...`` as
    "...", everything else via ``str``; tuple elements are joined by ",".
    """
    if isinstance(key, tuple):
        parts = key
    elif hasattr(key, "__array__"):
        parts = ("array",)
    else:
        parts = (key,)
    rendered = []
    for item in parts:
        if isinstance(item, (slice, list, np.ndarray)):
            rendered.append("*")
        elif item is None:
            rendered.append("new")
        elif item is ...:
            rendered.append("...")
        else:
            rendered.append(str(item))
    return ",".join(rendered)
def axis_targeted_slicing(arr: np.ndarray, axes: str, string: str) -> Slices:
    """
    Make conventional slices from an axis-targeted slicing string.

    Parameters
    ----------
    arr : np.ndarray
        Array to be sliced (only ``arr.ndim`` is used).
    axes : str
        Axes of input ndarray, one character per axis.
    string : str
        Axis-targeted slicing string such as ``"t=1:3; z=4"``. If an axis
        that does not exist in `axes` is contained, this function will
        raise ValueError.

    Returns
    -------
    slices
        Tuple with one entry per axis; axes not named in `string` get
        ``slice(None)``.
    """
    keylist = string.replace(" ", "").split(";")
    sl_list = [slice(None)] * arr.ndim
    for k in keylist:
        axis, sl_str = k.split("=")
        i = axes.find(axis)
        if i < 0:
            raise ValueError(f"Axis '{axis}' does not exist: {axes}.")
        try:
            sl_list[i] = str_to_slice(sl_str)
        except ValueError as err:
            # Chain the underlying parse error instead of discarding it,
            # so the offending token is visible in the traceback.
            raise ValueError(f"Informal axis-targeted slicing: {string}") from err
    return tuple(sl_list)
|
11503062
|
from boa.interop.System.App import RegisterAppCall
from boa.interop.System.Runtime import Log
from boa.interop.System.ExecutionEngine import GetExecutingScriptHash
# Here "8ef4b22b006b49a85f5a9a4fe4cd42ce1ab809f4" should be your OEP4 contract hash; note it's not the reversed hash
OEP4Contract = RegisterAppCall('8ef4b22b006b49a85f5a9a4fe4cd42ce1ab809f4', 'operation', 'args')
# Script hash of THIS contract, used as the "from" account when the
# contract itself holds and sends OEP4 tokens.
selfContractAddress = GetExecutingScriptHash()
def Main(operation, args):
    # Contract entry point: dispatch on the requested operation name and
    # unpack positional args for each handler. Returns False on any
    # unknown operation or argument-count mismatch.
    # Here you can define the method name "checkName" to anything you want
    if operation == "checkName":
        return checkName()
    # Here you can define the method name "checkBalanceOf" to anything you want
    if operation == "checkBalanceOf":
        if len(args) == 1:
            account = args[0]
            return checkBalanceOf(account)
        else:
            return False
    if operation == "checkSelfBalance":
        return checkSelfBalance()
    if operation == "checkTransfer":
        if len(args) != 3:
            Log("len(args)!=3 ")
            return False
        else:
            fromAcct = args[0]
            toAcct = args[1]
            tokenAmount = args[2]
            return checkTransfer(fromAcct, toAcct, tokenAmount)
    if operation == "sendOEP4TokenFromContractTo":
        if len(args) == 2:
            toAcct = args[0]
            tokenAmount = args[1]
            return sendOEP4TokenFromContractTo(toAcct, tokenAmount)
        else:
            return False
    return False
def checkName():
    # Cross-contract call to the OEP4 token's "name" method. boa's
    # RegisterAppCall stub always needs a second argument even for
    # parameterless methods.
    # This "name" below should be consistent with your OEP4Contract methods
    # return OEP4Contract("name") is wrong
    # return OEP4Contract("name", []) or return OEP4Contract("name", 0) is correct!
    return OEP4Contract("name", 0)
def checkBalanceOf(account):
    # Query the OEP4 balance of `account`; parameters must be wrapped in a list.
    # This "balanceOf" below should be consistent with your OEP4Contract methods
    # params = account is wrong
    params = [account]
    return OEP4Contract("balanceOf", params)
def checkSelfBalance():
    # OEP4 balance held by this contract itself.
    params = [selfContractAddress]
    return OEP4Contract("balanceOf", params)
def checkTransfer(fromAcct, toAcct, tokenAmount):
    # Forward a transfer request to the OEP4 contract; the OEP4 side is
    # responsible for witness-checking fromAcct.
    params = [fromAcct, toAcct, tokenAmount]
    return OEP4Contract("transfer", params)
def sendOEP4TokenFromContractTo(toAcct, tokenAmount):
    # Transfer tokens out of this contract's own balance (the "from"
    # account is this contract's script hash).
    params = [selfContractAddress, toAcct, tokenAmount]
    return OEP4Contract("transfer", params)
|
11503084
|
from scapy.all import * # pylint: disable=unused-wildcard-import
import random
import string
from pprint import pprint
from binascii import hexlify
from scapy.utils import PcapWriter
from scapy.config import conf
import os
# Test networks: "inside" is the private side of the NAT/guard, "outside"
# is the public side.
inside = "18.0.0.0/28"
outside = "1.1.1.0/28"
# Fixed seed so the generated packet trace is reproducible across runs.
random.seed(0)
# Coq/Fiat vernacular template: {} placeholders are the definition name
# and the semicolon-separated byte list.
PKT_TEMPLATE = """
Definition {} : ByteBuffer.t _ :=
Eval compute in Vector.map (@NToWord 8) [{}]%N."""
def bytes2fiat(name, bs):
    """Render the byte sequence *bs* as a Fiat ByteBuffer definition."""
    byte_list = "; ".join(str(int(byte)) for byte in bs)
    return PKT_TEMPLATE.format(name, byte_list)
def pkt2fiat(name, pkt):
    # Drop the stale checksum so scapy recomputes it when the packet is
    # serialized below.
    del pkt.chksum
    # show2 forces field post-build computation (side effect only; the
    # dumped text is discarded).
    pkt.show2(dump=True)
    return bytes2fiat(name, bytes(pkt))
def sstr(obj):
    """Render ``str(obj)`` as a chain of ``\\xNN`` hex escapes (two digits minimum)."""
    return "".join("\\x{:02x}".format(ord(ch)) for ch in str(obj))
def random_payload(length):
    """Return a random lowercase ASCII string of the given length."""
    letters = [random.choice(string.ascii_lowercase) for _ in range(length)]
    return "".join(letters)
def random_ip(network):
    # Draw one random address from the CIDR block (scapy's RandIP
    # stringifies to a concrete IP each time).
    return str(RandIP(network))
def make_outgoing():
    # Inside -> outside UDP packet; payload marks the expected verdict.
    return IP(src=random_ip(inside), dst=random_ip(outside))/UDP(sport=23, dport=23)/b"outgoing:accept"
def find_previous(history, src, dst):
    """Count prior packets addressed to *dst* and build the expected verdict payload.

    NOTE(review): the src comparison is deliberately commented out upstream,
    so only the destination is matched; ``src`` is currently unused.
    """
    count = 0
    for pkt in history:  # pkt[IP].src == src and
        if pkt[IP].dst == dst:
            count += 1
    if count > 0:
        verdict = "accept ({})".format(count)
    else:
        verdict = "reject"
    return verdict.encode("ascii")
def make_incoming(history):
    # Outside -> inside packet. The verdict is derived from whether a
    # matching outgoing packet already exists (note the swapped src/dst
    # in the lookup: we search for prior traffic in the reverse direction).
    src, dst = random_ip(outside), random_ip(inside)
    decision = find_previous(history, src=dst, dst=src)
    payload = b"incoming:" + decision
    return IP(src=src, dst=dst)/UDP(sport=23, dport=23)/payload
def main():
    # Generate a random mix of outgoing (~30%) and incoming packets,
    # print one TSV row per packet (index, hex bytes, expected verdict),
    # and dump the whole trace to a pcap for the stateful-guard benchmark.
    history = []
    for idx in range(50):
        outgoing = random.uniform(0, 1) > 0.7
        pkt = make_outgoing() if outgoing else make_incoming(history)
        history.append(pkt)
        # fiat is currently unused (the Coq dump print is disabled below)
        fiat = pkt2fiat("pkt{}".format(idx), pkt)
        pkt_bytes = hexlify(bytes(pkt)).decode("ascii")
        # print(fiat)
        print("{}\t{}\t{}".format(idx, pkt_bytes, "pass" if b"accept" in bytes(pkt[UDP].payload) else "fail"))
    print(os.getcwd())
    wrpcap("stateful_guard.pcap", history)
    return history
if __name__ == '__main__':
    main()
|
11503109
|
from .helpers import *
from .behavior import trivia_behavior
from io import BytesIO
DELAY = 20
async def premise(item):
    # Build the question payload: fetch the state's map image and attach
    # it with a timed prompt. `item` is a (state_name, data) pair; only
    # data is needed here.
    _, data = item
    # data['map_url'] is protocol-relative, hence the https: prefix.
    state_image_url = 'https:{}'.format(data['map_url'])
    state_image = await Utils.fetch(state_image_url)
    return dict(
        file_path=BytesIO(state_image),
        filename='state.png',
        content='You have {} seconds to guess the name of that state'.format(DELAY),
    )
def resolve(item, answers):
    # Decide the winner (fastest correct, case-insensitive answer) and
    # build the reveal message with a Wikipedia link.
    state, data = item
    winner, message = Resolve.fastest(answers, state.lower(), skill='kkona')
    wiki_url = 'https://wikipedia.org{}'.format(data['path'])
    more_info = 'More information here: {}'.format(wiki_url)
    return winner, dict(
        content='{}\n{}'.format(message, more_info)
    )
# Assembled trivia behavior: pick a random US state from the JSON data,
# show its map, give players DELAY seconds, then resolve the winner.
USStatesTrivia = trivia_behavior(
    fetch = Fetch.read_json('us_states.json'),
    pick = Pick.random_dict_item,
    premise = premise,
    query = Query.timed(DELAY),
    resolve = resolve,
)
|
11503114
|
from abc import ABCMeta, abstractmethod
class AbstractProductY(metaclass=ABCMeta):
    """
    Abstract interface for products of type Y.

    Note: the original used ``__metaclass__ = ABCMeta``, which is the
    Python 2 protocol and is silently ignored on Python 3 -- the class
    was instantiable and ``@abstractmethod`` was never enforced. The
    ``metaclass=`` keyword fixes that.
    """

    @abstractmethod
    def feature(self):
        """Return this product's distinguishing feature (must be overridden)."""
        pass
|
11503127
|
from featureMan.lib.properties import glyphBase, glyphProperty
from featureMan.lib.tags import languageTags
import re
from itertools import permutations
rtlScripts = set(['arab', 'hebr', 'syrc', 'thaa'])
def wrap(string, width=70):
    """Hard-wrap *string* at the last space before *width* characters.

    When the input is a ``#`` comment, every continuation line keeps a
    leading ``'# '`` marker. Returns '' for empty input (the original
    raised IndexError on ``string[0]`` in that case; its trailing
    ``return ''`` was unreachable).

    NOTE(review): assumes each width-sized chunk contains at least one
    whitespace character; a long unbroken token would walk the marker out
    of range -- same as the original behavior.
    """
    if not string:
        return ''
    comment = '# ' if string[0] == '#' else ''
    newstring = ""
    while len(string) > width:
        marker = width - 1
        # walk left to the nearest whitespace so words are never split
        while not string[marker].isspace():
            marker = marker - 1
        newline = '%s%s%s' %(string[0:marker], "\n", comment)
        newstring = '%s%s' %(newstring, newline)
        string = string[marker + 1:]
    return '%s%s' %(newstring, string)
def indent(text, steps=1):
    """Prefix every line of *text* with *steps* indentation units."""
    space = ' '*steps
    return '\n'.join(space + line for line in text.split('\n'))
class otClass():
    """
    An object to create opentype classes.
    """
    def __init__(self, name, glyphList):
        """ stores the class name and its member glyph list """
        self.name = name
        self.glyphList = glyphList
    def syntax(self):
        """ returns the syntax that defines the class, '' when empty """
        if not self.glyphList:
            return ''
        lines = ['@%s = [' %self.name,
                 indent(wrap(' '.join(self.glyphList))),
                 '];']
        return '\n'.join(lines)
    def otName(self):
        """ returns the class name in ot syntax"""
        return '@%s' %self.name
class GDEF():
    """
    An object to generate GDEF in Opentype syntax
    """
    # there souldn't be any glyph shared between these classes.
    # otherwise a warning should be produced.
    def __init__(self, base, ligature, mark, component, glyphs=set()):
        """ class takes the glyph lists in the written order """
        # NOTE(review): ``glyphs=set()`` is a shared mutable default; it is
        # never mutated here, so this is safe only as long as that holds.
        self.table = ''
        self.classes = ['', '', '', '']
        self.returnNone = True
        # Each glyph may belong to only one GDEF class. Order matters:
        # mark wins over everything, then component, then ligature, then
        # base (note the subtractions use the already-reduced sets).
        base -= component | ligature | mark
        ligature -= component | mark | base
        component -= mark | base
        # Keep only glyphs actually present in the font; sorted for
        # deterministic output.
        base = sorted(set(base) & glyphs)
        ligature = sorted(set(ligature) & glyphs)
        mark = sorted(set(mark) & glyphs)
        component = sorted(set(component) & glyphs)
        if base:
            self.baseClass = otClass('GDEF_base', base)
            self.classes[0] = (self.baseClass)
        if ligature:
            self.ligatureClass = otClass('GDEF_ligature', ligature)
            self.classes[1] = (self.ligatureClass)
        if mark:
            self.markClass = otClass('GDEF_mark', mark)
            self.classes[2] = (self.markClass)
        if component:
            self.componentClass = otClass('GDEF_component', component)
            self.classes[3] = (self.componentClass)
        if mark or ligature or base or component:
            self.returnNone = False
        else:
            return
        self.classesSyntax = []
        self.table = []
        # GlyphClassDef takes the four classes positionally; empty slots
        # stay empty strings so the comma positions are preserved.
        for c in self.classes:
            if type(c) != str:
                self.table.append(c.otName())
                self.classesSyntax.append(c.syntax())
            else:
                self.table.append('')
    def syntax(self):
        # Class definitions first, then the GDEF table referencing them;
        # '' when no class had any members.
        if not self.returnNone:
            return '%s\n\ntable GDEF {\n GlyphClassDef\n%s\n} GDEF;' %('\n\n'.join(self.classesSyntax), indent(',\n'.join(self.table)+';', 2))
        else:
            return ''
# Short flag mnemonics -> OpenType ``lookupflag`` keywords.
flagDic = {
    'rtl': 'RightToLeft',
    'mark': 'MarkAttachmentType',
    'nomark': 'IgnoreMarks',
    'nolig': 'IgnoreLigatures'
}
class feature:
    """
    An object to create a feature syntax in OpenType.
    """
    def __init__(self, featureName):
        """ class takes the feature name first """
        self.featureName = featureName
        self.contents = ''
        self.lookupSet = set()  # lookup names already used, to avoid duplicates
    def content(self, contents):
        """ this function adds the input to the feature block """
        self.contents += '%s\n' %indent(contents)
    def addLookup(self, rulesDic, lookupName=''):
        """ lookupName is arbitrary and is given while creating the feature.
        rulesDic has this sctructure:
            rulesDic[flag] = listOfOpenTyperules
        flag is s string in which abreviated names of flags which are defined
        in flagDic are written with a space sepereating them like:
            ['rtl mark']
        listOfOpenTyperules is a list or set of rules which is going to be
        written inside the lookup which in turns goes inside this feature.
        These are basically the rules written in OT syntax. They will be
        joined with a newline character and will be added to the feature
        block.
        """
        if lookupName != '':
            lookupName = '_%s' %lookupName
        result = []
        # (lookupType), (beforeContext, afterContext, )
        for flag in rulesDic:
            if rulesDic[flag]:
                re_name = re.compile(r'[A-Za-z0-9]+') # trying to interpret the flags
                flagName = '_'.join(re_name.findall(flag)) # converting flags to somthing to be used as the lookup name
                i = 1
                thisLookupName = '%s_lookup_%s%s_%i' %(self.featureName, flagName, lookupName, i)
                while thisLookupName in self.lookupSet: # trying to avoid duplicated names for lookups
                    i += 1
                    # Bug fix: the retry previously interpolated the raw
                    # ``flag`` string (which may contain spaces, producing an
                    # invalid OpenType lookup name) with a different pattern.
                    # Regenerate with the same sanitized name instead.
                    thisLookupName = '%s_lookup_%s%s_%i' %(self.featureName,
                                                           flagName, lookupName, i)
                self.lookupSet.add(thisLookupName)
                Lookup = lookup(thisLookupName)
                Lookup.flag(flag)
                Lookup.content('\n'.join(rulesDic[flag]))
                result.append(Lookup.syntax())
        self.contents += ''.join(result)
    def syntax(self):
        """ return the whole feature block sytnax with its definition """
        pat = re.compile(' |\n|\t')
        rawContent = re.sub(pat, '', self.contents)
        if rawContent: # cheking if there is any rules inside the block
            return "feature %s {\n%s} %s;" %(self.featureName, self.contents, self.featureName)
        return ''
class lookup:
    """
    An object to create a lookup syntax in OpenType.
    """
    def __init__(self, lookupName):
        self.lookupName = lookupName
        self.flags = ''
        self.contents = ''
        self.languages = []
    def language(self, languages=None):
        """ language or list of languages that substituion(s) is performed on. """
        if languages:
            self.languages.append(indent(languages))
    def flag(self, inputflags=''):
        """
        input: string of flags from flagdic keys seperated by space:
        e.g:
            'rtl nomark'
        'noflag' suppresses the lookupflag statement entirely.
        """
        parts = []
        if inputflags != 'noflag':
            parts.append('\nlookupflag')
            for token in inputflags.split(' '):
                # translate known mnemonics, pass unknown tokens through
                parts.append(flagDic.get(token, token))
        self.flags += indent(' '.join(parts))+';'
    def call(self):
        """ returns a reference to this lookup, for use inside a feature """
        return 'lookup %s;' %self.lookupName
    def content(self, contents):
        """
        contents are the block of rules text joined with ";\n"
        """
        self.contents += indent(contents, 2)
    def syntax(self):
        """ returns the complete lookup block """
        return 'lookup %s {%s\n%s\n} %s;\n' %(self.lookupName, self.flags,
                                              self.contents, self.lookupName)
class sub:
    """
    Placeholder for substitution-rule helpers (no behavior implemented yet).
    """
class chain:
    """
    create a chain contextual for certain lookup(s).

    NOTE(review): currently a stub -- no behavior implemented yet.
    """
# which suffix belongs to which feature, and summary of supported features.
# Several suffixes map to the same tag (e.g. cmps/dcmp -> ccmp); the
# compound number-style keys map to tag tuples.
suffix2tag = {
    "init": "init", # inital
    "medi": "medi", # medial
    "fina": "fina", # final
    "isol": "isol", # isolated
    "liga": "liga", # standard ligatures (on by default)
    "rlig": "rlig", # required ligatures (always on)
    "dlig": "dlig", # discretionally ligatures
    "cmps": "ccmp", # glyph composing
    "dcmp": "ccmp", # glyph decomposing
    "calt": "calt", # contextual alternates
    "conc": "calt", # arabic cursive connections
    "cap": "case", # case sensitive
    "case": "case", # case sensitive
    "zero": "zero", # slashed zero
    "smcp": "smcp", # small caps
    "lnum": "lnum", # lining numbers
    "pnum": "pnum", # proportional numbers
    "onum": "onum", # old style numbers
    "tnum": "tnum", # tabular number
    "onum_pnum": ("onum", "pnum"),
    "lnum_pnum": ("lnum", "pnum"),
    "onum_tnum": ("onum", "tnum"),
    "lnum_tnum": ("lnum", "tnum"),
    "ss01": "ss01", # stylistic set 1
    "ss02": "ss02", # stylistic set 2
    "ss03": "ss03", # stylistic set 3
    "ss04": "ss04", # stylistic set 4
    "ss05": "ss05", # stylistic set 5
    "ss06": "ss06", # stylistic set 6
    "ss07": "ss07", # stylistic set 7
    "ss08": "ss08", # stylistic set 8
    "ss09": "ss09", # stylistic set 9
    "ss10": "ss10", # stylistic set 10
    "ss11": "ss11", # stylistic set 11
    "ss12": "ss12", # stylistic set 12
    "ss13": "ss13", # stylistic set 13
    "ss14": "ss14", # stylistic set 14
    "ss15": "ss15", # stylistic set 15
    "ss16": "ss16", # stylistic set 16
    "ss17": "ss17", # stylistic set 17
    "ss18": "ss18", # stylistic set 18
    "ss19": "ss19", # stylistic set 19
    "ss20": "ss20", # stylistic set 20
}
fea2name = {
'tnum': 'Tabular Numbers',
'pnum': 'Proportional Numbers',
'onum': 'Oldstyle Numbers',
'lnum': 'Lining Numbers',
'ss01': 'Stylistic Set 1',
'ss02': 'Stylistic Set 2',
'ss03': 'Stylistic Set 3',
'ss04': 'Stylistic Set 4',
'ss05': 'Stylistic Set 5',
'ss06': 'Stylistic Set 6',
'ss07': 'Stylistic Set 7',
'ss08': 'Stylistic Set 8',
'ss09': 'Stylistic Set 9',
'ss10': 'Stylistic Set 10',
'ss11': 'Stylistic Set 11',
'ss12': 'Stylistic Set 12',
'ss13': 'Stylistic Set 13',
'ss14': 'Stylistic Set 14',
'ss15': 'Stylistic Set 15',
'ss16': 'Stylistic Set 16',
'ss17': 'Stylistic Set 17',
'ss18': 'Stylistic Set 18',
'ss19': 'Stylistic Set 19',
'ss20': 'Stylistic Set 20',
'zero': 'Slashed Zero',
'medi': 'Medial Forms',
'init': 'Initial Forms',
'isol': 'Isolated Forms',
'fina': 'Final Forms',
'smcp': 'Small Caps',
'case': 'Case Senstive Forms',
'ccmp': 'Glyph Composition/Decomposition',
'rlig': 'Required Ligatures',
'liga': 'Standard Ligatures',
'dlig': 'Discretionary Ligatures',
'mark': 'Mark Positioning',
'mkmk': 'Mark to Mark Positioning',
'calt': 'Contextaul Alternates',
'curs': 'Cursive Attatchment',
'kern': 'Kerning',
'aalt': 'Access All Alternates',
'locl': 'Localized Froms',
}
# Feature tags whose rules are ligature/composition substitutions.
compositeTags = set(["liga", "dlig", "rlig", "ccmp"])
# Glyph-name suffixes meaning "decompose" (reverse direction of composing).
reversed_composite_suffixes = set(["dcmp"])
# Character separating ligature component names inside a glyph name.
lig_splitter = "_"
class fontDic():
    """
    Object that converts font data to dictionaries which are used
    to interpret font data, to speed up font data queries.
    This object now is adapted to robofont, but it could be
    adapted to other environments.
    """
    # NOTE(review): both default arguments below are shared mutable sets;
    # they are only read here, so this is safe as long as callers never
    # mutate them.
    def __init__(self, f, marksToSkip=set(), skipGlyphs=set()):
        self.f = f
        self.marksToSkip = marksToSkip # glyphs that should not have mark positoning rules (arbitrary)
        self.skipGlyphs = set(['.null', '.notdef']) | skipGlyphs
        if self.f is not None:
            self.scripts = {} # {scriptName : [glyphName1, glyphName2] }
            self.localized = {} # {(script, language) : [gName1, gName2, ...]
            self.glyphScript = {} # {glyphName : scriptName}
            self.glyphAnchor = {} # hash of glyph names to list of anchors and thier positions
            self.anchors = {} # hash of anchor names to the glyph names
            self.glyphUnicode = {} # hash of glyph names to unicode values
            self.unicodToGlyphs = dict(self.f.getCharacterMapping()) # hash of unicode value to glyph name
            self.components = self.f.getReverseComponentMapping()
            self.glyphComps = {} # {gName : [comp1, comp2, ...]}
            self.baseGlyphOffsets = {} # {baseGlyph: [(composite: offset), ...}
            self.compOffsets = {} # {compositeName: {baseGlyph: offset}}
            self.glyphOrder = [g for g in f.glyphOrder if g in f and g not in self.skipGlyphs] # eliminating non existing glyphs
            self.marks = set()
            self.bases = set()
            self.mkmk = set() # glyphs that have mark to mark positioning
            self.skipVirtualMarks = set() # duplicated components means implicit mark positioning and should be skipped
            self.rtl = set() # set of the right to left glyphs in the font
            self.rtlGroups = set() # set of rtl groups
            self._groups = list(self.f.groups) #
            self.groups = dict(self.f.groups.items()) # hash of group names to their members
            self.rightGrouped = set() # set of glyphs which exist in right groups
            self.leftGrouped = set() # set of glyphs which exist in left groups
            self.kerning = dict(f.kerning) # kerning data
            self.glyphs = set() # set of glyph names
            self.composites = {} # ligatures and composites
            self.basic_subs = {} # fina, cap, etc
            # report of the problems in naming scheme, etc
            self.log = []
            # list of glyph which might never be substited
            self.substituted = set()
            self.possible_malfunctions = []
            languageTagsKeys = set(languageTags)
            languageTagsValues = set([x.lower() for x in languageTags.values()])
            self.glyphPseudoUnicode = {}
            # ---- First pass: collect unicode, component and anchor data ----
            for gName in self.glyphOrder:
                g = self.f[gName]
                self.glyphs.add(gName)
                gBase = glyphBase.findall(g.name)[0]
                if gBase in self.glyphPseudoUnicode and g.unicode is not None:
                    self.glyphPseudoUnicode[gBase].add(g.unicode)
                elif g.unicode is not None:
                    self.glyphPseudoUnicode[gBase] = set([g.unicode])
                # making unicode dictionary
                if gName in self.glyphUnicode and g.unicode is not None:
                    # gBase = glyphBase.findall(gName)[0]
                    self.glyphUnicode[gName].add(g.unicode)
                elif g.unicode is not None:
                    self.glyphUnicode[gName] = set([g.unicode])
                gAnchors = {}
                if g.components:
                    self.compOffsets[gName] = {}
                    for comp in g.components:
                        base = comp.baseGlyph
                        try:
                            self.baseGlyphOffsets[base]
                        except KeyError:
                            self.baseGlyphOffsets[base] = {}
                        offset = comp.offset
                        try:
                            # a second identical base inside one composite
                            # implies implicit mark positioning
                            self.baseGlyphOffsets[base][gName].add(offset)
                            self.skipVirtualMarks.add(gName) # glyps which has duplicated glyphs
                        except KeyError:
                            self.baseGlyphOffsets[base][gName] = set([offset])
                        try:
                            self.glyphComps[gName].append(base)
                        except KeyError:
                            self.glyphComps[gName] = [base]
                        self.compOffsets[gName][base] = comp.offset
                    self.glyphComps[gName] = tuple(self.glyphComps[gName])
                if g.contours:
                    self.skipVirtualMarks.add(gName)
                # making anchor dictionaries
                if gName not in self.marksToSkip:
                    if len(g.anchors) > 0:
                        for anchor in g.anchors:
                            anchorName = anchor.name
                            if anchorName is not None:
                                # anchor names starting with '_' mark
                                # attachment points (mark glyphs)
                                if anchorName[0] == '_':
                                    self.marks.add(gName)
                                else:
                                    self.bases.add(gName)
                                if anchorName not in gAnchors:
                                    gAnchors[anchorName] = (anchor.x, anchor.y)
                                else:
                                    self.log.append("#\tWarning: anchor name '%s' is duplicated in glyph '%s'" %(anchorName, gName))
                                try:
                                    self.anchors[anchorName].add(gName)
                                except KeyError:
                                    self.anchors[anchorName] = set([gName])
                        self.glyphAnchor[gName] = gAnchors
                        self.skipVirtualMarks.discard(gName) # glyps which has duplicated glyphs but have anchors
            self.mkmk = self.marks & self.bases
            self.bases = self.bases - self.marks
            self._makeVirtualMarks()
            self.marksToSkip = self.marksToSkip | self.skipVirtualMarks
            for item in [self.marks, self.mkmk]:
                item -= self.marksToSkip
            # if all the glyphs from a anchor class doesn't have unicodes,
            # delete the class. but we need to make sure that these glyphs are
            # not substituted later by any gsub lookup.
            self.glyphSort = sorted(self.glyphOrder)
            # ---- Second pass: scripts, and feature assignment from names ----
            for gName in self.glyphSort:
                g = self.f[gName]
                gp = glyphProperty(gName, self)
                gscript = gp.script()
                if gp.rtl():
                    self.rtl.add(gName)
                if gscript != '' and gscript in self.scripts:
                    self.scripts[gscript].append(gName)
                elif gscript != '':
                    self.scripts[gscript] = [gName]
                if gscript != '' and gName in self.glyphScript:
                    self.glyphScript[gName].append(gscript)
                elif gscript != '':
                    self.glyphScript[gName] = gscript
                # ------------ Making feature dictionaries -----------
                assigned = False
                suffix = gName.split(".")
                if gName in skipGlyphs:
                    continue
                ligas = gName.split(lig_splitter)
                base_glyph = None
                feature_tag = None
                if len(ligas) > 1:
                    comps = [] # ligature components
                    # we have a ligature or composite
                    ligature_without_suffix = ".".join(suffix[:-1]).split(lig_splitter)
                    if not set(ligas) - self.glyphs:
                        # all the components are found
                        comps = ligas
                        if gName in self.glyphUnicode:
                            feature_tag = "rlig"
                            # by default a ligature with unicode is a required
                            # ligature
                        else:
                            feature_tag = "liga"
                    elif not set(ligature_without_suffix) - self.glyphs:
                        # components are found if suffix is removed
                        comps = ligature_without_suffix
                        try:
                            feature_tag = suffix2tag[suffix[-1]]
                        except KeyError:
                            self.log.append(wrap("#\tWarning: Parsing name of glyph '%s' suffix and can't associate it with an OpenType syntax." %(gName)))
                    num_comps = len(comps)
                    if feature_tag in compositeTags:
                        assigned = True
                        notUni = set(comps) - set(self.glyphUnicode)
                        if notUni and notUni - self.substituted:
                            self.possible_malfunctions.append(gName)
                        self.substituted.add(gName)
                        sub_order = (num_comps, comps, [gName])
                        self._addComposite(feature_tag, sub_order)
                if not assigned:
                    comps = self._getNestedComps(gName)
                    markComps = [c for c in comps if c in self.marks] # can't use set intersection because it removes duplicates
                    baseComps = [c for c in self.glyphComps.get(gName, []) if c in self.bases]
                    if len(suffix) > 1:
                        base_glyph = ".".join(suffix[:-1])
                        if suffix[-1] == 'dcmp':
                            feature_tag = 'ccmp'
                            decomposed = []
                            comps = self.glyphComps.get(gName)
                            # flatten nested composites down to leaf components
                            while comps:
                                for c in reversed(comps):
                                    if c not in self.glyphComps or len(self.glyphComps.get(c)) == 1:
                                        decomposed.insert(0, c)
                                comps = self._getComps(comps)
                            if decomposed:
                                missing = set(decomposed) - self.glyphs
                                if missing:
                                    self.log.append("#\tWarning: Can't decompose glyph '%s' to its components, missing component(s): %s" %(gName, ', '.join(missing)))
                                else:
                                    assigned = True
                                    sub_order = (1, [gName], decomposed)
                                    self._addComposite(feature_tag, sub_order)
                        elif base_glyph in self.glyphs:
                            suffixes = set([x.lower() for x in suffix[-1].split('_')]) # making sure pnum_onum also gets in
                            if not suffixes - set(suffix2tag):
                                for suffix in suffixes:
                                    feature_tag = suffix2tag[suffix]
                                    self.substituted.add(gName)
                                    assigned = True
                                    if base_glyph not in self.glyphUnicode and base_glyph not in self.substituted:
                                        self.possible_malfunctions.append(base_glyph)
                            elif not suffixes - languageTagsKeys or not suffixes - languageTagsValues:
                                # suffix names a language: record a locl pair
                                for suffix in suffixes:
                                    if suffix in languageTagsKeys:
                                        language = languageTags[suffix]
                                        assigned = True
                                    elif suffix in languageTagsValues:
                                        language = suffix.upper()
                                        assigned = True
                                    else:
                                        continue
                                    script = self.glyphScript[gName]
                                    if script == 'dflt':
                                        script = 'latn'
                                    try:
                                        self.localized[(script, language)].append((base_glyph, gName))
                                    except KeyError:
                                        self.localized[(script, language)] = [(base_glyph, gName)]
                                feature_tag = 'locl'
                            if assigned:
                                try:
                                    self.basic_subs[feature_tag][0].append(base_glyph)
                                    self.basic_subs[feature_tag][1].append(gName)
                                except KeyError:
                                    self.basic_subs[feature_tag] = [[base_glyph], [gName]]
                    elif (markComps and baseComps) or len(markComps) > 1:
                        assigned = True # not nessecary because there is no suffix
                        feature_tag = 'ccmp'
                        order = len(markComps)
                        allMarkPositions = permutations(markComps)
                        for comps in allMarkPositions:
                            # trying to compose glyph from its components if there
                            # is marks inside the glyph
                            sub_order = (order, baseComps + list(comps), [gName])
                            self._addComposite(feature_tag, sub_order)
                if (len(ligas) > 1 or len(suffix) > 1) and not assigned:
                    self.log.append("#\tWarning: Can't parse glyph name or associate glyph '%s' with a feature." %(gName))
                if assigned:
                    self.substituted.add(gName)
            # ---- Group classification (rtl / right / left kerning groups) ----
            for gr in self.groups:
                grGlyphs = self.groups[gr]
                if len(grGlyphs) > 0:
                    if set(grGlyphs) & self.rtl:
                        self.rtlGroups.add(gr)
                    if '_R_' in gr:
                        self.rightGrouped.update(grGlyphs)
                    elif '_L_' in gr:
                        self.leftGrouped.update(grGlyphs)
            if self.possible_malfunctions:
                error = wrap("#\tWarning: following glyphs might never be substituted in a text engine ever, check their unicode or the base glyph(s) unicode: %s" %(' '.join(self.possible_malfunctions)))
                self.log.append(error)
        else:
            raise Exception( "No font is available!")
    def _makeVirtualMarks(self):
        # adding marks to components according to base glyph if they don't
        # have that mark
        # note: if there are multiple marks in a composite which mark
        # should take which anchor for mark pos?
        defined = set()
        markBaseGlyphs = set(self.baseGlyphOffsets.keys()) & self.bases
        markBaseGlyphs -= self.marks
        # worklist loop: composites of composites get processed once their
        # base has anchors
        while markBaseGlyphs:
            for markBase in set(markBaseGlyphs):
                markBaseGlyphs.discard(markBase)
                for extramark in self.baseGlyphOffsets.get(markBase, []):
                    # extramark is name of the glyph composite we're definging
                    # new marks for
                    offset = sorted(self.baseGlyphOffsets[markBase][extramark])[0]
                    if extramark in self.baseGlyphOffsets:
                        markBaseGlyphs.add(extramark)
                    x, y = offset
                    self.bases.add(extramark)
                    baseanchors = self.glyphAnchor[markBase]
                    anchors = {}
                    for anchorName, anchorpos in baseanchors.items():
                        x2, y2 = anchorpos
                        anchors[anchorName] = (x+x2, y+y2)
                    for baseMarkGlyph in set(self.glyphComps.get(extramark, [])) & self.marks:
                        # remove unnessasary marks which are alreddy composed
                        # inside the composite glyph.
                        for anchorName in self.glyphAnchor[baseMarkGlyph]:
                            anchors.pop(anchorName[1:], None)
                    for baseMarkGlyph in set(self.glyphComps.get(extramark, [])) & self.mkmk:
                        # add marks if the mark glyph has mkmk, so if there is
                        # diacritic in the composite, the composite will get a
                        # mark position according to the mark glyph.
                        x, y = self.compOffsets[extramark][baseMarkGlyph]
                        for anchorName, anchorpos in self.glyphAnchor[baseMarkGlyph].items():
                            if anchorName[0] == '_':
                                continue
                            x2, y2 = anchorpos
                            anchors[anchorName] = (x+x2, y+y2)
                    anchors.update(self.glyphAnchor.get(extramark, {})) # don't override added marks by user
                    for anchorName in anchors:
                        self.anchors[anchorName].add(extramark)
                    self.glyphAnchor[extramark] = anchors
        for gName in self.skipVirtualMarks:
            self.glyphAnchor.pop(gName, None)
        for a, gSet in self.anchors.items():
            self.anchors[a] = gSet - self.skipVirtualMarks
    def _getComps(self, glist):
        # one level of nested-composite expansion (only composites with
        # more than one component are expanded further)
        comps = []
        for gName in glist:
            compList = self.glyphComps.get(gName)
            if compList and len(compList) > 1:
                comps.extend(compList)
        return comps
    def _getNestedComps(self, gName):
        # fully flatten a composite to its leaf components (recursive)
        comps = list(self.glyphComps.get(gName, []))
        for comp in comps[:]:
            nested = self._getNestedComps(comp)
            if nested:
                comps.extend(nested)
                comps.remove(comp)
        return comps
    def _addComposite(self, feature_tag, sub_order):
        # append a (order, inputs, outputs) substitution record per tag
        try:
            self.composites[feature_tag].append(sub_order)
        except KeyError:
            self.composites[feature_tag] = [sub_order]
    def __iter__(self):
        # iterating the font dic yields glyph names
        return self.glyphs.__iter__()
    def __getitem__(self, i):
        # glyph name -> set of unicodes, or None for unencoded glyphs
        if i in self.glyphUnicode:
            return self.glyphUnicode.__getitem__(i)
        elif i in self.glyphs:
            return None
        else:
            raise KeyError("Glyph '%s' doesn't exist in the font" %i)
class abstractFeature(object):
    # Base class for feature generators; subclasses set ``tag`` and fill
    # the per-GDEF-category glyph sets.
    tag = ''
    def __init__(self, fDic, classes):
        self.base = set()
        self.ligature = set()
        self.mark = set()
        self.component = set()
        self.fDic = fDic
        self.classes = classes # {className : [glyphName1, glyphName2, ...]
        self.aalt = set() # if the feature has rules then this will containt the feature tag(s)
        self.log = []
    def checkClass(self, className, classContents):
        """
        checks if the class is already defined, if so check its contents.
        if the contents match then it returns the class without changes.
        otherwise returns an alternate name and the contents.
        """
        # NOTE(review): the loop increments the name while the requested
        # contents are a subset of the already-defined class, stripping the
        # overlap each round -- confirm this matches the docstring's intent.
        incrementor = 0
        tempClass = className
        classSort = classContents[:]
        classContents = set(classContents)
        while tempClass in self.classes and not classContents - set(self.classes[tempClass]):
            incrementor += 1
            classContents = classContents - set(self.classes[tempClass])
            tempClass = '%s_%i' %(className, incrementor)
        # restore the caller's original ordering of the surviving glyphs
        classContents = sorted(classContents, key=lambda x: classSort.index(x))
        return tempClass, classContents
    def getFlags(self, gList):
        """
        gets a glyph list and returns associated lookup flag
        """
        flags = []
        # all glyphs rtl -> RightToLeft; no marks involved -> IgnoreMarks
        if not set(gList) - self.fDic.rtl:
            flags.append('rtl')
        if not set(gList) & self.fDic.marks:
            flags.append('nomark')
        if not flags:
            flags = ['noflag']
        return ' '.join(flags)
    def featureStart(self, tag=None):
        # banner comment plus any accumulated warnings for this feature
        if tag is None:
            tag = self.tag
        return '\n#*************************************************************************\n#\n# %s \n#\n#*************************************************************************\n\n%s' %(fea2name[tag], '\n'.join(self.log))
class aaltFeature(object):
    """Generates the 'aalt' feature block, which simply references other features."""
    tag = 'aalt'
    def __init__(self, aalt):
        self.aalt = aalt # list of features that needs to be included
    def featureStart(self, tag=None):
        # banner comment for the feature section
        if tag is None:
            tag = self.tag
        return '\n#*************************************************************************\n#\n# %s \n#\n#*************************************************************************\n\n' %fea2name[tag]
    def syntax(self):
        # '' when there is nothing to include
        if not self.aalt:
            return ''
        finaFea = feature(self.tag)
        rules = ['feature %s;' % fea for fea in self.aalt]
        finaFea.content('\n'.join(rules))
        return self.featureStart() + '\n' + finaFea.syntax()
|
11503132
|
from telegram_coin_bot.db.schema import Money, Session, db
def create_tables():
    # Create the bot's tables if they don't exist yet.
    # ``with db`` presumably opens/closes a peewee database connection
    # around the DDL -- TODO confirm against the schema module.
    with db:
        db.create_tables([Session, Money])
|
11503137
|
import math
import pytest
import torch
from snowfall.models.tdnnf import FactorizedTDNN, Tdnnf1a, _constrain_orthonormal_internal
torch.manual_seed(20200130)
def test_constrain_orthonormal():
    # Each application of the orthonormal constraint should strictly
    # decrease the deviation of M*M^T from a scaled identity.
    def compute_loss(M):
        # Frobenius distance of P = M*M^T from scale^2 * I, i.e. how far
        # M is from being (semi-)orthonormal up to a scale.
        P = torch.mm(M, M.t())
        P_PT = torch.mm(P, P.t())
        trace_P = torch.trace(P)
        trace_P_P = torch.trace(P_PT)
        scale = torch.sqrt(trace_P_P / trace_P)
        identity = torch.eye(P.size(0), dtype=P.dtype, device=P.device)
        Q = P / (scale * scale) - identity
        loss = torch.norm(Q, p='fro') # Frobenius norm
        return loss
    # raw random matrix: loss must decrease monotonically over 15 steps
    w = torch.randn(6, 8) * 10
    loss = []
    loss.append(compute_loss(w))
    for i in range(15):
        w = _constrain_orthonormal_internal(w)
        loss.append(compute_loss(w))
    for i in range(1, len(loss)):
        assert loss[i - 1] > loss[i]
    # TODO(fangjun): draw the loss using matplotlib
    # print(loss)
    # same property on the model's own constrain_orthonormal hooks
    model = FactorizedTDNN(dim=1024,
                           bottleneck_dim=128,
                           kernel_size=3,
                           subsampling_factor=1)
    loss = []
    for m in model.modules():
        if hasattr(m, 'constrain_orthonormal'):
            m.constrain_orthonormal()
    loss.append(
        compute_loss(model.linear.conv.state_dict()['weight'].reshape(128, -1)))
    for i in range(5):
        for m in model.modules():
            if hasattr(m, 'constrain_orthonormal'):
                m.constrain_orthonormal()
        loss.append(
            compute_loss(model.linear.conv.state_dict()['weight'].reshape(
                128, -1)))
    for i in range(1, len(loss)):
        assert loss[i - 1] > loss[i]
def test_factorized_tdnn():
    """Output-length checks for FactorizedTDNN across kernel/subsampling settings."""
    batch, frames, channels = 1, 10, 4
    inputs = torch.arange(batch * frames * channels).reshape(batch, channels, frames).float()

    # case 0: kernel_size == 1, subsampling_factor == 1 -> length preserved
    net = FactorizedTDNN(dim=channels,
                         bottleneck_dim=2,
                         kernel_size=1,
                         subsampling_factor=1)
    assert net(inputs).size(2) == frames

    # case 1: kernel_size == 3, subsampling_factor == 1 -> two frames of context lost
    net = FactorizedTDNN(dim=channels,
                         bottleneck_dim=2,
                         kernel_size=3,
                         subsampling_factor=1)
    assert net(inputs).size(2) == frames - 2

    # case 2: kernel_size == 1, subsampling_factor == 3
    net = FactorizedTDNN(dim=channels,
                         bottleneck_dim=2,
                         kernel_size=1,
                         subsampling_factor=3)
    assert net(inputs).size(2) == math.ceil(math.ceil((frames - 3)) - 3)
@pytest.mark.parametrize('seq_len', [126, 127, 128, 129, 130, 131])
def test_subsampling_matched_lengths(seq_len):
    """The model's output length must equal ceil(seq_len / 3) for lengths
    around the subsampling boundary."""
    feature_dim = 4
    n_classes = 7
    expected_len = math.ceil(seq_len / 3)
    net = Tdnnf1a(
        num_features=feature_dim,
        num_classes=n_classes,
        hidden_dim=16,
        bottleneck_dim=2,
        prefinal_bottleneck_dim=4,
    )
    outputs = net(torch.randn(1, feature_dim, seq_len))
    # Expected layout: (batch, subsampled time, classes).
    assert outputs.dim() == 3
    assert outputs.shape[0] == 1
    assert outputs.shape[1] == expected_len
    assert outputs.shape[2] == n_classes
|
11503149
|
import os, sys; sys.path.append(os.path.dirname(os.path.realpath(__file__)))
from GeoTableUtil import GeoTableUtil
|
11503163
|
import graphlayer as g
import graphlayer.sqlalchemy as gsql
from .. import database
from . import authors
# GraphQL object type for a book.  Fields are declared lazily (via lambda)
# so the circular reference to authors.Author resolves at first use.
Book = g.ObjectType("Book", fields=lambda: (
    g.field("author", type=authors.Author),
    g.field("title", type=g.String),
))
class BookQuery(object):
    """Query helpers for selecting Book rows."""

    @staticmethod
    def select(type_query):
        """Base SQL selection for a Book type query."""
        return gsql.select(type_query)

    @staticmethod
    def select_by_author_ids(type_query, author_ids):
        """Selection restricted to books whose author_id is in *author_ids*."""
        base_query = BookQuery.select(type_query)
        return base_query.by(database.Book.author_id, author_ids)
# Maps the Book GraphQL type onto the database.Book table.
book_sql_resolver = gsql.sql_table_resolver(
    Book,
    database.Book,
    fields=lambda: {
        # The author field is resolved through a join on author_id, delegating
        # to the authors module's own resolver.
        Book.fields.author: gsql.join(
            key=database.Book.author_id,
            resolve=lambda graph, field_query, ids: graph.resolve(
                authors.AuthorQuery.select_by_id(field_query.type_query, ids=ids),
            ),
        ),
        Book.fields.title: gsql.expression(database.Book.title),
    },
)
# Resolvers exported by this module.
resolvers = (
    book_sql_resolver,
)
|
11503219
|
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_swagger.views import get_swagger_view
from . import views, views_api
# Django REST framework
# Each ViewSet is registered under a resource-named prefix, e.g. /api/election/.
router = DefaultRouter()
router.register(r'election', views_api.ElectionInterface)
router.register(r'district', views_api.DistrictInterface)
router.register(r'municipality', views_api.MunicipalityInterface)
router.register(r'party', views_api.PartyInterface)
router.register(r'polling_station', views_api.PollingStationInterface)
router.register(r'list', views_api.ListInterface)
router.register(r'result', views_api.PollingStationResultInterface)
router.register(r'regional_electoral_district', views_api.RegionalElectoralDistrictInterface)
# Django OpenAPI Swagger
schema_view = get_swagger_view(title='Offene Wahlen API')
urlpatterns = [
    url(r'^$', views.index, name='index'),
    # Verification file for loader.io load testing.
    url(r'^loaderio-eac9628bcae9be5601e1f3c62594d162.txt$', views.load_test, name='load_test'),
    url(r'^api/', include(router.urls)),
    url(r'^api/docs$', schema_view)
]
|
11503254
|
import logging
import asyncio
import aiohttp.web
import aiopg
import psycopg2.extras
import api_hour
from . import endpoints
LOG = logging.getLogger(__name__)
class Container(api_hour.Container):
    """API-Hour container: wires the aiohttp HTTP server, its routes, and the
    PostgreSQL connection pool engine."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.config is None:
            raise ValueError('An API-Hour config dir is needed.')
        ## Servers
        # You can define several servers, to listen HTTP and SSH for example.
        # If you do that, you need to listen on two ports with api_hour --bind command line.
        self.servers['http'] = aiohttp.web.Application(loop=kwargs['loop'])
        self.servers['http']['ah_container'] = self # keep a reference to Container
        # routes
        self.servers['http'].router.add_route('GET',
                                              '/index',
                                              endpoints.index.index)
        self.servers['http'].router.add_route('GET',
                                              '/agents',
                                              endpoints.benchmarks.agents)

    def make_servers(self):
        """Return the protocol handlers to bind on each socket."""
        # This method is used by api_hour command line to bind each server on each socket
        # Please don't touch if you don't understand how it works
        return [self.servers['http'].make_handler(logger=self.worker.log,
                                                  debug=False,
                                                  keep_alive=self.worker.cfg.keepalive,
                                                  access_log=self.worker.log.access_log,
                                                  access_log_format=self.worker.cfg.access_log_format)]

    @classmethod
    def make_event_loop(cls, config):
        """To customize loop generation"""
        if config['event_loop'] == 'aiouv':
            LOG.info('Using aiouv event loop')
            import aiouv
            return aiouv.EventLoop()
        else:
            LOG.info('Using default AsyncIO event loop')
            return asyncio.new_event_loop()

    async def start(self):
        """Start the container, then create the PostgreSQL pool engine."""
        await super().start()
        LOG.info('Starting engines...')
        # Add your custom engines here, example with PostgreSQL:
        # The pool creation is scheduled as a task; self.engines['pg'] holds
        # the Task, and its .result() is the actual pool.
        self.engines['pg'] = self.loop.create_task(aiopg.create_pool(host=self.config['engines']['pg']['host'],
                                                                     port=int(self.config['engines']['pg']['port']),
                                                                     sslmode='disable',
                                                                     dbname=self.config['engines']['pg']['dbname'],
                                                                     user=self.config['engines']['pg']['user'],
                                                                     password=self.config['engines']['pg']['password'],
                                                                     cursor_factory=psycopg2.extras.RealDictCursor,
                                                                     minsize=int(self.config['engines']['pg']['minsize']),
                                                                     maxsize=int(self.config['engines']['pg']['maxsize'])))
        await asyncio.wait([self.engines['pg']], return_when=asyncio.ALL_COMPLETED)
        LOG.info('All engines ready !')

    async def stop(self):
        """Terminate the PostgreSQL pool (if created) before stopping the container."""
        LOG.info('Stopping engines...')
        # Add your custom end here, example with PostgreSQL:
        if 'pg' in self.engines:
            if self.engines['pg'].done():
                self.engines['pg'].result().terminate()
                await self.engines['pg'].result().wait_closed()
            else:
                # NOTE(review): Task.cancel() returns a bool, not an awaitable;
                # awaiting it would raise TypeError — confirm intended.
                await self.engines['pg'].cancel()
        LOG.info('All engines stopped !')
        await super().stop()
|
11503256
|
import argparse
import random
import numpy as np
from sklearn.model_selection import train_test_split
import torch
import torch.nn as nn
from torch import optim
from torch.utils.data import Dataset, DataLoader
import dataLoader as loader
import preprocessing as pproc
import models
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def argparser():
    """Parse the training script's command-line options.

    Returns
    -------
    argparse.Namespace with fields: filename, clean_drop, epochs, batch_size,
    learning_rate, hidden_size, n_layers, dropout_p.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--filename', default='Posts.xml')
    parser.add_argument('--clean_drop', default=False,
                        help='Drop if either title or body column is NaN')
    parser.add_argument('--epochs', type=int, default=7,
                        help='Number of epochs to train. Default=7')
    parser.add_argument('--batch_size', type=int, default=2,
                        help='Mini batch size for gradient descent. Default=2')
    parser.add_argument('--learning_rate', type=float, default=.001,
                        help='Learning rate. Default=.001')
    parser.add_argument('--hidden_size', type=int, default=64,
                        help='Hidden size of LSTM. Default=64')
    parser.add_argument('--n_layers', type=int, default=1,
                        help='Number of layers. Default=1')
    parser.add_argument('--dropout_p', type=float, default=.1,
                        help='Dropout ratio. Default=.1')
    return parser.parse_args()
class QuestionAnswerDataset(Dataset):
    """Dataset of (question, answer, question_len, answer_len, label) tuples.

    Only rows with ``posttypeid == 1`` (questions) are kept; titles act as
    questions and bodies as answers, both tokenized into zero-padded index
    sequences of length ``maxlen``.  With ``negative_sampling`` enabled, the
    positive pairs are doubled with independently shuffled (mismatched)
    pairs labelled 0.

    Note: if samples came out unpadded (different lengths), the pytorch
    DataLoader would return a python list instead of a tensor with samples
    truncated to the minimum length in the batch — hence fixed-size padding.
    """

    def __init__(self, input, tokenizer, maxlen=32, negative_sampling=True):
        self.input = input[input.posttypeid==1]
        self.tokenizer = tokenizer
        self.maxlen = maxlen
        self.questions = np.array([self.indexesFromSentences(sentences) for sentences in self.input.title])
        self.answers = np.array([self.indexesFromSentences(sentences) for sentences in self.input.body])
        self.labels = np.ones(len(self.questions))
        if negative_sampling:
            self.n_questions, self.n_answers = self.negativeSampling(questions=self.questions,
                                                                     answers=self.answers)
            self.n_labels = np.zeros(len(self.n_questions))
            self.questions = np.concatenate((self.questions, self.n_questions))
            self.answers = np.concatenate((self.answers, self.n_answers))
            self.labels = np.concatenate((self.labels, self.n_labels))
        # Effective (non-padded) lengths; zero is the padding index.
        self.questions_len = np.array([np.count_nonzero(q) for q in self.questions])
        self.answers_len = np.array([np.count_nonzero(a) for a in self.answers])
        self.labels = self.labels.reshape(-1, 1)

    def __getitem__(self, idx):
        # |self.questions|, |self.answers| = (n_samples, maxlen)
        # |self.questions_len|, |self.answers_len| = (n_samples, 1)
        # |self.labels| = (n_samples, 1)
        return self.questions[idx], self.answers[idx], self.questions_len[idx], self.answers_len[idx], self.labels[idx]

    def __len__(self):
        return len(self.questions)

    def indexesFromSentences(self, sentences):
        """Tokenize a (possibly multi-line) text into a padded index sequence."""
        indexes = []
        for sentence in sentences.splitlines():
            # BUG FIX: this used the module-level global ``tokenizer`` instead
            # of the tokenizer handed to this instance, which raises NameError
            # when the class is used outside the training script.
            sentence = self.tokenizer.normalizeString(sentence)
            indexes += [self.tokenizer.word2index[word] for word in sentence.split(' ')]
        padded_indexes = self.padSequences(indexes) # padding
        return padded_indexes

    def padSequences(self, indexes):
        """Zero-pad (or truncate) ``indexes`` to exactly ``self.maxlen``."""
        padded = np.zeros((self.maxlen,), dtype=np.int64)
        if len(indexes) > self.maxlen:
            padded = indexes[:self.maxlen]
        else:
            padded[:len(indexes)] = indexes
        return padded

    def negativeSampling(self, questions, answers):
        """Return questions and answers independently shuffled to build mismatched pairs."""
        indexes = list(range(len(questions)))
        random.shuffle(indexes)
        negative_questions = [questions[i] for i in indexes]
        random.shuffle(indexes)
        negative_answers = [answers[i] for i in indexes]
        return np.array(negative_questions), np.array(negative_answers)
def sort_by_len(sequences, sequence_length):
    """Sort a batch by sequence length, descending.

    Returns (reordered sequences, sorted lengths) so packed-RNN utilities can
    consume the batch.
    """
    sorted_lengths, order = sequence_length.sort(dim=0, descending=True)
    return sequences[order], sorted_lengths
def train_model(epoch):
    """Run one training epoch over train_loader and print averaged loss/accuracy.

    Relies on module-level globals set up in __main__: model, optimizer,
    criterion, train_loader, device.
    """
    model.train()
    losses, accs = 0, 0
    for i, data in enumerate(train_loader, 0):
        optimizer.zero_grad()
        qus, ans, qus_len, ans_len, labels = data
        qus, ans, labels = qus.to(device), ans.to(device), labels.to(device=device, dtype=torch.float32)
        # |qus|, |ans| = (batch_size, maxlen)
        # |qus_len|, |ans_len| = (batch_size)
        # |labels| = (batch_size, 1)
        # sort by sequence length in descending order
        # NOTE(review): questions and answers are sorted independently, which
        # breaks (question, answer, label) row alignment — confirm the model
        # expects that.
        qus, qus_len = sort_by_len(qus, qus_len)
        ans, ans_len = sort_by_len(ans, ans_len)
        # get loss
        output = model(qus, ans, qus_len, ans_len)
        loss = criterion(output, labels)
        losses += loss.item()
        # |output| = (batch_size, 1)
        # |loss| = (1)
        # get accuracy (round the sigmoid output to a hard 0/1 prediction)
        acc = (torch.round(output) == labels).sum().item()/len(qus)
        accs += acc
        loss.backward()
        optimizer.step()
        # if i % 300 == 0:
        #     print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}\tAccuracy: {:.2f}%'.format(
        #           epoch, i * len(qus), len(train_loader.dataset), 100. * i / len(train_loader), loss.item(), 100*acc))
    print('====> Train Epoch: {} Average loss: {:.4f}\tAverage accuracy: {:.2f}%'.format(
        epoch, losses / len(train_loader), 100*accs/len(train_loader)))
def test_model():
    """Evaluate the model on test_loader and print averaged loss/accuracy.

    Relies on module-level globals: model, criterion, test_loader, device,
    and — in the final print — the ``epoch`` variable from the training loop
    in __main__ (this function takes no epoch parameter itself).
    """
    model.eval()
    losses, accs = 0, 0
    with torch.no_grad():
        for i, data in enumerate(test_loader, 0):
            qus, ans, qus_len, ans_len, labels = data
            qus, ans, labels = qus.to(device), ans.to(device), labels.to(device=device, dtype=torch.float32)
            # sort by sequence length in descending order
            qus, qus_len = sort_by_len(qus, qus_len)
            ans, ans_len = sort_by_len(ans, ans_len)
            # get loss
            output = model(qus, ans, qus_len, ans_len)
            loss = criterion(output, labels)
            losses += loss.item()
            # get accuracy
            acc = (torch.round(output) == labels).sum().item()/len(qus)
            accs += acc
    print('====> Test Epoch: {} Average loss: {:.4f}\tAverage accuracy: {:.2f}%\n'.format(
        epoch, losses / len(test_loader), 100*accs/len(test_loader)))
if __name__=='__main__':
    config = argparser()
    # data load
    data = loader.to_dataframe('data/'+config.filename)
    # preprocessing
    # NOTE(review): 'word_emb_matirx' is a typo for 'word_emb_matrix'; it is
    # used consistently below so behavior is unaffected.
    data, word_emb_matirx, tfidf_matrix, tokenizer = pproc.preprocessing(input = data,
                                                                         clean_drop = config.clean_drop)
    # |data| = (n_pairs, n_columns) = (91,517, 5)
    # |word_emb_matrix| = (tokenizer.n_words, 100)
    # |tfidf_matrix| = (tokenizer.n_words, 1)
    # build dataset & data loader (90/10 train/test split)
    train, test = train_test_split(data, test_size=0.1)
    qa_train = QuestionAnswerDataset(train, tokenizer, negative_sampling=True)
    train_loader = DataLoader(dataset=qa_train, batch_size=config.batch_size, shuffle=True, num_workers=4)
    # The whole test set is evaluated as a single batch.
    qa_test = QuestionAnswerDataset(test, tokenizer, negative_sampling=True)
    test_loader = DataLoader(dataset=qa_test, batch_size=len(qa_test), shuffle=False, num_workers=4)
    print('Total batches - train: {}, test: {}'.format(len(train_loader), len(test_loader)))
    # build model
    model = models.Model(input_size = tokenizer.n_words,
                         embedding_size = 100,
                         hidden_size = config.hidden_size,
                         n_layers = config.n_layers,
                         dropout_p = config.dropout_p,
                         word_embedding_matrix = word_emb_matirx,
                         tfidf_matrix = tfidf_matrix
                         ).to(device)
    criterion = nn.BCELoss()
    optimizer = optim.Adam(model.parameters(), lr=config.learning_rate)
    print(model)
    # train: evaluate after every epoch
    for epoch in range(1, config.epochs+1):
        train_model(epoch)
        test_model()
    # save model
    torch.save(model.state_dict(), 'model.pth')
|
11503265
|
import datetime
import io
import logging
import unittest
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.utils import timezone
class CreateUserMixin(TestCase):
    """TestCase mixin that provisions a default user in setUp()."""

    def setUp(self):
        super().setUp()
        self.user = self.create_user()

    def create_user(self, username="john", password="<PASSWORD>", **kwargs):
        """Create and persist a user whose last login was one hour ago."""
        user_model = get_user_model()
        one_hour = datetime.timedelta(seconds=3600)
        user = user_model(
            username=username,
            last_login=timezone.now() - one_hour,
            **kwargs,
        )
        user.set_password(password)
        user.save()
        return user

    @staticmethod
    def get_user(user_id):
        """Fetch a user by primary key, or None when no such user exists."""
        return get_user_model().objects.filter(pk=user_id).first()
class CaptureLogMixin(unittest.TestCase):
    """TestCase mixin that captures everything logged by ``logger_name``
    into an in-memory buffer for assertions."""

    logger_name = "sesame"

    def setUp(self):
        super().setUp()
        # Route the target logger into a StringIO we can inspect.
        self.buffer = io.StringIO()
        self.handler = logging.StreamHandler(self.buffer)
        self.logger = logging.getLogger(self.logger_name)
        self.logger.addHandler(self.handler)
        self.logger.setLevel(logging.DEBUG)

    def tearDown(self):
        # Detach our handler so later tests don't accumulate capture state.
        self.logger.removeHandler(self.handler)
        super().tearDown()

    @property
    def logs(self):
        """All text logged to the target logger since setUp()."""
        self.handler.flush()
        return self.buffer.getvalue()

    def assertNoLogs(self):
        """Assert nothing has been logged."""
        self.assertEqual(self.logs, "")

    def assertLogsContain(self, message):
        """Assert *message* occurs somewhere in the captured logs."""
        self.assertIn(message, self.logs)
|
11503278
|
from __future__ import print_function
import argparse
import csv
import hashlib
import os
import pytsk3
import pyewf
import sys
from tqdm import tqdm
"""
MIT License
Copyright (c) 2017 <NAME>, <NAME>
Please share comments and questions at:
https://github.com/PythonForensics/PythonForensicsCookbook
or email <EMAIL>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
__authors__ = ["<NAME>", "<NAME>"]
__date__ = 20170815
__description__ = "Utility to hash files within an evidence containers"
def main(image, img_type, hashes, part_type, pbar_total=0):
    """Open an evidence container and hash every file in it against a hash list.

    :param image: path to the evidence file (raw or EWF)
    :param img_type: "ewf" or "raw"
    :param hashes: path to a newline-delimited hash list file
    :param part_type: partition table type name (e.g. "DOS"), or None to autodetect
    :param pbar_total: expected total file count, for the progress bar
    """
    hash_list, hash_type = read_hashes(hashes)
    volume = None
    print("[+] Opening {}".format(image))
    if img_type == "ewf":
        try:
            # EWF evidence may be split across segments; glob collects them all.
            filenames = pyewf.glob(image)
        except IOError:
            _, e, _ = sys.exc_info()
            print("[-] Invalid EWF format:\n {}".format(e))
            sys.exit(2)
        ewf_handle = pyewf.handle()
        ewf_handle.open(filenames)
        # Open PYTSK3 handle on EWF Image
        img_info = EWFImgInfo(ewf_handle)
    else:
        img_info = pytsk3.Img_Info(image)
    try:
        if part_type is not None:
            attr_id = getattr(pytsk3, "TSK_VS_TYPE_" + part_type)
            volume = pytsk3.Volume_Info(img_info, attr_id)
        else:
            volume = pytsk3.Volume_Info(img_info)
    except IOError:
        # No partition table: volume stays None and open_fs treats the image
        # as a single file system.
        _, e, _ = sys.exc_info()
        print("[-] Unable to read partition table:\n {}".format(e))
    open_fs(volume, img_info, hash_list, hash_type, pbar_total)
def read_hashes(hashes):
    """Read a newline-delimited hash list and infer the hash algorithm.

    The algorithm is inferred from the hex length of the first hash seen:
    32 chars -> MD5, 40 -> SHA-1, 64 -> SHA-256.  Exits with status 3 when
    no recognizable hash is found.

    :param hashes: path to the hash list file
    :return: tuple of (list of lowercased hash strings, hash type name)
    """
    hash_list = []
    hash_type = None
    with open(hashes) as infile:
        for line in infile:
            if hash_type is None:
                stripped_len = len(line.strip())
                if stripped_len == 32:
                    hash_type = "md5"
                elif stripped_len == 40:
                    # BUG FIX: the original used '==' (comparison) instead of
                    # '=' here and below, so SHA-1 and SHA-256 lists were
                    # never detected and the tool always exited with code 3.
                    hash_type = "sha1"
                elif stripped_len == 64:
                    hash_type = "sha256"
            hash_list.append(line.strip().lower())
    if hash_type is None:
        print("[-] No valid hashes identified in {}".format(hashes))
        sys.exit(3)
    return hash_list, hash_type
def open_fs(vol, img, hashes, hash_type, pbar_total=0):
    """Open each file system in the volume (or the whole image) and hash its files.

    :param vol: pytsk3.Volume_Info, or None when the image has no partition table
    :param img: pytsk3.Img_Info handle
    :param hashes: list of known hashes to match against
    :param hash_type: "md5", "sha1" or "sha256"
    :param pbar_total: expected total file count, for the progress bar
    """
    # Open FS and Recurse
    print("[+] Recursing through and hashing files")
    pbar = tqdm(desc="Hashing", unit=" files",
                unit_scale=True, total=pbar_total)
    if vol is not None:
        for part in vol:
            # Skip tiny, unallocated and bookkeeping partitions.
            if part.len > 2048 and "Unallocated" not in part.desc and \
                    "Extended" not in part.desc and \
                    "Primary Table" not in part.desc:
                try:
                    fs = pytsk3.FS_Info(
                        img, offset=part.start * vol.info.block_size)
                except IOError:
                    _, e, _ = sys.exc_info()
                    print("[-] Unable to open FS:\n {}".format(e))
                # NOTE(review): if FS_Info raised above, 'fs' is unbound (or
                # stale from a previous partition) here — confirm intended.
                root = fs.open_dir(path="/")
                recurse_files(part.addr, fs, root, [], [""], hashes,
                              hash_type, pbar)
    else:
        try:
            fs = pytsk3.FS_Info(img)
        except IOError:
            _, e, _ = sys.exc_info()
            print("[-] Unable to open FS:\n {}".format(e))
        root = fs.open_dir(path="/")
        recurse_files(1, fs, root, [], [""], hashes, hash_type, pbar)
    pbar.close()
def recurse_files(part, fs, root_dir, dirs, parent, hashes,
                  hash_type, pbar):
    """Walk a directory tree depth-first, hashing every regular file.

    :param part: partition address (used only for context)
    :param fs: pytsk3.FS_Info handle
    :param root_dir: directory object to walk
    :param dirs: list of visited directory inodes (cycle guard, mutated in place)
    :param parent: path components of the current directory (mutated in place)
    :param hashes: list of known hashes to match against
    :param hash_type: "md5", "sha1" or "sha256"
    :param pbar: tqdm progress bar
    """
    dirs.append(root_dir.info.fs_file.meta.addr)
    for fs_object in root_dir:
        # Skip ".", ".." or directory entries without a name.
        if not hasattr(fs_object, "info") or \
                not hasattr(fs_object.info, "name") or \
                not hasattr(fs_object.info.name, "name") or \
                fs_object.info.name.name in [".", ".."]:
            continue
        try:
            file_path = "{}/{}".format("/".join(parent),
                                       fs_object.info.name.name)
            if getattr(fs_object.info.meta, "type", None) == \
                    pytsk3.TSK_FS_META_TYPE_DIR:
                parent.append(fs_object.info.name.name)
                sub_directory = fs_object.as_directory()
                inode = fs_object.info.meta.addr
                # This ensures that we don't recurse into a directory
                # above the current level and thus avoid circular loops.
                if inode not in dirs:
                    recurse_files(part, fs, sub_directory, dirs,
                                  parent, hashes, hash_type, pbar)
                parent.pop(-1)
            else:
                hash_file(fs_object, file_path, hashes, hash_type, pbar)
        except IOError:
            # Unreadable entries are skipped silently (best effort).
            pass
    dirs.pop(-1)
def hash_file(fs_object, path, hashes, hash_type, pbar):
    """Hash one file's content and report it via the progress bar on a match.

    :param fs_object: pytsk3 file object exposing .info.meta and read_random()
    :param path: human-readable path for the match report
    :param hashes: collection of known hash digests (lowercase hex)
    :param hash_type: "md5", "sha1" or "sha256"
    :param pbar: tqdm progress bar (set_postfix/update/write are used)
    :raises ValueError: on an unsupported hash type
    """
    # BUG FIX: with an unexpected hash_type the original if/elif chain left
    # hash_obj unbound and crashed later with a NameError; fail explicitly.
    if hash_type not in ("md5", "sha1", "sha256"):
        raise ValueError("unsupported hash type: {!r}".format(hash_type))
    hash_obj = hashlib.new(hash_type)
    f_size = getattr(fs_object.info.meta, "size", 0)
    pbar.set_postfix(File_Size="{:.2f}MB".format(f_size / 1024.0 / 1024))
    hash_obj.update(fs_object.read_random(0, f_size))
    hash_digest = hash_obj.hexdigest()
    pbar.update()
    if hash_digest in hashes:
        pbar.write("[*] MATCH: {}\n{}".format(path, hash_digest))
class EWFImgInfo(pytsk3.Img_Info):
    """pytsk3 image adapter that reads sectors through a pyewf handle."""

    def __init__(self, ewf_handle):
        self._ewf_handle = ewf_handle
        # TSK_IMG_TYPE_EXTERNAL tells TSK that we supply read()/get_size().
        super(EWFImgInfo, self).__init__(
            url="", type=pytsk3.TSK_IMG_TYPE_EXTERNAL)

    def close(self):
        self._ewf_handle.close()

    def read(self, offset, size):
        """Read *size* bytes starting at *offset* from the EWF image."""
        handle = self._ewf_handle
        handle.seek(offset)
        return handle.read(size)

    def get_size(self):
        """Total media size in bytes."""
        return self._ewf_handle.get_media_size()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__description__,
        epilog="Developed by {} on {}".format(
            ", ".join(__authors__), __date__)
    )
    parser.add_argument("EVIDENCE_FILE", help="Evidence file path")
    parser.add_argument("TYPE", help="Type of Evidence",
                        choices=("raw", "ewf"))
    parser.add_argument("HASH_LIST",
                        help="Filepath to Newline-delimited list of "
                             "hashes (either MD5, SHA1, or SHA-256)")
    parser.add_argument("-p", help="Partition Type",
                        choices=("DOS", "GPT", "MAC", "SUN"))
    parser.add_argument("-t", type=int,
                        help="Total number of files, for the progress bar")
    args = parser.parse_args()
    # Both the evidence container and the hash list must exist and be files.
    if os.path.exists(args.EVIDENCE_FILE) and \
            os.path.isfile(args.EVIDENCE_FILE) and \
            os.path.exists(args.HASH_LIST) and \
            os.path.isfile(args.HASH_LIST):
        main(args.EVIDENCE_FILE, args.TYPE, args.HASH_LIST, args.p, args.t)
    else:
        print("[-] Supplied input file {} does not exist or is not a "
              "file".format(args.EVIDENCE_FILE))
        sys.exit(1)
|
11503289
|
from .base import ConstituencyParser
from ...tags import *
from ...data_manager import DataManager
class StanfordParser(ConstituencyParser):
    """
    Constituency parser based on stanford parser.

    :Requirements:

    * java
    """

    TAGS = {TAG_English}

    def __init__(self):
        self.__parser = DataManager.load("TProcess.StanfordParser")

    def parse(self, sentence: str) -> str:
        """Return the first constituency parse of *sentence* as a string."""
        trees = list(self.__parser(sentence))
        return str(trees[0])
|
11503382
|
from pharmpy.rxcui import RxCUIEngine
import unittest
import time
class RxCUIEngineTestCase(unittest.TestCase):
    """RxCUI lookups must work for both scalar and list NDC inputs."""

    def test_cui(self):
        engine = RxCUIEngine()
        # Scalar NDC lookup.
        result = engine.get_rxcui("50090347201")
        self.assertEqual(result, "665044")
        # List NDC lookup returns a parallel list of CUIs.
        results = engine.get_rxcui(["50090347201"])
        self.assertEqual(results[0], "665044")
# Allow running this test module directly.
if __name__=="__main__":
    unittest.main()
|
11503395
|
import numpy as np
import matplotlib.pyplot as plt
from hyperion.model import ModelOutput
from hyperion.util.constants import pc
# Read in the model
m = ModelOutput('quantity_cartesian.rtout')

# Extract the quantities
g = m.get_quantities()

# Get the wall positions in pc
xw, yw = g.x_wall / pc, g.y_wall / pc

# Make a 2-d grid of the wall positions (used by pcolormesh)
X, Y = np.meshgrid(xw, yw)

# Calculate the density-weighted temperature (average over the third axis)
weighted_temperature = (np.sum(g['temperature'][0].array
                               * g['density'][0].array, axis=2)
                        / np.sum(g['density'][0].array, axis=2))

# Make the plot
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
c = ax.pcolormesh(X, Y, weighted_temperature)
ax.set_xlim(xw[0], xw[-1])
# BUG FIX: the y limits were set with a second set_xlim call, which clobbered
# the x limits and left the y axis at its default range.
ax.set_ylim(yw[0], yw[-1])
ax.set_xlabel('x (pc)')
ax.set_ylabel('y (pc)')
cb = fig.colorbar(c)
cb.set_label('Temperature (K)')
fig.savefig('weighted_temperature_cartesian.png', bbox_inches='tight')

# show image: a single slice (index 49 along the averaged axis)
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
c = ax.pcolormesh(X, Y, g['temperature'][0].array[:, 49, :])
ax.set_xlim(xw[0], xw[-1])
ax.set_ylim(yw[0], yw[-1])  # BUG FIX: was set_xlim (see above)
ax.set_xlabel('x (pc)')
ax.set_ylabel('y (pc)')
cb = fig.colorbar(c)
cb.set_label('Temperature (K)')
fig.savefig('sliced_temperature_cartesian.png', bbox_inches='tight')
|
11503404
|
import numpy as np
import json
from sklearn.preprocessing import KBinsDiscretizer
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split
class DecisionTreeDiscretizer():
    """Supervised discretizer: bins a feature by decision-tree leaf membership."""

    def __init__(self, n_bins):
        # Each tree leaf becomes one bin.
        self.n_bins = n_bins

    def fit_transform(self, X, y):
        """Fit a bounded-leaf tree on a random 80% split and label all of X."""
        X_fit, _, y_fit, _ = train_test_split(X, y, test_size=0.2, shuffle=True)
        tree = DecisionTreeClassifier(max_leaf_nodes=self.n_bins)
        tree.fit(X_fit, y_fit)
        return tree.predict(X)
def discretizer(df, configgers):
    """
    Discretize continuous columns of ``df``.

    Parameters
    ----------
    df: pd.DataFrame, the input DataFrame.
    configgers: str, a JSON object mapping each column (or column group) to
        its config: {"<encode_col>": {"method": ..., "n_bins": ..., "target_col": ...}}
        method: str, must be in ["isometric", "quantile", "KMeans", "trees"];
            if method is "trees", the field "target_col" can't be None.
        n_bins: int
        target_col: str, the target column's name (only used for "trees")

    Returns
    -------
    df_t: pd.DataFrame, the result DataFrame; the new feature column is named
        '"_".join(encode_col + [method, "discretize"])'.  Note df is modified
        in place (df_t is the same object).
    """
    df_t = df
    configgers = json.loads(configgers)
    # BUG FIX: 'for encode_col in configgers.keys:' iterated the bound
    # method object (missing parentheses), raising TypeError; iterate the
    # parsed dict directly instead.
    for encode_col in configgers:
        method = configgers[encode_col]["method"]
        n_bins = configgers[encode_col]["n_bins"]
        if method == "isometric":
            discretizer = KBinsDiscretizer(n_bins=n_bins, encode="ordinal", strategy="uniform")
        elif method == "quantile":
            discretizer = KBinsDiscretizer(n_bins=n_bins, encode="ordinal", strategy="quantile")
        elif method == "KMeans":
            discretizer = KBinsDiscretizer(n_bins=n_bins, encode="ordinal", strategy="kmeans")
        elif method == "trees":
            discretizer = DecisionTreeDiscretizer(n_bins=n_bins)
        else:
            raise ValueError(
                """
                The method value {func} is not be support for discretizer.
                It must be in ["isometric", "quantile", "KMeans", "trees"]""".format(
                    func=method))
        if method == "trees":
            target_col = configgers[encode_col]["target_col"]
            res = discretizer.fit_transform(X=df[encode_col], y=df[target_col])
        else:
            res = discretizer.fit_transform(X=df[encode_col])
        # BUG FIX: JSON keys are strings, so 'encode_col + [...]' raised
        # TypeError; build the name parts list for both str and list keys.
        name_parts = list(encode_col) if isinstance(encode_col, list) else [encode_col]
        df_t.loc[:, "_".join(name_parts + [method, "discretize"])] = res
    return df_t
|
11503427
|
from contentbase.auditor import (
AuditFailure,
audit_checker,
)
from contentbase import simple_path_ids
from contentbase.schema_utils import validate
@audit_checker('item', frame='object')
def audit_item_schema(value, system):
    """Audit that an item's stored properties validate against the latest schema.

    Upgrades the properties to the target schema version first (without
    finalizing), then yields one DCC_ACTION failure per validation error.
    """
    context = system['context']
    registry = system['registry']
    if not context.schema:
        return
    properties = context.properties.copy()
    current_version = properties.get('schema_version', '')
    target_version = context.type_info.schema_version
    if target_version is not None and current_version != target_version:
        migrator = registry['migrator']
        try:
            properties = migrator.upgrade(
                context.item_type, properties, current_version, target_version,
                finalize=False, context=context, registry=registry)
        except RuntimeError:
            raise
        except Exception as e:
            # A broken upgrader is itself an audit failure rather than a crash.
            detail = '%r upgrading from %r to %r' % (e, current_version, target_version)
            yield AuditFailure('upgrade failure', detail, level='DCC_ACTION')
            return
        properties['schema_version'] = target_version
    properties['uuid'] = str(context.uuid)
    validated, errors = validate(context.schema, properties, properties)
    for error in errors:
        # Categorize by the JSON path of the failing property, when present.
        category = 'validation error'
        path = list(error.path)
        if path:
            category += ': ' + '/'.join(path)
        detail = 'Object {} has schema error {}'.format(value['@id'], error.message)
        yield AuditFailure(category, detail, level='DCC_ACTION')
# Visibility weight per status, used by audit_item_status: 0 = invisible
# (deleted/disabled), 50 = in progress, 100 = public.  Statuses not listed
# default to 50 at the lookup sites.
STATUS_LEVEL = {
    # standard_status
    'released': 100,
    'deleted': 0,
    'replaced': 0,
    # shared_status
    'current': 100,
    'disabled': 0,
    # file
    'obsolete': 50,
    # antibody_characterization
    'compliant': 100,
    'not compliant': 100,
    'not reviewed': 100,
    'not submitted for review by lab': 100,
    # antibody_lot
    'eligible for new data': 100,
    'not eligible for new data': 100,
    'not pursued': 100,
    # dataset / experiment
    'release ready': 50,
    'revoked': 100,
    # publication
    'published': 100,
}
@audit_checker('item', frame='object')
def audit_item_status(value, system):
    """Audit that linked objects are at least as visible as this object.

    An object must not link to subobjects with a lower visibility level
    (e.g. a released item pointing at a deleted one).
    """
    if 'status' not in value:
        return
    level = STATUS_LEVEL.get(value['status'], 50)
    if level == 0:
        # Invisible objects are exempt from the check.
        return
    context = system['context']
    request = system['request']
    linked = set()
    for schema_path in context.type_info.schema_links:
        # These links intentionally cross visibility boundaries.
        if schema_path in ['supercedes', 'step_run']:
            continue
        linked.update(simple_path_ids(value, schema_path))
    for path in linked:
        linked_value = request.embed(path + '@@object')
        if 'status' not in linked_value:
            continue
        if linked_value['status'] == 'disabled':
            continue
        if (  # Special case: A revoked file can have a deleted replicate ticket #2938
            'file' in value['@type'] and
            value['status'] == 'revoked' and
            'replicate' in linked_value['@type'] and
            linked_value['status'] == 'deleted'
        ):
            continue
        linked_level = STATUS_LEVEL.get(linked_value['status'], 50)
        if linked_level == 0:
            # Linking to an invisible object is a hard error.
            detail = '{} {} has {} subobject {}'.format(
                value['status'], value['@id'], linked_value['status'], linked_value['@id'])
            yield AuditFailure('mismatched status', detail, level='ERROR')
        elif linked_level < level:
            # Linking to a less visible object only needs DCC attention.
            detail = '{} {} has {} subobject {}'.format(
                value['status'], value['@id'], linked_value['status'], linked_value['@id'])
            yield AuditFailure('mismatched status', detail, level='DCC_ACTION')
|
11503438
|
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvestercore.plugin_base import PluginBase
from pandaharvester.harvestercloud.googlecloud import compute, ZONE, PROJECT
from pandaharvester.harvestercore.queue_config_mapper import QueueConfigMapper
import googleapiclient
base_logger = core_utils.setup_logger('google_sweeper')
class GoogleSweeper(PluginBase):
    """
    Sweeper with kill/clean-up functions for Google Compute Engine
    """

    def __init__(self, **kwarg):
        PluginBase.__init__(self, **kwarg)
        self.queue_config_mapper = QueueConfigMapper()

    def kill_worker(self, work_spec):
        """
        Sends the command to Google to destroy a VM

        :param work_spec: worker specification
        :type work_spec: WorkSpec
        :return: A tuple of return code (True for success, False otherwise) and error dialog
        :rtype: (bool, string)
        """
        try:
            vm_name = work_spec.batchID
            # The zone can be overridden per queue; fall back to the global default.
            queue_config = self.queue_config_mapper.get_queue(work_spec.computingSite)
            try:
                zone = queue_config.zone
            except AttributeError:
                zone = ZONE
            base_logger.debug('Going to kill VM {0}'.format(vm_name))
            compute.instances().delete(project=PROJECT, zone=zone, instance=vm_name).execute()
            base_logger.debug('Killed VM {0}'.format(vm_name))
            return True, ''
        except googleapiclient.errors.HttpError as e:
            if 'was not found' in e.content:
                # the VM was already killed or does not exist for any other reason
                # BUG FIX: the original format string had no placeholder, so
                # .format(vm_name) silently dropped the VM name from the message.
                message = 'VM {0} does not exist'.format(vm_name)
                base_logger.debug(message)
                return True, message
            else:
                # there was an issue killing the VM and it should be retried at another time
                return False, 'Problems killing the VM: {0}'.format(e)
        except Exception as e:
            return False, 'Problems killing the VM: {0}'.format(e)

    def sweep_worker(self, work_spec):
        """
        In the cloud, cleaning means destroying a VM

        :param work_spec: worker specification
        :type work_spec: WorkSpec
        :return: A tuple of return code (True for success, False otherwise) and error dialog
        :rtype: (bool, string)
        """
        return self.kill_worker(work_spec)
|
11503450
|
# Sikuli (Jython) script: when either reference screenshot is found on
# screen, click near the match and close the window with Alt+F4.
if exists("1579792079784.png"):
    # Click slightly below/right of the match (offset 4,15), then close it.
    click(Pattern("1579792079784.png").targetOffset(4,15))
    sleep(1)
    type(Key.F4, KeyModifier.ALT)
    exit(0)
if exists("1596467122234.png"):
    click(Pattern("1596467122234.png").targetOffset(1,13))
    sleep(1)
    type(Key.F4, KeyModifier.ALT)
    exit(0)
# Nothing matched: exit cleanly.
exit(0)
|
11503455
|
from app import db
class BuyerEmailDomain(db.Model):
    """An approved buyer email domain (domain names are unique)."""

    __tablename__ = 'buyer_email_domains'

    id = db.Column(db.Integer, primary_key=True)
    domain_name = db.Column(db.String(), nullable=False, unique=True)

    def serialize(self):
        """Return a JSON-serializable representation of this row."""
        return {"id": self.id, "domainName": self.domain_name}
|
11503456
|
class Solution(object):
    """Longest substring containing at most k distinct characters."""

    def longest_substr(self, string, k):
        """Return the length of the longest substring of *string* with at
        most *k* distinct characters (sliding-window, O(n*k)).

        Raises TypeError when string or k is None.
        """
        if string is None:
            raise TypeError('string cannot be None')
        if k is None:
            raise TypeError('k cannot be None')
        window_start = 0
        best = 0
        last_seen = {}  # char -> index of its most recent occurrence
        for pos, symbol in enumerate(string):
            last_seen[symbol] = pos
            if len(last_seen) > k:
                # Evict the char whose latest occurrence is oldest and
                # advance the window past it.
                window_start = min(last_seen.values())
                del last_seen[string[window_start]]
                window_start += 1
            best = max(best, pos - window_start + 1)
        return best
|
11503502
|
from __future__ import division
def mem_common_form(mem, **kwargs):
    """
    Convert a memory size string to an integer number of kilobytes.

    Supported suffixes: 'M' (mebibytes -> * 1024), 'G' (gibibytes ->
    * 1024 * 1024), 'K' (kibibytes, taken as-is). A plain number with no
    suffix is interpreted as kilobytes as well.

    :param mem: memory size string, e.g. '512M', '2G', '1024K' or '1024'
    :return: size in kilobytes as int
    """
    if mem.endswith('M'):
        return int(mem[:-1]) * 1024
    elif mem.endswith('G'):
        return int(mem[:-1]) * 1024 * 1024
    elif mem.endswith('K'):
        return int(mem[:-1])
    else:
        # Fix: the original else-branch stripped the last character
        # unconditionally, which corrupted plain numeric values
        # ('1024' -> 102). Bare numbers are now parsed as-is.
        return int(mem)
|
11503561
|
from contracts.main import new_contract
# This module only registers contracts as a side effect; it exports nothing.
__all__ = []
try:
    import numpy  # @UnusedImport
except ImportError:  # pragma: no cover
    # numpy unavailable: register plain-Python numeric contracts only.
    new_contract('float', 'Float')
    new_contract('int', 'Int')
    new_contract('number', 'float|int')
else:
    # numpy available: contracts also accept numpy scalars and
    # arrays of the matching dtype.
    new_contract('float', 'Float|np_scalar_float|(np_scalar, array(float))')
    new_contract('int', 'Int|np_scalar_int|(np_scalar,array(int))')
    new_contract('uint', 'np_scalar_uint|(np_scalar, array(uint))')
    new_contract('number', 'float|int|uint')
|
11503580
|
from rest_framework.reverse import reverse
from rest_framework.status import (
HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND)
from allegation.factories import (
OfficerFactory, PoliceWitnessFactory, OfficerAllegationFactory)
from common.tests.core import SimpleTestCase
class MobileOfficerViewTest(SimpleTestCase):
    """Tests for the mobile officer detail endpoint."""

    def call_related_officer_api(self, params=None):
        """GET the mobile officer endpoint and return (response, parsed JSON).

        Fix: `params` previously used a mutable default argument (`{}`),
        which is shared between calls; a None sentinel is the safe idiom.
        """
        response = self.client.get(reverse('mobile:mobile-officer'), params or {})
        data = self.json(response)
        return response, data

    def test_officer_detail_when_successfully_call_the_api(self):
        # One officer with a complaint, one co-accused on the same
        # allegation, and one police witness on the same crid.
        officer = OfficerFactory()
        co_accused_officer = OfficerFactory()
        witness_officer = OfficerFactory()
        officer_allegation = OfficerAllegationFactory(officer=officer)
        PoliceWitnessFactory(crid=officer_allegation.allegation.crid, officer=witness_officer,
                             allegation=officer_allegation.allegation)
        OfficerAllegationFactory(allegation=officer_allegation.allegation, officer=co_accused_officer)
        response, data = self.call_related_officer_api({'pk': officer.pk})
        response.status_code.should.equal(HTTP_200_OK)
        detail = data['detail']
        complaints = data['complaints']
        detail['id'].should.be.equal(officer.id)
        detail['appt_date'].should.be.equal(officer.appt_date)
        detail['unit'].should.be.equal(officer.unit.unit_name)
        detail['gender'].should.be.equal(officer.gender)
        detail['rank'].should.be.equal(officer.rank)
        detail['race'].should.be.equal(officer.race)
        detail['officer_first'].should.be.equal(officer.officer_first)
        detail['officer_last'].should.be.equal(officer.officer_last)
        len(complaints).should.be(1)
        complaints[0]['crid'].should.be.equal(str(officer_allegation.allegation.crid))
        len(complaints[0]['officer_allegation_set']).should.be.equal(2)
        len(data['co_accused']).should.be.equal(1)
        data.should.contain('distribution')

    def test_return_404_when_get_invalid_pk(self):
        invalid_pk = -1
        response, data = self.call_related_officer_api({'pk': invalid_pk})
        response.status_code.should.equal(HTTP_404_NOT_FOUND)

    def test_return_400_when_get_bad_pk(self):
        bad_pk = 'xyz'
        response, data = self.call_related_officer_api({'pk': bad_pk})
        response.status_code.should.equal(HTTP_400_BAD_REQUEST)
|
11503624
|
import matplotlib.pyplot as plt
from oneibl.one import ONE
import brainbox.plot as bbp
# Example script: fetch one session from the ONE database and draw
# peri-event time histograms (PETH) around the go-cue times.
one = ONE()
eid = one.search(lab='wittenlab', date='2019-08-04')[0]
probe_label = 'probe00'
spikes = one.load_object(eid, 'spikes', collection=f'alf/{probe_label}')
trials = one.load_object(eid, 'trials', collection='alf')
# For a simple peth plot without a raster, all we need to input is spike times, clusters, event
# times, and the identity of the cluster we want to plot, e.g. in this case cluster 121
ax = bbp.peri_event_time_histogram(spikes.times, spikes.clusters, trials.goCue_times, 121)
# Or we can include a raster plot below the PETH:
fig = plt.figure()
ax = plt.gca()
bbp.peri_event_time_histogram(spikes.times,  # Spike times first
                              spikes.clusters,  # Then cluster ids
                              trials.goCue_times,  # Event markers we want to plot against
                              121,  # Identity of the cluster we plot
                              t_before=0.4, t_after=0.4,  # Time before and after the event
                              error_bars='sem',  # Whether we want Stdev, SEM, or no error
                              include_raster=True,  # adds a raster to the bottom
                              n_rasters=55,  # How many raster traces to include
                              ax=ax)  # Make sure we plot to the axis we created
|
11503635
|
import os
from math import ceil
import _path_cheesecake
from _helper_cheesecake import DATA_PATH
from cheesecake.cheesecake_index import Cheesecake, CodeParser
class TestIndexDocstrings(object):
    """Exercise the DOCUMENTATION/IndexDocstrings index on a fixture package."""

    def setUp(self):
        self.cheesecake = Cheesecake(path=os.path.join(DATA_PATH, "package2.tar.gz"))
        # package2 contains 5 modules, 2 classes, 4 functions and 3 methods,
        # i.e. 14 objects that could carry a docstring, of which 7 do.
        self.documentable_objects = 5 + 2 + 4 + 3
        self.docstring_count = 7
        self.index_float = float(self.docstring_count) / self.documentable_objects
        self.index_int = int(ceil(self.index_float * 100))

    def tearDown(self):
        self.cheesecake.cleanup()

    def test_index_docstrings(self):
        index = self.cheesecake.index["DOCUMENTATION"]["IndexDocstrings"]
        index.compute_with(self.cheesecake)
        assert index.name == "IndexDocstrings"
        assert index.value == self.index_int
        expected_details = "found %d/%d=%.2f%% objects with docstrings" % (
            self.docstring_count, self.documentable_objects, self.index_float * 100)
        assert index.details == expected_details
|
11503638
|
from .abasearchitecture import ABaseArchitecture
from .dcgan import DcganEncoder, DcganDecoder, DcganAutoEncoder
from fusion.utils import ObjectProvider
# Registry of available architecture classes, keyed by class name.
architecture_provider = ObjectProvider()
architecture_provider.register_object('DcganEncoder', DcganEncoder)
architecture_provider.register_object('DcganDecoder', DcganDecoder)
architecture_provider.register_object('DcganAutoEncoder', DcganAutoEncoder)
# NOTE(review): the concrete Dcgan* classes are deliberately not listed
# here — consumers are expected to go through the provider; confirm
# before adding them to the public API.
__all__ = [
    'ABaseArchitecture',
    'architecture_provider',
]
|
11503639
|
import json
import numpy as np
import argparse
from pathlib import Path
import logging
from logging.config import fileConfig
import cv2
from typing import List, Tuple
import deeptennis.utils as utils
def mask_image(img: np.ndarray, pts: np.ndarray, dilate=False) -> np.ndarray:
    """
    Zero out the irrelevant part of the court and do edge detection.
    TODO: separate the crop and the edge detection
    :param img: original RGB image
    :param pts: vertices of the cropping area. Pixels outside this polygon will be zeroed.
    :param dilate: dilate image before
    :return: cropped/edged image (int32, zero outside the polygon)
    """
    # Canny edges on the grayscale image; 50/200 are the hysteresis thresholds.
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    cannyed_image = cv2.Canny(gray, 50, 200)
    if dilate:
        # Thicken the detected edges so thin court lines survive masking.
        kernel = cv2.getStructuringElement(cv2.MORPH_CROSS, (2, 2))
        cannyed_image = cv2.dilate(cannyed_image, kernel=kernel, iterations=2)
    # Build a filled-polygon mask and keep only edges inside it.
    mask = np.zeros(img.shape[:2], dtype=np.int32)
    match_mask_color = 255
    cv2.fillPoly(mask, [pts], match_mask_color)
    return cv2.bitwise_and(cannyed_image.astype(np.int32), mask)
def get_lines(lines: np.ndarray,
              min_vert_len: int = 100,
              min_horiz_len: int = 100,
              min_vert_slope: float = 1.5,
              max_horiz_slope: float = 0.01) -> Tuple[np.ndarray, np.ndarray]:
    """Split Hough segments into near-horizontal and near-vertical groups.

    Each segment is ``[x1, y1, x2, y2]``. Segments that are too short or
    whose slope falls between the two thresholds are discarded.

    :return: (horizontal_lines, vertical_lines) as numpy arrays
    """
    horiz, vert = [], []
    for seg in lines:
        x1, y1, x2, y2 = seg[0], seg[1], seg[2], seg[3]
        length = np.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
        # epsilon keeps perfectly vertical segments from dividing by zero
        seg_slope = (y1 - y2) / ((x1 - x2) + 1e-6)
        if length > min_horiz_len and abs(seg_slope) < max_horiz_slope:
            horiz.append(seg)
        elif length > min_vert_len and abs(seg_slope) > min_vert_slope:
            vert.append(seg)
    return np.array(horiz), np.array(vert)
def slope(x1: float, y1: float, x2: float, y2: float) -> float:
    """Slope of the segment (x1, y1)-(x2, y2).

    Vertical segments are mapped to the sentinel value 1000000. so that
    callers can divide by the result without a ZeroDivisionError.
    """
    dx = x2 - x1
    if dx == 0:
        return 1000000.
    return (y2 - y1) / dx
def get_baseline_vertical(horizontal_lines: np.ndarray,
                          min_separation: int = 30,
                          max_separation: int = 100) -> Tuple[np.ndarray, np.ndarray]:
    """Pick the y-coordinates of the baseline and the service line.

    The baseline is the lowest (largest-y) horizontal segment; the service
    line is the lowest segment whose distance from the baseline lies in
    ``(min_separation, max_separation]``. Missing lines are reported as 0.
    """
    if len(horizontal_lines) == 0:
        return 0, 0
    ys = [line[1] for line in horizontal_lines]
    base = max(ys)
    serve_candidates = [y for y in ys
                        if min_separation < abs(y - base) <= max_separation]
    if not serve_candidates:
        return base, 0
    return base, max(serve_candidates)
def get_sidelines(vertical_lines: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    """Identify the right and left sidelines among candidate vertical segments.

    Segments are ranked by where they intersect the bottom of a 360px-high
    frame; the right sideline must slope positively, the left negatively.
    Returns ``(right, left)``; both are zero vectors when detection fails.
    """
    if len(vertical_lines) == 0:
        return np.zeros(4), np.zeros(4)
    # x-position of each segment extrapolated down to y = 360
    intercepts = [l[0] + (360 - l[1]) / slope(*l) for l in vertical_lines]
    order = np.argsort(intercepts)[::-1]  # right-most first
    # right sideline should have positive slope, left negative
    pos = [i for i in order if slope(*vertical_lines[i]) > 0]
    neg = [i for i in order if slope(*vertical_lines[i]) <= 0]
    if not pos or not neg:
        return np.zeros(4), np.zeros(4)
    max_intercept = intercepts[pos[0]]
    min_intercept = intercepts[neg[-1]]
    # Group segments whose intercepts lie within 20px of each extreme
    # (TODO: their slopes should be similar too), then take the median
    # candidate of each group to damp outliers.
    right_candidates = [i for i in pos if max_intercept - intercepts[i] < 20]
    left_candidates = [i for i in neg[::-1] if intercepts[i] - min_intercept < 20]
    right = vertical_lines[right_candidates[len(right_candidates) // 2]]
    left = vertical_lines[left_candidates[len(left_candidates) // 2]]
    return right, left
def get_keypoints_horizontal(y_base, y_serve, left_sideline, right_sideline):
    """Intersect the base and service lines with both sidelines.

    :return: x-coordinates ``(base_left, base_right, serve_right, serve_left)``
    """
    def x_at(sideline, y):
        # x where the (extended) sideline segment crosses height y
        return sideline[0] + (y - sideline[1]) / slope(*sideline)

    return (x_at(left_sideline, y_base),
            x_at(right_sideline, y_base),
            x_at(right_sideline, y_serve),
            x_at(left_sideline, y_serve))
def get_top_corner(x_base: float,
                   y_base: float,
                   x_serve: float,
                   y_serve: float,
                   x_base_opp: float) -> Tuple[float, float]:
    """Extrapolate a top (far) court corner from the near baseline/service
    corners on one side plus the opposite baseline corner.

    Uses the perspective vanishing point implied by the two sidelines and
    the known real-world court length/width ratio to place the far corner.
    """
    w = abs(x_base - x_base_opp)
    # slope of the sideline through the base and serve corners
    m = 1 * slope(x_base, y_base, x_serve, y_serve)
    # NOTE(review): x4 and x3 are computed but never used — presumably
    # leftovers from an earlier formulation; confirm before removing.
    x4 = x_base + (y_serve - y_base) * 1 / m
    x3 = x_base_opp - (y_serve - y_base) * 1 / m
    # V = apparent vanishing point of the sidelines (court mid-line apex)
    Vy = y_base - w / 2 * -m
    Vx = x_base + w / 2
    AV = np.sqrt((x_base - Vx)**2 + (y_base - Vy)**2)
    AB = np.sqrt((x_base - x_serve)**2 + (y_base - y_serve)**2)
    C = 78. / 60  # dimensions of a real tennis court
    # distance from the service corner to the far corner along the sideline,
    # derived from the cross-ratio with the real-world length ratio C
    l = (AV - AB) * AB / (AV * C - (AV - AB))
    theta = np.arctan(-1 / m)
    x_top = x_serve + l * np.sin(theta)
    y_top = y_serve - l * np.cos(theta)
    return x_top, y_top
def get_court_for_frame(frame: np.ndarray,
                        court_crop_x: List[float],
                        court_crop_y: List[float],
                        min_horiz_line_dist: int,
                        min_vert_line_dist: int,
                        min_horiz_slope: float,
                        max_horiz_slope: float,
                        max_baseline_offset: float,
                        dilate_edges: bool = False) -> List[float]:
    """Detect the four visible court corners in a single RGB frame.

    Pipeline: crop+edge-detect -> probabilistic Hough transform ->
    classify segments -> locate baseline/service line and sidelines ->
    intersect and extrapolate the far corners.

    :return: ``[x1, y1, x2, y2, x5, y5, x6, y6]`` (near-left, near-right,
        far-right, far-left corners), or all zeros when detection fails
        at any stage.
    """
    im_h, im_w = frame.shape[:2]
    crop_points = np.array([court_crop_x, court_crop_y], dtype=np.int32).T
    masked_image = mask_image(frame.astype(np.uint8), crop_points, dilate=dilate_edges)
    lines = cv2.HoughLinesP(
        masked_image.astype(np.uint8),
        rho=6,
        theta=np.pi / 60,
        threshold=160,
        lines=np.array([]),
        minLineLength=40,
        maxLineGap=25
    )
    # All-zero box is the "no court found" sentinel used throughout.
    court = [0] * 8
    if lines is None:
        return court
    lines = lines[:, 0, :]
    horizontal_lines, vertical_lines = get_lines(lines,
                                                 min_horiz_len=min_horiz_line_dist,
                                                 min_vert_len=min_vert_line_dist,
                                                 min_vert_slope=min_vert_slope,
                                                 max_horiz_slope=max_horiz_slope)
    if len(horizontal_lines) < 2 or len(vertical_lines) < 2:
        return court
    y_base, y_serve = get_baseline_vertical(horizontal_lines, min_separation=30)
    # Reject frames where the detected baseline sits too far from the
    # bottom of the image (likely a false detection).
    if im_h - y_base > max_baseline_offset or y_base == 0 or y_serve == 0:
        return court
    right_sideline, left_sideline = get_sidelines(vertical_lines)
    if not right_sideline.any() or not left_sideline.any():
        return court
    x1, x2, x3, x4 = get_keypoints_horizontal(y_base, y_serve, left_sideline, right_sideline)
    y1, y2, y3, y4 = y_base, y_base, y_serve, y_serve
    # Extrapolate the two far corners from each side's near geometry.
    x6, y6 = get_top_corner(x1, y1, x4, y4, x2)
    x5, y5 = get_top_corner(x2, y2, x3, y3, x1)
    return [float(x) for x in [x1, y1, x2, y2, x5, y5, x6, y6]]
def get_court_keypoints(frames: List[Path],
                        mask: np.ndarray,
                        court_crop_x: List[float],
                        court_crop_y: List[float],
                        min_horiz_line_dist: int,
                        min_vert_line_dist: int,
                        min_vert_slope: float,
                        max_horiz_slope: float,
                        max_baseline_offset: float,
                        dilate_edges: bool = False) -> List[List[float]]:
    """Detect court keypoints for every frame image on disk.

    :param frames: paths to frame images
    :param mask: per-frame action mask (only used for the debug summary)
    :return: one 8-element keypoint list per frame (all zeros = invalid)
    """
    court_boxes = []
    num_invalid = 0
    for frame in frames:
        img = cv2.imread(str(frame))
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        court = get_court_for_frame(img, court_crop_x, court_crop_y, min_horiz_line_dist,
                                    min_vert_line_dist, min_vert_slope, max_horiz_slope,
                                    max_baseline_offset, dilate_edges)
        if not any(court):
            # get_court_for_frame signals failure with an all-zero box.
            # Fix: the original never incremented this counter, so the
            # debug line below always reported 0 invalid frames.
            num_invalid += 1
        court_boxes.append(court)
    logging.debug(f"{num_invalid}/{np.sum(mask)} were invalid.")
    return court_boxes
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--mask-path", type=str)
    parser.add_argument("--frames-path", type=str)
    parser.add_argument("--save-path", type=str, default=None)
    parser.add_argument("--meta-file", type=str, default=None)
    args = parser.parse_args()
    fileConfig('logging_config.ini')
    # Per-match detection parameters live in the JSON meta file, keyed
    # by the match name (the stem of the frames directory).
    with open(args.meta_file, 'r') as f:
        match_metas = json.load(f)
    mask_path = Path(args.mask_path)
    frames_path = Path(args.frames_path)
    save_path = Path(args.save_path)
    if not save_path.parent.exists():
        save_path.parent.mkdir()
    # Action mask: one boolean-ish flag per frame from a prior pipeline step.
    mask = utils.read_json_lines(mask_path)
    mask = np.array([x['action'] for x in mask])
    match_name = frames_path.stem
    match_meta = match_metas[match_name]
    frame_list = list(sorted(frames_path.iterdir()))
    court_boxes: List[List[float]] = get_court_keypoints(frame_list, mask, match_meta['court_crop']['x'],
                                                         match_meta['court_crop']['y'],
                                                         match_meta['min_horiz_line_dist'],
                                                         match_meta['min_vert_line_dist'],
                                                         match_meta['min_vert_slope'],
                                                         match_meta['max_horiz_slope'],
                                                         match_meta['max_baseline_offset'],
                                                         match_meta['dilate_edges'])
    # Emit one JSON line per frame: {"filename": ..., "court": [...]}.
    json_lines = []
    for f, coords in zip(frame_list, court_boxes):
        json_lines.append({'filename': str(f), 'court': coords})
    utils.write_json_lines(json_lines, save_path)
|
11503679
|
import os
from typing import Optional
# Optional path for the debug adapter's log file; None disables file logging.
LOG_FILENAME: Optional[str] = os.getenv("ROBOCORP_CODE_DAP_LOG_FILENAME", None)

# Make sure that the log level is an int; fall back to a verbose level (3)
# when the environment variable holds something that is not a number.
try:
    LOG_LEVEL = int(os.getenv("ROBOCORP_CODE_DAP_LOG_LEVEL", "0"))
except ValueError:
    # Fix: narrowed from a bare `except:` — int(str) can only raise
    # ValueError here, and a bare except also swallows KeyboardInterrupt.
    LOG_LEVEL = 3

DEBUG = LOG_LEVEL > 1

# Terminal kinds accepted in launch configurations.
TERMINAL_NONE = "none"
TERMINAL_INTEGRATED = "integrated"
TERMINAL_EXTERNAL = "external"
VALID_TERMINAL_OPTIONS = [TERMINAL_NONE, TERMINAL_INTEGRATED, TERMINAL_EXTERNAL]

MAIN_THREAD_ID = 1
|
11503691
|
from django.apps import AppConfig
class FollowSystemConfig(AppConfig):
    """Django application configuration for the follow_system app."""
    name = "apps.follow_system"
    label = "follow_system"
|
11503722
|
import argparse
import asyncio
import logging
import os
import ssl
from .admin_server import AdminServer
from .gateway_server import GatewayServer
async def _main():
    """Parse CLI arguments, build the SSL contexts, and start the gateway
    server (plus the admin server when --admin-port is given).
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument('--host', default='127.0.0.1', help='address to listen on')
    parser.add_argument('--port', default=9000, type=int, help='port to listen on')
    parser.add_argument(
        '--certificate', metavar='PATH', help='path to the SSL certificate. Enables SSL'
    )
    parser.add_argument('--private-key', metavar='PATH')
    parser.add_argument(
        '--ca',
        metavar='PATH',
        help='path to the CA certificate. Enables SSL client auth',
    )
    parser.add_argument(
        '--token-auth',
        action='store_true',
        help='enables token based auth using the token from the TABBY_AUTH_TOKEN env var',
    )
    parser.add_argument(
        '--no-auth',
        action='store_true',
        help='disables authentication completely. WARNING: this means your gateway is free-for-all!',
    )
    parser.add_argument(
        '--admin-host',
        default='127.0.0.1',
        help='address to listen on for management requests',
    )
    parser.add_argument(
        '--admin-port',
        type=int,
        help='port to listen on for management requests',
    )
    parser.add_argument(
        '--admin-certificate',
        metavar='PATH',
        help='path to the SSL certificate for the management server',
    )
    parser.add_argument('--admin-private-key', metavar='PATH')
    parser.add_argument(
        '--admin-ca',
        metavar='PATH',
        help='path to the CA certificate for the management server',
    )
    args = parser.parse_args()
    # Cross-validate mutually dependent options before doing any work.
    if args.certificate and not args.private_key:
        parser.error('--private-key is required if --certificate is set')
    if args.token_auth and not os.getenv('TABBY_AUTH_TOKEN'):
        parser.error('TABBY_AUTH_TOKEN must be provided when using --token-auth')
    if not args.token_auth and not args.no_auth and not args.admin_port:
        parser.error('either --no-auth, --token-auth or --admin-port must be provided')
    if args.admin_port:
        if (
            not args.admin_ca or
            not args.admin_certificate or
            not args.admin_private_key
        ):
            parser.error(
                '--admin-ca, --admin-certificate and --admin-private-key are all required when --admin-port is set'
            )
    logging.basicConfig(level=logging.INFO)
    # Gateway SSL: server cert always loaded; client-cert verification is
    # only enforced when a CA bundle is supplied.
    ssl_context = None
    if args.certificate:
        ssl_context = ssl.create_default_context(
            purpose=ssl.Purpose.CLIENT_AUTH,
            cafile=os.path.realpath(args.ca) if args.ca else None,
        )
        ssl_context.load_cert_chain(
            os.path.realpath(args.certificate),
            os.path.realpath(args.private_key),
        )
        if args.ca:
            ssl_context.verify_mode = ssl.CERT_REQUIRED
    # Admin SSL: client certificates are always required here.
    admin_ssl_context = None
    if args.admin_ca:
        admin_ssl_context = ssl.create_default_context(
            purpose=ssl.Purpose.CLIENT_AUTH,
            cafile=os.path.realpath(args.admin_ca),
        )
        admin_ssl_context.load_cert_chain(
            os.path.realpath(args.admin_certificate),
            os.path.realpath(args.admin_private_key),
        )
        admin_ssl_context.verify_mode = ssl.CERT_REQUIRED
    await GatewayServer(
        host=args.host,
        port=args.port,
        ssl=ssl_context,
        auth_token=os.getenv('TABBY_AUTH_TOKEN'),
        disable_auth=args.no_auth,
    ).start()
    if args.admin_port:
        await AdminServer(
            host=args.admin_host,
            port=args.admin_port,
            ssl=admin_ssl_context,
        ).start()
def main():
    """Synchronous entry point: start the servers, then serve forever."""
    # run_until_complete starts the listeners; run_forever then keeps the
    # event loop (and the open sockets) alive until interrupted.
    asyncio.get_event_loop().run_until_complete(_main())
    try:
        asyncio.get_event_loop().run_forever()
    except KeyboardInterrupt:
        # Immediate exit, skipping atexit/cleanup handlers on Ctrl+C.
        os._exit(0)
if __name__ == '__main__':
    main()
|
11503781
|
import re
from tps import modules as md
from tps.symbols import valid_symbols_map
from tps.utils import cleaners
class Lower(md.Processor):
    """Text processor that lower-cases the whole input string."""
    def process(self, string: str, **kwargs) -> str:
        return string.lower()
class Cleaner(md.Processor):
    """Text processor that removes characters outside the charset's valid
    symbol set and collapses the resulting runs of whitespace."""
    def __init__(self, charset):
        super().__init__()
        self.charset = charset
        # Regex matching any single character NOT in the allowed symbol
        # set for this charset (character-class negation).
        self._invalid_charset = re.compile(
            "[^{}]".format(
                "".join(sorted(set(valid_symbols_map[self.charset])))
            )
        )
    def process(self, string: str, **kwargs) -> str:
        string = cleaners.invalid_charset_cleaner(string, self._invalid_charset)
        string = cleaners.collapse_whitespace(string) # need to clean multiple white spaces that have appeared
        return string
|
11503802
|
import logging
import jwt
import importlib
from typing import List, Iterable
from abc import ABC, ABCMeta, abstractmethod
from aiohttp import web, hdrs
from datetime import datetime, timedelta
from asyncdb.utils.models import Model
from cryptography import fernet
import base64
from navigator.conf import (
NAV_AUTH_USER,
NAV_AUTH_GROUP,
JWT_ALGORITHM,
USER_MAPPING,
SESSION_TIMEOUT,
CREDENTIALS_REQUIRED,
SESSION_KEY,
SECRET_KEY
)
from navigator.exceptions import (
NavException,
UserDoesntExists,
InvalidAuth
)
from navigator.functions import json_response
from aiohttp.web_urldispatcher import SystemRoute
# Session lifetime in seconds, reused as the default JWT expiration delta.
JWT_EXP_DELTA_SECONDS = int(SESSION_TIMEOUT)
# Request paths that bypass the authorization backends entirely.
exclude_list = (
    "/static/",
    "/api/v1/login",
    # "/api/v1/logout",
    "/login",
    # "/logout",
    "/signin",
    "/signout",
    "/_debug/",
)
class BaseAuthBackend(ABC):
    """Abstract base for authentication backends.

    Provides user/group model loading, JWT creation/decoding, and the
    authorization-backend dispatch loop; subclasses implement
    ``check_credentials``.
    """
    _session = None
    user_model: Model = None
    group_model: Model = None
    user_property: str = "user"
    user_attribute: str = "user"
    password_attribute: str = "password"
    userid_attribute: str = "user_id"
    username_attribute: str = "username"
    user_mapping: dict = {"user_id": "id", "username": "username"}
    session_key_property: str = SESSION_KEY
    credentials_required: bool = False
    scheme: str = "Bearer"
    _authz_backends: List = []
    # NOTE(review): `user_mapping` is re-declared here and overrides the
    # mapping literal above; the effective value is set from USER_MAPPING
    # in __init__ anyway — confirm and remove one of the two.
    user_mapping: dict = {}
    def __init__(
        self,
        user_property: str = "user",
        user_attribute: str = "user",
        userid_attribute: str = "user_id",
        username_attribute: str = "username",
        credentials_required: bool = False,
        authorization_backends: tuple = (),
        **kwargs,
    ):
        # force using of credentials
        self.credentials_required = credentials_required
        self.user_property = user_property
        self.user_attribute = user_attribute
        self.userid_attribute = userid_attribute
        self.username_attribute = username_attribute
        # authentication scheme
        # NOTE(review): raises KeyError when 'scheme' is not passed even
        # though the class declares a "Bearer" default — consider
        # kwargs.get("scheme", self.scheme).
        self._scheme = kwargs["scheme"]
        # configuration Authorization Backends:
        self._authz_backends = authorization_backends
        # user and group models
        # getting User and Group Models
        self.user_model = self.get_model(NAV_AUTH_USER)
        self.group_model = self.get_model(NAV_AUTH_GROUP)
        self.user_mapping = USER_MAPPING
        # When no SECRET_KEY is configured, generate an ephemeral key —
        # tokens will not survive a process restart in that case.
        if not SECRET_KEY:
            fernet_key = fernet.Fernet.generate_key()
            self.secret_key = base64.urlsafe_b64decode(fernet_key)
        else:
            self.secret_key = SECRET_KEY
    def get_model(self, model, **kwargs):
        """Import and return the class named by a dotted path string."""
        try:
            parts = model.split(".")
            name = parts[-1]
            classpath = ".".join(parts[:-1])
            module = importlib.import_module(classpath, package=name)
            obj = getattr(module, name)
            return obj
        except ImportError:
            raise Exception(f"Error loading Auth Model {model}")
    async def get_user(self, **search):
        """Getting User Object."""
        # TODO: getting Groups based on User
        try:
            user = await self.user_model.get(**search)
        except Exception as err:
            logging.error(f"Error getting User {search!s}")
            raise Exception(err)
        # if not exists, return error of missing
        if not user:
            raise UserDoesntExists(f"User doesnt exists")
        return user
    def get_userdata(self, user):
        """Project a user record through user_mapping, omitting the password."""
        userdata = {}
        for name, item in self.user_mapping.items():
            if name != self.password_attribute:
                userdata[name] = user[item]
        return userdata
    def configure(self, app, router):
        """Base configuration for Auth Backends, need to be extended
        to create Session Object."""
        pass
    async def authorization_backends(self, app, handler, request):
        """Run the configured authorization backends for a request.

        Returns the handler's (or a coroutine of its) response when the
        request is allowed or exempt, None when no backend authorized it.
        """
        if isinstance(request.match_info.route, SystemRoute):  # eg. 404
            return await handler(request)
        # avoid authorization on exclude list
        if request.path in exclude_list:
            return handler(request)
        # avoid authorization backend on excluded methods:
        if request.method == hdrs.METH_OPTIONS:
            return handler(request)
        # logic for authorization backends
        for backend in self._authz_backends:
            if await backend.check_authorization(request):
                return handler(request)
        return None
    def create_jwt(
        self,
        issuer: str = None,
        expiration: int = None,
        data: dict = None
    ) -> str:
        """Creation of JWT tokens based on basic parameters.
        issuer: for default, urn:Navigator
        expiration: in seconds
        **kwargs: data to put in payload
        """
        if not expiration:
            expiration = JWT_EXP_DELTA_SECONDS
        if not issuer:
            issuer = "urn:Navigator"
        payload = {
            "exp": datetime.utcnow() + timedelta(seconds=expiration),
            "iat": datetime.utcnow(),
            "iss": issuer,
            **data,
        }
        jwt_token = jwt.encode(
            payload,
            self.secret_key,
            JWT_ALGORITHM,
        )
        return jwt_token
    def decode_token(self, request, issuer: str = None):
        """Extract and decode the JWT from a request's Authorization header.

        Returns ``[tenant, payload]``; tenant is the prefix of a
        ``tenant:token`` style credential (None for plain tokens).
        """
        jwt_token = None
        tenant = None
        id = None
        payload = None
        if not issuer:
            issuer = "urn:Navigator"
        if "Authorization" in request.headers:
            try:
                scheme, id = (
                    request.headers.get("Authorization").strip().split(" ", 1)
                )
            except ValueError:
                raise NavException(
                    "Invalid authorization Header",
                    state=400
                )
            if scheme != self._scheme:
                raise NavException(
                    "Invalid Authorization Scheme",
                    state=400
                )
            try:
                tenant, jwt_token = id.split(":")
            except Exception:
                # normal Token:
                jwt_token = id
            try:
                # NOTE(review): PyJWT expects issuer validation via the
                # `issuer=` keyword; `iss=` here is swallowed by **kwargs,
                # so the issuer may not actually be enforced — confirm.
                payload = jwt.decode(
                    jwt_token,
                    self.secret_key,
                    algorithms=[JWT_ALGORITHM],
                    iss=issuer,
                    leeway=30,
                )
                logging.debug(f"Decoded Token: {payload!s}")
                return [tenant, payload]
            except (jwt.exceptions.InvalidSignatureError):
                raise NavException("Invalid Signature Error")
            except (jwt.DecodeError) as err:
                raise NavException(f"Token Decoding Error: {err}", state=400)
            except jwt.InvalidTokenError as err:
                print(err)
                raise NavException(f"Invalid authorization token {err!s}")
            # NOTE(review): ExpiredSignatureError is a subclass of
            # InvalidTokenError in PyJWT, so this handler is unreachable —
            # it would need to be ordered before the one above.
            except (jwt.ExpiredSignatureError) as err:
                print(err)
                raise NavException(f"Token Expired {err!s}", state=403)
            except Exception as err:
                print(err)
                raise NavException(err, state=501)
        else:
            # No Authorization header: returns [None, None].
            return [tenant, payload]
    @abstractmethod
    async def check_credentials(self, request):
        """ Authenticate against user credentials (token, user/password)."""
        pass
|
11503811
|
import gym
import pytest
from utilities.Utility_Functions import flatten_action_id_to_actions
from utilities.data_structures.Config import Config
# Shared experiment configuration for HRL runs on the Taxi-v2 environment.
config = Config()
config.seed = 1
config.environment = gym.make("Taxi-v2")
config.env_parameters = {}
config.num_episodes_to_run = 1000
config.file_to_save_data_results = None
config.file_to_save_results_graph = None
config.show_solution_score = False
config.visualise_individual_results = False
config.visualise_overall_agent_results = True
config.standard_deviation_results = 1.0
config.runs_per_agent = 3
config.use_GPU = False
config.overwrite_existing_results_file = False
config.randomise_random_seed = True
config.save_model = False
# Hyperparameter values shared by the agents under test.
linear_hidden_units = [10, 5]
learning_rate = 0.01
buffer_size = 40000
batch_size = 256
batch_norm = False
embedding_dimensionality = 15
gradient_clipping_norm = 5
update_every_n_steps = 1
learning_iterations = 1
epsilon_decay_rate_denominator = 400
discount_rate = 0.99
tau = 0.01
sequitur_k = 10
config.hyperparameters = {
    "linear_hidden_units": linear_hidden_units,
    "learning_rate": learning_rate,
    "buffer_size": buffer_size,
    "batch_size": batch_size,
    "final_layer_activation": "None",
    # Column 0 (the discrete state id) is fed through an embedding layer.
    "columns_of_data_to_be_embedded": [0],
    "embedding_dimensions": [[config.environment.observation_space.n, embedding_dimensionality]],
    "batch_norm": batch_norm,
    "gradient_clipping_norm": gradient_clipping_norm,
    "update_every_n_steps": update_every_n_steps,
    "epsilon_decay_rate_denominator": epsilon_decay_rate_denominator,
    "discount_rate": discount_rate,
    "learning_iterations": learning_iterations,
    "tau": tau,
    "sequitur_k": sequitur_k,
    "action_length_reward_bonus": 0.1,
    "episodes_to_run_with_no_exploration": 10,
    "pre_training_learning_iterations_multiplier": 0.1,
    "copy_over_hidden_layers": True,
    "use_global_list_of_best_performing_actions": True
}
# hrl = HRL(config)
# def test_flatten_action_id_to_actions():
# """Tests flatten_action_id_to_actions"""
# action_id_to_actions = {0: (0,), 1:(1,), 2:(0, 1), 3: (2, 1), 4:(2, 3)}
# original_number_of_primitive_actions = 2
#
#
#
# flattened_action_id_to_actions = flatten_action_id_to_actions(action_id_to_actions, original_number_of_primitive_actions)
# assert flattened_action_id_to_actions == {0: (0,), 1:(1,), 2:(0, 1), 3: (0, 1, 1), 4:(0, 1, 0, 1, 1)}, flattened_action_id_to_actions
#
# action_id_to_actions = {0: (0,), 1:(1,), 2:(2,)}
# original_number_of_primitive_actions = 3
# flattened_action_id_to_actions = flatten_action_id_to_actions(action_id_to_actions, original_number_of_primitive_actions)
# assert flattened_action_id_to_actions == action_id_to_actions
#
# with pytest.raises(AssertionError):
# action_id_to_actions = {0: (0,), 1: (1,), 2: (2,)}
# original_number_of_primitive_actions = 4
# flattened_action_id_to_actions = flatten_action_id_to_actions(action_id_to_actions,
# original_number_of_primitive_actions)
# with pytest.raises(AssertionError):
# action_id_to_actions = {0: (0,), 1: (1,), 2: (2, 2)}
# original_number_of_primitive_actions = 3
# flattened_action_id_to_actions = flatten_action_id_to_actions(action_id_to_actions,
# original_number_of_primitive_actions)
|
11503813
|
import numpy as np
def np_to_string(n):
    """
    Convert a one dimensional numpy array into a space-separated string
    to be stored in db.

    Fixes two defects of the previous ``np.array_str(...)[1:-1]``
    approach: arrays above numpy's print threshold were elided with
    '...' (corrupting the stored data), and single-element arrays
    squeezed down to 0-d, so the bracket-stripping slice cut off digits
    instead of brackets.
    """
    # Squeeze out dims, but keep at least rank 1 so iteration works.
    arr = np.atleast_1d(np.squeeze(np.asarray(n)))
    return ' '.join(str(x) for x in arr.tolist())
def string_to_np(s):
    """Parse a whitespace-separated numeric string into a 1-D float array."""
    return np.fromstring(s, dtype=float, sep=' ')
|
11503831
|
from typing import Set, Dict, List, NamedTuple
import joblib
from transformers.tokenization_utils import PreTrainedTokenizer
from allennlp.data import Token
from allennlp.common import FromParams
from luke.utils.entity_vocab import EntityVocab, Entity
from luke.utils.interwiki_db import InterwikiDB
from .entity_db import EntityDB
from .wiki_link_db import WikiLinkDB
class Mention(NamedTuple):
    """An entity mention covering the token span [start, end) of a text."""
    entity: Entity
    start: int
    end: int
class WikiMentionDetector(FromParams):
    """
    Detect entity mentions in text from Wikipedia articles.
    """
    def __init__(
        self,
        wiki_link_db_path: str,
        model_redirect_mappings_path: str,
        link_redirect_mappings_path: str,
        entity_vocab_path: str,
        source_language: str = "en",
        inter_wiki_path: str = None,
        multilingual_entity_db_path: Dict[str, str] = None,
        min_mention_link_prob: float = 0.01,
        max_mention_length: int = 10,
    ):
        # Tokenizer is injected later via set_tokenizer().
        self.tokenizer = None
        self.wiki_link_db = WikiLinkDB(wiki_link_db_path)
        self.model_redirect_mappings: Dict[str, str] = joblib.load(model_redirect_mappings_path)
        self.link_redirect_mappings: Dict[str, str] = joblib.load(link_redirect_mappings_path)
        self.source_language = source_language
        # InterwikiDB is only needed to translate titles when detecting
        # mentions in a language other than source_language.
        if inter_wiki_path is not None:
            self.inter_wiki_db = InterwikiDB.load(inter_wiki_path)
        else:
            self.inter_wiki_db = None
        self.entity_vocab = EntityVocab(entity_vocab_path)
        multilingual_entity_db_path = multilingual_entity_db_path or {}
        self.entity_db_dict = {lang: EntityDB(path) for lang, path in multilingual_entity_db_path.items()}
        self.min_mention_link_prob = min_mention_link_prob
        self.max_mention_length = max_mention_length
    def set_tokenizer(self, tokenizer: PreTrainedTokenizer):
        """Inject the tokenizer used to reconstruct mention surface forms."""
        self.tokenizer = tokenizer
    def get_mention_candidates(self, title: str) -> Dict[str, str]:
        """
        Returns a dict of [mention, entity (title)]
        """
        title = self.link_redirect_mappings.get(title, title)
        # mention_to_entity
        mention_candidates: Dict[str, str] = {}
        ambiguous_mentions: Set[str] = set()
        for link in self.wiki_link_db.get(title):
            if link.link_prob < self.min_mention_link_prob:
                continue
            link_text = self._normalize_mention(link.text)
            # The same surface form pointing at different titles is
            # ambiguous and gets discarded entirely below.
            if link_text in mention_candidates and mention_candidates[link_text] != link.title:
                ambiguous_mentions.add(link_text)
                continue
            mention_candidates[link_text] = link.title
        for link_text in ambiguous_mentions:
            del mention_candidates[link_text]
        return mention_candidates
    def _detect_mentions(self, tokens: List[str], mention_candidates: Dict[str, str], language: str) -> List[Mention]:
        """Greedy longest-match scan of the token sequence against the
        candidate surface forms; overlapping matches are skipped."""
        mentions = []
        cur = 0
        for start, token in enumerate(tokens):
            if start < cur:
                continue
            # Try the longest window first, shrinking until a match is found.
            for end in range(min(start + self.max_mention_length, len(tokens)), start, -1):
                mention_text = self.tokenizer.convert_tokens_to_string(tokens[start:end])
                mention_text = self._normalize_mention(mention_text)
                if mention_text in mention_candidates:
                    cur = end
                    title = mention_candidates[mention_text]
                    title = self.model_redirect_mappings.get(title, title)  # resolve mismatch between two dumps
                    if self.entity_vocab.contains(title, language):
                        mention = Mention(Entity(title, language), start, end)
                        mentions.append(mention)
                    break
        return mentions
    def detect_mentions(self, tokens: List[Token], title: str, language: str) -> List[Mention]:
        """Detect mentions in `tokens`, translating candidate titles via
        the interwiki DB when `language` differs from the source language."""
        if self.tokenizer is None:
            raise RuntimeError("self.tokenizer is None. Did you call self.set_tokenizer()?")
        source_mention_candidates = self.get_mention_candidates(title)
        if language == self.source_language:
            target_mention_candidates = source_mention_candidates
        else:
            if self.inter_wiki_db is None:
                raise ValueError(
                    f"You need InterWikiDB to detect mentions from other languages except for {self.source_language}."
                )
            source_entities = list(source_mention_candidates.values())
            target_entities = []
            for ent in source_entities:
                translated_ent = self.inter_wiki_db.get_title_translation(ent, self.source_language, language)
                if translated_ent is not None:
                    target_entities.append(translated_ent)
            # Rebuild the mention->entity map from the target-language
            # entity DB for the translated titles.
            target_mention_candidates = {}
            for target_entity in target_entities:
                for entity, mention, count in self.entity_db_dict[language].query(target_entity):
                    target_mention_candidates[mention] = entity
        target_mentions = self._detect_mentions([t.text for t in tokens], target_mention_candidates, language)
        return target_mentions
    @staticmethod
    def _normalize_mention(text: str):
        # NOTE(review): split(" ")/join(" ") is an identity transform, so
        # this effectively only lower-cases and strips; the intent was
        # presumably split() to also collapse internal whitespace — confirm.
        return " ".join(text.lower().split(" ")).strip()
    def mentions_to_entity_features(self, tokens: List[Token], mentions: List[Mention]) -> Dict:
        """Convert mentions into the parallel id/mask/position/type-id
        lists expected by the model; a single padding entry is emitted
        when there are no mentions."""
        if len(mentions) == 0:
            entity_ids = [0]
            entity_type_ids = [0]
            entity_attention_mask = [0]
            entity_position_ids = [[-1 for y in range(self.max_mention_length)]]
        else:
            entity_ids = [0] * len(mentions)
            entity_type_ids = [0] * len(mentions)
            entity_attention_mask = [1] * len(mentions)
            entity_position_ids = [[-1 for y in range(self.max_mention_length)] for x in range(len(mentions))]
            for i, (entity, start, end) in enumerate(mentions):
                entity_ids[i] = self.entity_vocab.get_id(entity.title, entity.language)
                entity_position_ids[i][: end - start] = range(start, end)
                if tokens[start].type_id is not None:
                    entity_type_ids[i] = tokens[start].type_id
        return {
            "entity_ids": entity_ids,
            "entity_attention_mask": entity_attention_mask,
            "entity_position_ids": entity_position_ids,
            "entity_type_ids": entity_type_ids,
        }
|
11503844
|
from .iob_to_docs import iob_to_docs # noqa: F401
from .conll_ner_to_docs import conll_ner_to_docs # noqa: F401
from .json_to_docs import json_to_docs # noqa: F401
from .conllu_to_docs import conllu_to_docs # noqa: F401
|
11503910
|
import cube
import unittest
class TestCube(unittest.TestCase):
    """Unit tests for cube.cube(n): expected to return n cubed."""

    def test_0(self):
        self.assertEqual(cube.cube(0), 0)

    def test_1(self):
        self.assertEqual(cube.cube(1), 1)

    def test_2(self):
        self.assertEqual(cube.cube(2), 8)

    def test_3(self):
        self.assertEqual(cube.cube(3), 27)

    def test_no_arguments(self):
        # Missing the required argument must raise TypeError.
        with self.assertRaises(TypeError):
            cube.cube()

    def test_exception_str(self):
        # Non-numeric input must raise TypeError.
        with self.assertRaises(TypeError):
            cube.cube('x')
# Run the suite when this file is executed directly (e.g. `python test_cube.py`).
if __name__ == '__main__':
    unittest.main()
|
11503921
|
import os
import subprocess
import CheckPython
# Make sure everything we need is installed
CheckPython.ValidatePackages()
# NOTE(review): Vulkan is deliberately imported only after the package check
# above has run — presumably it depends on those packages; confirm.
import Vulkan
# Change from Scripts directory to root
os.chdir('../')
# Warn (but do not abort) when the Vulkan SDK or its debug libs are missing.
if (not Vulkan.CheckVulkanSDK()):
    print("Vulkan SDK not installed.")
if (not Vulkan.CheckVulkanSDKDebugLibs()):
    print("Vulkan SDK debug libs not found.")
|
11503928
|
import numpy
from matchms import Spectrum
from matchms.filtering import correct_charge
def test_correct_charge_no_ionmode():
    """Test if no charge is added for empty ionmode."""
    mz = numpy.array([], dtype='float')
    intensities = numpy.array([], dtype='float')
    spectrum = correct_charge(Spectrum(mz=mz, intensities=intensities, metadata={}))
    assert spectrum.get("charge") == 0, "Expected zero charge value."
def test_correct_charge_add_charge():
    """Test if charge is corrected as expected."""
    mz = numpy.array([], dtype='float')
    intensities = numpy.array([], dtype='float')
    spectrum = correct_charge(
        Spectrum(mz=mz, intensities=intensities, metadata={"ionmode": "positive"}))
    assert spectrum.get("charge") == 1, "Expected different charge value."
def test_correct_charge_sign_plus_to_min():
    """Test if charge is corrected as expected."""
    mz = numpy.array([], dtype='float')
    intensities = numpy.array([], dtype='float')
    metadata = {"ionmode": "negative", "charge": 2}
    spectrum = correct_charge(Spectrum(mz=mz, intensities=intensities, metadata=metadata))
    assert spectrum.get("charge") == -2, "Expected different charge value."
def test_correct_charge_sign_min_to_plus():
    """Test if charge is corrected as expected."""
    mz = numpy.array([], dtype='float')
    intensities = numpy.array([], dtype='float')
    metadata = {"ionmode": "positive", "charge": -2}
    spectrum = correct_charge(Spectrum(mz=mz, intensities=intensities, metadata=metadata))
    assert spectrum.get("charge") == 2, "Expected different charge value."
def test_correct_charge_empty_spectrum():
    """A None spectrum must be passed through unchanged."""
    assert correct_charge(None) is None, "Expected different handling of None spectrum."
|
11503942
|
if __name__ == '__main__':
    # Fix: use a context manager so the file handle is always closed
    # (the original opened the file and never closed it).
    with open('downloaded_summaries/all_text.txt', 'r') as f:
        s = f.read()
    # Summaries are delimited by this sentinel marker in the combined file.
    summaries = s.split('---###<summary>###---')
    print(len(summaries))
|
11503959
|
from geosnap import Community
import numpy as np
columns = ["median_household_income", "p_poverty_rate", "p_unemployment_rate"]
# Module-level fixture shared by the tests below: build the MSA 39900 community,
# harmonize it to the 2010 tract geography, then k-means cluster into 3 groups.
reno = Community.from_census(msa_fips="39900")
reno = reno.harmonize(intensive_variables=columns, target_year=2010, allocate_total=True, extensive_variables=['n_total_pop'])
reno = reno.cluster(columns=columns, method='kmeans', n_clusters=3)
def test_single_simulation():
    # One simulated time step forward from 2010.
    simulated = reno.simulate(model_name='kmeans', base_year=2010, time_steps=1)
    # NOTE(review): the multi-step sibling asserts `simulated.gdf.shape`; here the
    # result is indexed directly — confirm simulate()'s return type for time_steps=1.
    assert simulated.shape == (107, 3)
def test_multi_simulation():
    # Default number of time steps; result exposes a GeoDataFrame via .gdf.
    simulated = reno.simulate(model_name='kmeans', base_year=2010)
    assert simulated.gdf.shape == (428, 4)
|
11503984
|
from HABApp.openhab.connection_handler.http_connection import is_disconnect_exception
def test_aiohttp_sse_client_exceptions():
    """Every connection-related exception must be classified as a disconnect.

    Fix: renamed the local `list` — it shadowed the builtin of the same name.
    """
    exception_types = [ConnectionError, ConnectionRefusedError, ConnectionAbortedError]
    for exc_type in exception_types:
        # Raise-and-catch so is_disconnect_exception sees a live instance.
        try:
            raise exc_type()
        except Exception as e:
            assert is_disconnect_exception(e)
|
11503988
|
from unittest import TestCase
from terraform_compliance.common.bdd_tags import look_for_bdd_tags
from terraform_compliance.common.exceptions import Failure
from tests.mocks import MockedStep, MockedTags
class TestBddTags(TestCase):
    """Tests for look_for_bdd_tags(): how scenario tags shape step.context."""

    def test_unchanged_step_object(self):
        # Without any tags, the failure-handling context keeps its defaults.
        step = MockedStep()
        look_for_bdd_tags(step)
        self.assertFalse(step.context.no_failure)
        self.assertIsNone(step.context.failure_class)

    def test_warning_case(self):
        # A 'warning' tag sets no_failure and records the failure class.
        step = MockedStep()
        step.all_tags = [MockedTags(name='warning')]
        look_for_bdd_tags(step)
        self.assertTrue(step.context.no_failure)
        self.assertEqual(step.context.failure_class, 'warning')
|
11503992
|
import unittest
from lsml.initializer.provided.threshold import ThresholdInitializer
class TestThresholdInitialize(unittest.TestCase):
    """ThresholdInitializer should recover a bright square on dark background."""

    def test_square_image(self):
        import numpy as np
        random_state = np.random.RandomState(1234)
        # 5x5 foreground square padded by 5 background pixels on each side.
        mask = np.pad(np.ones((5, 5), dtype=bool), 5, 'constant')
        image = np.zeros(mask.shape)
        # Foreground ~ 1 + 0.5*N(0,1); background in (-1.5, -1].
        # NOTE(review): randn vs rand asymmetry between the two fills looks
        # intentional (noisy foreground, strictly-negative background) — confirm.
        image[mask] = 1 + 0.5*random_state.randn(mask.sum())
        image[~mask] = -1 - 0.5*random_state.rand((~mask).sum())
        initializer = ThresholdInitializer(sigma=0)
        u0, _, _ = initializer(image)
        # The positive region of the level-set init must match the square exactly.
        self.assertTrue((mask == (u0 > 0)).all())
|
11504006
|
from ....Functions.Geometry.comp_flower_arc import comp_flower_arc
from ....Classes.Arc1 import Arc1
from ....Classes.Segment import Segment
from ....Methods import ParentMissingError
from numpy import pi, sqrt, exp
def get_bore_line(self, prop_dict=None):
    """Return the bore line description
    Parameters
    ----------
    self : BoreLSRPM
        A BoreLSRPM object
    prop_dict : dict
        Property dictionary to apply on the lines
    Returns
    -------
    bore_list : list
        List of bore lines
    """
    if self.parent is not None:
        Rbo = self.parent.get_Rbo()
    else:
        raise ParentMissingError("Error: The slot is not inside a Lamination")
    # Compute the shape
    # Half-angle of one bore pattern (N identical patterns around the bore)
    alpha1 = pi / self.N
    # Z1: pattern start point, on the bore radius at -alpha1
    Z1 = Rbo * exp(-1j * alpha1)
    # ZC1: arc center, Rarc inside the bore radius along the same direction
    ZC1 = (Rbo - self.Rarc) * exp(-1j * alpha1)
    XC1 = ZC1.real
    YC1 = ZC1.imag
    # Z2: intersection of the arc (center ZC1, radius Rarc) with the line y = -W1
    X2 = sqrt(self.Rarc ** 2 - (self.W1 + YC1) ** 2) + XC1
    Y2 = -self.W1
    Z2 = X2 + 1j * Y2
    # Z3/Z4 mirror Z2/Z1 across the x axis (the pattern is symmetric)
    Z3 = Z2.conjugate()
    Z4 = Z1.conjugate()
    # Create the lines: arc / flat segment / arc, rotated N times (plus self.alpha)
    bore_list = list()
    for ii in range(self.N):
        bore_list.append(
            Arc1(
                begin=Z1 * exp(1j * (2 * pi / self.N * (ii - 1) + self.alpha)),
                end=Z2 * exp(1j * (2 * pi / self.N * (ii - 1) + self.alpha)),
                radius=self.Rarc,
                is_trigo_direction=True,
                prop_dict=prop_dict,
            )
        )
        bore_list.append(
            Segment(
                Z2 * exp(1j * (2 * pi / self.N * (ii - 1) + self.alpha)),
                Z3 * exp(1j * (2 * pi / self.N * (ii - 1) + self.alpha)),
                prop_dict=prop_dict,
            )
        )
        bore_list.append(
            Arc1(
                begin=Z3 * exp(1j * (2 * pi / self.N * (ii - 1) + self.alpha)),
                end=Z4 * exp(1j * (2 * pi / self.N * (ii - 1) + self.alpha)),
                radius=self.Rarc,
                is_trigo_direction=True,
                prop_dict=prop_dict,
            )
        )
    return bore_list
|
11504015
|
import os, re
import numpy as np
import tensorflow as tf
# ---------------------------------------------------------------------------- #
# decorator
# ---------------------------------------------------------------------------- #
def tf_scope(func):
    """ decorator: automatically wrap a var scope

    The wrapped function gains `name=` and `reuse=` keyword arguments:
    - name only: create fresh variables under a new variable scope;
    - name + reuse: reuse variables from scope `reuse` while naming ops `name`;
    - reuse only: re-enter scope `reuse` with variable reuse;
    - neither: call through unscoped.
    """
    def scopped_func(*args, name=None, reuse=None, **kwargs):
        if name is not None and not reuse:
            with tf.variable_scope(name):
                return func(*args, **kwargs)
        elif name is not None and reuse:  # variable reuse, naming ops as desired
            # auxiliary_name_scope=False keeps op names under `name`, not `reuse`
            with tf.variable_scope(reuse, auxiliary_name_scope=False, reuse=True):
                with tf.name_scope(name):
                    return func(*args, **kwargs)
        elif reuse:  # variable reuse + naming ops as is re-enter the scope
            with tf.variable_scope(reuse, reuse=True):
                return func(*args, **kwargs)
        else:
            return func(*args, **kwargs)
    return scopped_func
def tf_device(func):
    """Decorator: run the wrapped call inside tf.device(device) when given."""
    def scopped_func(*args, device=None, **kwargs):
        if device is None:
            return func(*args, **kwargs)
        with tf.device(device):
            return func(*args, **kwargs)
    return scopped_func
def tf_Print(*args, summarize=100, **kwargs):
    """tf.Print pinned to CPU, with a larger default `summarize`.

    Fix: removed the former `if 'summarize' in kwargs` branch — it was dead
    code, because a `summarize=` keyword is always captured by the named
    parameter and can never appear in **kwargs.
    """
    with tf.device('/cpu:0'):
        return tf.Print(*args, summarize=summarize, **kwargs)
# ---------------------------------------------------------------------------- #
# helper func
# ---------------------------------------------------------------------------- #
def get_kr(config, stage_n, stage_i):
    """Select the kr setting for a stage: down/up sampling or the search stage."""
    assert stage_n in _valid_stage, f'invalid stage_n={stage_n}'
    if not stage_n:
        return config.kr_search[stage_i]
    if stage_n == 'down':
        return config.kr_sample[stage_i - 1]
    return config.kr_sample_up[stage_i]
def get_kwargs(block_cfg, config, is_training, act=False):
    """Assemble common layer kwargs, preferring block-level over model-level config.

    `act` semantics: True -> resolve from block/model config; any other truthy
    value -> use it directly as the activation; falsy -> omit the key.
    """
    def _prefer(block_val, model_val):
        # '' means "unset" at the block level
        return block_val if block_val != '' else model_val

    kwargs = {
        'is_training': is_training,
        'initializer': _prefer(block_cfg.init, config.init),
        'weight_decay': _prefer(block_cfg.wd, config.weight_decay),
        'bn_momentum': config.bn_momentum, 'bn_eps': config.bn_eps,
    }
    if block_cfg.bn != '' or config.bn != '':
        kwargs['bn'] = _prefer(block_cfg.bn, config.bn)
    if act is True:
        kwargs['activation'] = block_cfg.act if block_cfg.act else config.activation
    elif act:
        kwargs['activation'] = act
    return kwargs
def get_kwargs_mlp(block_cfg, config, is_training, act=True, **_kwargs):
    """kwargs for mlp blocks: get_kwargs plus the linearbn flag; **_kwargs wins."""
    kwargs = get_kwargs(block_cfg, config, is_training, act=act)
    # block-level setting beats model-level; both unset ('') means False
    if block_cfg.linearbn != '':
        linearbn = block_cfg.linearbn
    elif config.linearbn != '':
        linearbn = config.linearbn
    else:
        linearbn = False
    kwargs['linearbn'] = linearbn
    kwargs.update(_kwargs)
    return kwargs
def get_ftype(ftype, raise_not_found=True):
    """Normalize a feature-type alias to a (ftype, ptype) pair.

    'out'-style aliases map to ('f_out', 'p_out'); latent/logits/probs heads
    (optionally suffixed mlp/linear) keep their base name with ptype 'p_out';
    'sample'-style aliases map to ('f_sample', 'p_sample'). Unknown values
    raise KeyError unless raise_not_found is False, which yields (None, None).
    """
    if ftype in ('out', 'fout', 'f_out'):
        return 'f_out', 'p_out'
    for key in ('latent', 'logits', 'probs'):
        # accept e.g. latent, latent2mlp, latentmlp3, latentlinear
        if re.fullmatch(key + r'(\d*mlp|mlp\d*|linear|)', ftype):
            return key, 'p_out'
    if ftype in ('sample', 'fsample', 'f_sample'):
        return 'f_sample', 'p_sample'
    if raise_not_found:
        raise KeyError(f'not supported ftype = {ftype}')
    return None, None
# Recognized stage names: 'down', 'up', or '' (the non-sampling/search stage).
_valid_stage = ['down', 'up', '']
def fetch_supports_flow(inputs, stage_n, stage_i):
    """Fetch (points, idx) while flowing down/up through stages being built."""
    assert stage_n in _valid_stage, f'invalid stage_n={stage_n}'
    if stage_n:
        # Step to the adjacent stage: 'down' decrements, 'up' increments.
        stage_i = stage_i - 1 if stage_n == 'down' else stage_i + 1
        idx = inputs['sample_idx'][stage_n][stage_i]
    else:
        idx = inputs['neighbors'][stage_i]
    return inputs['points'][stage_i], idx
def fetch_supports_stage(inputs, stage_n, stage_i, ftype):
    """Index an already-built stage: returns (points, features, neighbor idx)."""
    canonical_n = to_valid_stage(stage_n)
    stage = inputs['stage_list'][canonical_n][stage_i]
    ftype, ptype = get_ftype(ftype)
    return stage[ptype], stage[ftype], inputs['neighbors'][stage_i]
def to_valid_stage(stage_n, short=False):
    """Canonicalize a stage name: 'D'/'down' and 'U'/'up', short or long form."""
    forms = {'D': ('D', 'down'), 'down': ('D', 'down'),
             'U': ('U', 'up'), 'up': ('U', 'up')}
    if stage_n not in forms:
        raise ValueError(f'invalid stage_n={stage_n}')
    short_form, long_form = forms[stage_n]
    return short_form if short else long_form
def parse_stage(stage, num_layers):
    """Parse a stage spec like 'D012_U34' into [(stage_name, layer_idx), ...].

    'a' is shorthand for all layer indices [0, num_layers).
    """
    # expand 'a' -> '012...' over all layers
    stage = stage.replace('a', ''.join(f'{i}' for i in range(num_layers)))
    # split into alternating name/digit groups, dropping '_' separators
    stage_list = [i.strip('_') for i in re.split('(\d+)', stage) if i and i.strip('_')]  # e.g. D012_U34
    assert len(stage_list) % 2 == 0, f'invalid stage compound: stage_list={stage_list} from stage={stage}'
    # even positions are stage names, odd positions their digit strings
    stage_n = [s for i, s in enumerate(stage_list) if i % 2 == 0]
    stage_i = [s for i, s in enumerate(stage_list) if i % 2 == 1]
    # one (name, index) pair per digit, flattened across all groups
    stage_list = [[(to_valid_stage(n), int(i)) for i in i_str] for n, i_str in zip(stage_n, stage_i)]
    stage_list = sum(stage_list, [])
    return stage_list
|
11504063
|
import pytest
import supriya.nonrealtime
def test_01():
    """
    With Session.at(...) context manager.
    """
    session = supriya.nonrealtime.Session()
    with session.at(0):
        source_group = session.add_group()
        target_group = session.add_group()
        synth = source_group.add_synth()
    with session.at(5):
        target_group.move_node(synth)
    with session.at(5):
        assert synth.get_parent() is target_group
def test_02():
    """
    With offset=... keyword.
    """
    session = supriya.nonrealtime.Session()
    with session.at(0):
        source_group = session.add_group()
        target_group = session.add_group()
        synth = source_group.add_synth()
        target_group.move_node(synth, offset=5)
    with session.at(5):
        assert synth.get_parent() is target_group
def test_03():
    """
    Without Session.at(...) context manager or offset keyword.
    """
    session = supriya.nonrealtime.Session()
    with session.at(0):
        group_one = session.add_group()
        group_two = session.add_group()
        node = group_one.add_synth()
    # Inside an .at(...) block the move has a timestamp and succeeds...
    with session.at(10):
        group_two.move_node(node)
    # ...but with neither a context manager nor offset= it must be rejected.
    with pytest.raises(ValueError):
        group_two.move_node(node)
def test_04():
    """
    With both Session.at(...) context manager and offset keyword.
    """
    session = supriya.nonrealtime.Session()
    with session.at(0):
        group_one = session.add_group()
        group_two = session.add_group()
        node = group_one.add_synth()
    with session.at(5):
        # The explicit offset= takes precedence over the surrounding .at(5).
        group_two.move_node(node, offset=15)
    with session.at(5):
        assert node.get_parent() is group_one
    with session.at(15):
        assert node.get_parent() is group_two
def test_05():
    """
    Create then immediately move.
    """
    session = supriya.nonrealtime.Session()
    with session.at(0):
        group_one = session.add_group()
        group_two = session.add_group()
        group_one.move_node(group_two)
    # The move collapses into creation: group 1001 is spawned directly inside
    # group 1000 rather than emitting a separate move command.
    assert session.to_lists(10) == [
        [0.0, [["/g_new", 1000, 0, 0], ["/g_new", 1001, 0, 1000]]],
        [10.0, [["/n_free", 1000, 1001], [0]]],
    ]
# xfail: collapsing a reverse-order create-then-move into an empty command
# list is the desired behavior, not yet implemented.
@pytest.mark.xfail()
def test_06():
    """
    Create then immediately move, but in reverse creation order.
    """
    session = supriya.nonrealtime.Session()
    with session.at(0):
        group_one = session.add_group()
        group_two = session.add_group()
        group_two.move_node(group_one)
    assert session.to_lists(10) == []
|
11504074
|
import sys
import os.path
import binascii
from pathlib import Path
from cryptography.hazmat.backends import default_backend
from cryptography import x509
from cryptography.hazmat.primitives.serialization import Encoding
from cryptography.hazmat.primitives.asymmetric import ec
from cryptoauthlib import *
from certs_handler import *
def isMCHP_cert():
    """Read the MCHP TNG certificate chain (root -> signer -> device) from the
    device and cryptographically verify each link.

    Returns
    -------
    dict
        {'status': Status.ATCA_SUCCESS, 'root_cert': ..., 'signer_cert': ...,
        'device_cert': ...} on success; {'status': 1} on any failure.
    """
    try:
        root_cert_der_size = AtcaReference(0)
        assert tng_atcacert_root_cert_size(root_cert_der_size) == Status.ATCA_SUCCESS
        root_cert_der = bytearray(root_cert_der_size.value)
        assert tng_atcacert_root_cert(root_cert_der, root_cert_der_size) == Status.ATCA_SUCCESS
        root_cert = x509.load_der_x509_certificate(root_cert_der, default_backend())
        # Note that this is a simple cryptographic validation and does not check
        # any of the actual certificate data (validity dates, extensions, names,
        # etc...)
        root_cert.public_key().verify(
            signature=root_cert.signature,
            data=root_cert.tbs_certificate_bytes,
            signature_algorithm=ec.ECDSA(root_cert.signature_hash_algorithm)
        )
        signer_cert_der_size = AtcaReference(0)
        assert tng_atcacert_max_signer_cert_size(signer_cert_der_size) == Status.ATCA_SUCCESS
        signer_cert_der = bytearray(signer_cert_der_size.value)
        assert tng_atcacert_read_signer_cert(signer_cert_der, signer_cert_der_size) == Status.ATCA_SUCCESS
        signer_cert = x509.load_der_x509_certificate(signer_cert_der, default_backend())
        # Signer must verify against the root key (chain check only, as above).
        root_cert.public_key().verify(
            signature=signer_cert.signature,
            data=signer_cert.tbs_certificate_bytes,
            signature_algorithm=ec.ECDSA(signer_cert.signature_hash_algorithm)
        )
        device_cert_der_size = AtcaReference(0)
        assert tng_atcacert_max_device_cert_size(device_cert_der_size) == Status.ATCA_SUCCESS
        device_cert_der = bytearray(device_cert_der_size.value)
        assert tng_atcacert_read_device_cert(device_cert_der, device_cert_der_size) == Status.ATCA_SUCCESS
        device_cert = x509.load_der_x509_certificate(device_cert_der, default_backend())
        # Device must verify against the signer key (chain check only, as above).
        signer_cert.public_key().verify(
            signature=device_cert.signature,
            data=device_cert.tbs_certificate_bytes,
            signature_algorithm=ec.ECDSA(device_cert.signature_hash_algorithm)
        )
        return {'status':Status.ATCA_SUCCESS, 'root_cert':root_cert, 'signer_cert':signer_cert, 'device_cert':device_cert}
    except Exception:
        # Fix: narrowed the former bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any read/parse/verify failure still maps to
        # the non-success status callers check for.
        return {'status':1}
def get_signer_cert_signer_id(name):
    """
    Get the Signer ID string from a Subject common name
    """
    # Lazily scan for the first COMMON_NAME attribute; the signer id is its
    # last four characters.
    common_names = (
        attr.value for attr in name if attr.oid == x509.oid.NameOID.COMMON_NAME
    )
    first = next(common_names, None)
    return first[-4:] if first is not None else None
def mchp_cert_bckp(mchp_cert_info):
    """Back up a valid MCHP cert chain to ~/.trustplatform, or restore it from
    a previous backup when the device no longer holds valid MCHP certs.

    Parameters
    ----------
    mchp_cert_info : dict
        Result of isMCHP_cert(): 'status' plus the parsed certificates.

    Returns
    -------
    Status.ATCA_SUCCESS after a successful backup or restore; 1 when the
    device has custom certs and no backup exists.
    """
    serial_number = bytearray(9)
    assert atcab_read_serial_number(serial_number) == Status.ATCA_SUCCESS
    trustplatform_directory = ".trustplatform"
    mchp_cert_directory = os.path.join(Path.home(), trustplatform_directory, "mchp_certs_bckp")
    # Backup files are keyed by the device serial number (upper-case hex).
    ser_num_str = str(binascii.hexlify(serial_number), 'utf-8').upper()
    root_cert_path = os.path.join(mchp_cert_directory, ser_num_str + "_root.der")
    signer_cert_path = os.path.join(mchp_cert_directory, ser_num_str + "_signer.der")
    device_cert_path = os.path.join(mchp_cert_directory, ser_num_str + "_device.der")
    if(mchp_cert_info['status'] == Status.ATCA_SUCCESS):
        if not os.path.exists(mchp_cert_directory):
            os.mkdir(mchp_cert_directory)
        print("MCHP certificates found in the device, backing up...", end='')
        # Write all three certs as DER alongside each other.
        root_cert_der = mchp_cert_info['root_cert'].public_bytes(encoding=Encoding.DER)
        with open(root_cert_path, "wb") as f:
            f.write(root_cert_der)
        signer_cert_der = mchp_cert_info['signer_cert'].public_bytes(encoding=Encoding.DER)
        with open(signer_cert_path, "wb") as f:
            f.write(signer_cert_der)
        device_cert_der = mchp_cert_info['device_cert'].public_bytes(encoding=Encoding.DER)
        with open(device_cert_path, "wb") as f:
            f.write(device_cert_der)
        print("OK")
        status = Status.ATCA_SUCCESS
    else:
        # Device does not contain a valid MCHP cert
        # Check for backups
        if ((os.path.exists(root_cert_path)) and
            (os.path.exists(signer_cert_path)) and
            (os.path.exists(device_cert_path))):
            # Backup exists, restore certs
            signer_id = get_signer_cert_signer_id(read_cert(signer_cert_path).subject)
            signer_cert_def, device_cert_def = certs_handler.generate_cert_def_files(root_cert_path, signer_id, signer_cert_path, device_cert_path)
            print('Saving signer certificate to device...', end='')
            signer_cert_der = read_cert(signer_cert_path).public_bytes(encoding=Encoding.DER)
            assert Status.ATCA_SUCCESS == atcacert_write_cert(signer_cert_def, signer_cert_der, len(signer_cert_der))
            print('OK')
            print('Saving device certificate to device...', end='')
            device_cert_der = read_cert(device_cert_path).public_bytes(encoding=Encoding.DER)
            assert Status.ATCA_SUCCESS == atcacert_write_cert(device_cert_def, device_cert_der, len(device_cert_der))
            print('OK')
            print("Restoring MCHP device and signer certificates - Success\n\r")
            status = Status.ATCA_SUCCESS
        else:
            # Backup does not exist.
            print("Device contains custom device and signer certificates")
            status = 1
    return status
|
11504075
|
import datetime
import itertools
import os
import shutil
import subprocess
import tempfile
from collections import defaultdict
from pathlib import Path
import ast
import json
import hydra
import numpy as np
import yaml
from omegaconf import DictConfig
from covid19sim.plotting.utils import env_to_path
from covid19sim.utils.utils import parse_search_configuration, is_app_based_tracing_intervention, NpEncoder
# Supported values for a sampled field's "sample" strategy.
SAMPLE_KEYS = {"list", "uniform", "range", "cartesian", "sequential", "chain"}
HYDRA_CONF_PATH = Path(__file__).parent.parent / "configs/exp/config.yaml"
# NOTE: seeds numpy's *global* RNG at import time so searches are reproducible.
np.random.seed(seed=0)
class RandomSearchError(Exception):
    """Raised when a random/grid-search experiment configuration is invalid."""
    pass
def first_key(d):
    """Get the first key of a dict (insertion order).

    O(1) via the dict iterator instead of materializing every key.
    (An empty dict now raises StopIteration rather than IndexError.)
    """
    return next(iter(d))
def first_value(d):
    """Get the first value of a dict (insertion order).

    O(1) via the values iterator instead of materializing every value.
    (An empty dict now raises StopIteration rather than IndexError.)
    """
    return next(iter(d.values()))
def get_extension(x):
    """
    Map a key, value tuple to a string to create the folder name in base_dir
    """
    key, value = x
    if key != "REC_LEVEL_THRESHOLDS":
        return str(value)
    # Threshold lists may arrive as a literal string like "[0, 1, 2]".
    thresholds = ast.literal_eval(value) if isinstance(value, str) else value
    return "".join(str(t) for t in thresholds)
def get_model(conf):
    """Map a simulation conf to its canonical intervention/model name.

    The '_normed' suffix is appended whenever DAILY_TARGET_REC_LEVEL_DIST
    is truthy, for every risk model.
    """
    normed = "_normed" if conf.get("DAILY_TARGET_REC_LEVEL_DIST", False) else ""
    risk_model = conf["RISK_MODEL"]
    if risk_model == "":
        return "unmitigated" + normed
    if risk_model == "digital":
        if conf["TRACING_ORDER"] == 1:
            return "binary_digital_tracing_order_1" + normed
        if conf["TRACING_ORDER"] == 2:
            return "binary_digital_tracing_order_2" + normed
        raise ValueError(
            "Unknown binary digital tracing order: {}".format(conf["TRACING_ORDER"])
        )
    if risk_model == "transformer":
        if conf["USE_ORACLE"]:
            return "oracle" + normed
        # FIXME this won't work if the run used the inference server
        return "transformer" + normed
    if risk_model in ("heuristicv1", "heuristicv2"):
        return risk_model + normed
    raise ValueError("Unknown RISK_MODEL {}".format(conf["RISK_MODEL"]))
def normalize(opts):
    """Resolve a 'normalization_folder' reference in `opts`.

    Loads the referenced simulation YAML and sets opts['intervention'] from
    it via get_model(); a transformer run additionally appends its weights
    directory name as '>weights'. Returns opts unchanged when no
    normalization is requested.
    """
    if "normalization_folder" not in opts:
        return opts
    folder = opts["normalization_folder"]
    # The option named after the folder holds the run name to resolve.
    run = opts[folder]
    sim_configs = Path(__file__).resolve().parent.parent / "configs" / "simulation"
    run_yaml = sim_configs / folder / f"{run}.yaml"
    if not run_yaml.exists():
        raise RandomSearchError(
            f"Unknown normalized run: \nFolder: {folder}\nFile: {run_yaml.name}"
        )
    with run_yaml.open("r") as f:
        run_conf = yaml.safe_load(f)
    opts["intervention"] = get_model(run_conf)
    if "TRANSFORMER_EXP_PATH" in run_conf:
        weights = Path(run_conf["TRANSFORMER_EXP_PATH"]).name
        opts["intervention"] += f">{weights}"
    return opts
def check_conf(conf):
    """Validate a search configuration.

    Raises RandomSearchError for unknown sampling strategies, missing 'from'
    fields, unknown run_type/exp_file, or unresolvable model weights.
    """
    tracing_methods = []
    hydra_configs = Path(__file__).resolve().parent.parent / "configs"
    exp_file = conf.get("exp_file", "experiment")
    use_tmpdir = conf.get("use_tmpdir")
    infra = conf.get("infra")
    zip_outdir = conf.get("zip_outdir")
    for k, v in conf.items():
        # Sampled fields must declare a known strategy and carry a 'from' field.
        if isinstance(v, dict) and "sample" in v:
            if v["sample"] not in SAMPLE_KEYS:
                raise RandomSearchError(
                    "Unknown sampling procedure {} for {}".format(v["sample"], k)
                )
            if "from" not in v:
                raise RandomSearchError(f"No 'from' key for {k}")
            if v["sample"] == "cartesian" and not isinstance(v["from"], list):
                raise RandomSearchError(f"'from' field for {k} should be a list")
        if k == "intervention":
            # Collect every intervention (sampled list or scalar) for the
            # weights checks below.
            if isinstance(v, dict) and "sample" in v:
                tracing_methods += v["from"]
            else:
                tracing_methods.append(v)
        elif k == "tune":
            if conf.get("use_tmpdir", False) is not False:
                raise RandomSearchError("Cannot use 'tune' and use_tmpdir:true")
        elif k == "run_type":
            if not (hydra_configs / "simulation" / "run_type" / f"{v}.yaml").exists():
                raise RandomSearchError(f"Unknown run_type {v}")
    if not (hydra_configs / "experiment" / f"{exp_file}.yaml").exists():
        raise RandomSearchError(f"Unknown exp_file {exp_file}")
    weights_dir = conf.get("weights_dir", "None")
    for tm in tracing_methods:
        weights = conf.get("weights", None)
        if isinstance(tm, dict):
            # dict-shaped intervention: {model: {..., 'weights': dirname}}
            model = first_key(tm)
            if weights is None and "weights" not in tm[model]:
                raise RandomSearchError(
                    f"Unknown {tm[model]} weights. Please specify '>' or 'weights: ...'"
                )
            elif "weights" in tm[model]:
                if not Path(weights_dir).exists():
                    raise RandomSearchError(
                        "No 'weights' specified and unknown 'weights_dir' {}".format(
                            weights_dir
                        )
                    )
                w = tm[model]["weights"]
                weights = Path(weights_dir) / w
        elif weights is not None:
            weights = Path(weights)
        # NOTE(review): `weights` can still be None here when the intervention
        # is a plain string and no top-level 'weights' was given — confirm
        # upstream configs always supply one in that case.
        if not weights.exists():
            raise RandomSearchError("Cannot find weights {}".format(str(weights)))
        else:
            # Transformer runs need both the checkpoint and its train config.
            transformer_best = weights / "Weights" / "best.ckpt"
            transformer_config = weights / "Configurations" / "train_config.yml"
            if not transformer_best.exists():
                raise RandomSearchError(
                    "Cannot find weights {}".format(transformer_best)
                )
            if not transformer_config.exists():
                raise RandomSearchError(
                    "Cannot find train config {}".format(transformer_config)
                )
    if use_tmpdir and infra != "intel" and not zip_outdir:
        raise RandomSearchError(
            "zip_outdir must be true when using tmpdir (use_tmpdir)"
        )
def compute_n_search(conf):
    """Number of runs when n_search=-1.

    Priority: cartesian fields yield the product of their 'from' sizes;
    otherwise sequential fields yield the longest 'from'; otherwise chain
    fields yield the total length of all 'from' lists.

    Raises:
        RandomSearchError: no cartesian/sequential/chain field exists.
    """
    grouped = defaultdict(list)
    for value in conf.values():
        if isinstance(value, dict) and "sample" in value:
            grouped[value["sample"]].append(value["from"])
    if "cartesian" in grouped:
        total = 1
        for froms in grouped["cartesian"]:
            total *= len(froms)
        return total
    if "sequential" in grouped:
        return max(len(froms) for froms in grouped["sequential"])
    if "chain" in grouped:
        return sum(len(froms) for froms in grouped["chain"])
    raise RandomSearchError(
        "Used n_search=-1 without any field being 'cartesian' or 'sequential'"
    )
def now_str():
    """Compact filesystem-safe timestamp, e.g. 20200608_125339_353416."""
    stamp = str(datetime.datetime.now())
    for old, new in (("-", ""), (":", ""), (" ", "_"), (".", "_")):
        stamp = stamp.replace(old, new)
    return stamp
def sample_param(sample_dict):
    """Sample one hyperparameter value from an instruction dict.

    Plain (non-sampled) values pass through unchanged. 'cartesian',
    'sequential' and 'chain' strategies are deferred: they return marker
    strings resolved later by sample_search_conf. 'range', 'list' and
    'uniform' draw immediately from numpy's global RNG.
    """
    if not isinstance(sample_dict, dict) or "sample" not in sample_dict:
        return sample_dict
    kind = sample_dict["sample"]
    if kind in ("cartesian", "sequential", "chain"):
        # Deferred strategies require a list to draw from later.
        assert isinstance(
            sample_dict["from"], list
        ), "{}'s `from` field MUST be a list, found {}".format(
            sample_dict["sample"], sample_dict["from"]
        )
        return f"__{kind}__"
    if kind == "range":
        return np.random.choice(np.arange(*sample_dict["from"]))
    if kind == "list":
        return np.random.choice(sample_dict["from"])
    if kind == "uniform":
        return np.random.uniform(*sample_dict["from"])
    raise ValueError("Unknown sample type in dict " + str(sample_dict))
def load_search_conf(path):
    """Load and return the parsed YAML document at `path`.

    Echoes the path (plus a blank line) for run-log readability; the file
    must exist and carry a .yaml/.yml suffix.
    """
    print(path)
    print()
    conf_path = Path(path)
    assert conf_path.exists()
    assert conf_path.suffix in {".yaml", ".yml"}
    with conf_path.open("r") as handle:
        return yaml.safe_load(handle)
def sample_cartesians(cartesian_keys, exp, idx):
    """Return the idx-th point (mod product size) of the cartesian product
    over every key's 'from' list.

    Args:
        cartesian_keys (list): keys participating in the cartesian product
        exp (dict): experimental configuration
        idx (int): index of the current sample

    Returns:
        dict: {key: value} for each cartesian key
    """
    values_per_key = (exp[key]["from"] for key in cartesian_keys)
    combos = list(itertools.product(*values_per_key))
    combo = combos[idx % len(combos)]
    return dict(zip(cartesian_keys, combo))
def sample_chains(chain_keys, exp, idx):
    """Return the idx-th (key, value) pair (mod chain length) of all keys'
    'from' lists concatenated in order.

    Keys flagged 'normalized' additionally record themselves under
    'normalization_folder' in the returned dict.
    """
    pairs = [(key, value) for key in chain_keys for value in exp[key]["from"]]
    key, value = pairs[idx % len(pairs)]
    conf = {key: value}
    if exp[key].get("normalized"):
        conf["normalization_folder"] = key
    return conf
def sample_sequentials(sequential_keys, exp, idx):
    """Sample the idx-th value of every sequential key, *independently*
    cycling each key's 'from' list (unlike cartesian sampling).

    Args:
        sequential_keys (list): keys to be sampled sequentially
        exp (dict): experimental config
        idx (int): index of the current sample

    Returns:
        dict: {key: value} for each sequential key
    """
    return {
        key: exp[key]["from"][idx % len(exp[key]["from"])]
        for key in sequential_keys
    }
def get_uuid():
    """Two random 6-digit integers joined by an underscore."""
    low, high = int(1e5), int(1e6)
    return f"{np.random.randint(low, high)}_{np.random.randint(low, high)}"
def ipc_addresses():
    """Fresh (frontend, backend) IPC socket addresses sharing one uuid."""
    uuid = get_uuid()
    return tuple(
        "ipc:///tmp/covid19_{}_{}.ipc".format(uuid, side)
        for side in ("frontend", "backend")
    )
def sample_search_conf(exp, idx=0):
    """Sample a full configuration from the parametrization in `exp`.

    Immediate strategies are resolved via sample_param; deferred markers
    (__cartesian__/__sequential__/__chain__) are collected and resolved per
    strategy using `idx`.

    Args:
        exp (dict): experiment's parametrization
        idx (int): sample index (drives cartesian/sequential/chain picks)

    Returns:
        dict: sampled configuration
    """
    conf = {}
    deferred = {"__cartesian__": [], "__sequential__": [], "__chain__": []}
    for key, value in exp.items():
        sampled = sample_param(value)
        if isinstance(sampled, str) and sampled in deferred:
            deferred[sampled].append(key)
        else:
            conf[key] = sampled
    if deferred["__sequential__"]:
        conf.update(sample_sequentials(deferred["__sequential__"], exp, idx))
    if deferred["__chain__"]:
        conf.update(sample_chains(deferred["__chain__"], exp, idx))
    if deferred["__cartesian__"]:
        conf.update(sample_cartesians(deferred["__cartesian__"], exp, idx))
    return conf
def load_template(infra):
    """Return the sbatch/shell template string for the given infrastructure.

    Raises:
        ValueError: when `infra` is not one of mila/intel/beluga.
    """
    templates = {
        "mila": "mila_sbatch_template.sh",
        "intel": "intel_template.sh",
        "beluga": "beluga_sbatch_template.sh",
    }
    if infra not in templates:
        raise ValueError("Unknown infrastructure " + str(infra))
    base = Path(__file__).resolve().parent
    return (base / templates[infra]).read_text()
def fill_intel_template(template_str, conf):
    """Format the Intel-cluster shell template with values from `conf`,
    falling back to defaults under $HOME for env/code/weights locations."""
    home = os.environ.get("HOME")
    env_name = conf.get("env_name", "covid")
    code_loc = conf.get("code_loc", str(Path(home) / "simulator/src/covid19sim/"))
    weights = conf.get("weights", str(Path(home) / "FRESH-SNOWFLAKE-224B/"))
    ipc = conf.get("ipc", {"frontend": "", "backend": ""})
    cpu = conf.get("cpus", 6)
    use_transformer = str(conf.get("use_transformer", True)).lower()
    # Reserve cpus for the concurrent runs (parallel) or one for the driver.
    if conf.get("parallel_search"):
        workers = cpu - conf.get("n_runs_per_search", 1)
    else:
        workers = cpu - 1
    # In dev mode, echo every resolved value before formatting.
    if "dev" in conf and conf["dev"]:
        print(
            "Using:\n"
            + "\n".join(
                [
                    " {:10}: {}".format("env_name", env_name),
                    " {:10}: {}".format("code_loc", code_loc),
                    " {:10}: {}".format("weights", weights),
                    " {:10}: {}".format("use_transformer", use_transformer),
                    " {:10}: {}".format("workers", workers),
                    " {:10}: {}".format("frontend", ipc["frontend"]),
                    " {:10}: {}".format("backend", ipc["backend"]),
                ]
            )
        )
    # Only emit --frontend/--backend flags when addresses were provided.
    frontend = '--frontend="{}"'.format(ipc["frontend"]) if ipc["frontend"] else ""
    backend = '--backend="{}"'.format(ipc["backend"]) if ipc["backend"] else ""
    return template_str.format(
        env_name=env_name,
        code_loc=code_loc,
        weights=weights,
        frontend=frontend,
        backend=backend,
        use_transformer=use_transformer,
        workers=workers,
    )
def fill_mila_template(template_str, conf):
    """
    Formats the template_str with variables from the conf dict,
    which is a sampled experiment

    Args:
        template_str (str): sbatch template
        conf (dict): sbatch parameters

    Returns:
        str: formated template
    """
    user = os.environ.get("USER")
    home = os.environ.get("HOME")
    email = conf.get('email_id', "")
    partition = conf.get("partition", "main")
    cpu = conf.get("cpus", 6)
    # cpu constraints in long partition
    if partition == "long":
        cpu = min(cpu, 4)
    mem = conf.get("mem", 16)
    gres = conf.get("gres", "")
    time = str(conf.get("time", "4:00:00"))
    slurm_log = conf.get(
        "slurm_log", conf.get("base_dir", f"/network/tmp1/{user}/covi-slurm-%j.out")
    )
    env_name = conf.get("env_name", "covid")
    weights = conf.get("weights")
    code_loc = conf.get("code_loc", str(Path(home) / "simulator/src/covid19sim/"))
    ipc = conf.get("ipc", {"frontend": "", "backend": ""})
    use_transformer = conf.get("use_transformer", True)
    workers = cpu - 1
    # if slurm_log points at a directory, append the per-job log file pattern
    if "%j.out" not in slurm_log:
        slurm_log = str(Path(slurm_log).resolve() / "covi-slurm-%j.out")
    # make sure the log directory exists (skipped in dev mode)
    if not Path(slurm_log).parent.exists() and not conf.get("dev"):
        Path(slurm_log).parent.mkdir(parents=True)
    # templates expect a lowercase "true"/"false" string
    use_server = str(use_transformer and conf.get("USE_INFERENCE_SERVER", False)).lower()
    if "dev" in conf and conf["dev"]:
        # dev mode: echo the resolved sbatch parametrization
        print(
            "Using:\n"
            + "\n".join(
                [
                    " {:10}: {}".format("partition", partition),
                    " {:10}: {}".format("cpus-per-task", cpu),
                    " {:10}: {}".format("mem", mem),
                    " {:10}: {}".format("gres", gres),
                    " {:10}: {}".format("time", time),
                    " {:10}: {}".format("slurm_log", slurm_log),
                    " {:10}: {}".format("env_name", env_name),
                    " {:10}: {}".format("code_loc", code_loc),
                    " {:10}: {}".format("weights", weights),
                    " {:10}: {}".format("frontend", ipc["frontend"]),
                    " {:10}: {}".format("backend", ipc["backend"]),
                    " {:10}: {}".format("use_transformer", use_transformer),
                    " {:10}: {}".format("use_server", use_server),
                    " {:10}: {}".format("workers", workers),
                ]
            )
        )
    # the "covid" pseudo-partition maps to a reservation on the long partition
    partition = (
        f"#SBATCH --partition={partition}"
        if partition != "covid"
        else "#SBATCH --reservation=covid\n#SBATCH --partition=long"
    )
    cpu = f"#SBATCH --cpus-per-task={cpu}"
    mem = f"#SBATCH --mem={mem}GB"
    gres = f"#SBATCH --gres={gres}" if gres else ""
    time = f"#SBATCH --time={time}"
    email = f"#SBATCH --mail-user={email}"
    slurm_log = f"#SBATCH -o {slurm_log}\n#SBATCH -e {slurm_log}"
    frontend = '--frontend="{}"'.format(ipc["frontend"]) if ipc["frontend"] else ""
    backend = '--backend="{}"'.format(ipc["backend"]) if ipc["backend"] else ""
    return template_str.format(
        partition=partition,
        cpu=cpu,
        mem=mem,
        gres=gres,
        time=time,
        slurm_log=slurm_log,
        env_name=env_name,
        code_loc=code_loc,
        weights=weights,
        frontend=frontend,
        backend=backend,
        use_server=use_server,
        workers=workers,
        email=email
    )
def fill_beluga_template(template_str, conf):
    """
    Formats the template_str with variables from the conf dict,
    which is a sampled experiment

    Args:
        template_str (str): sbatch template
        conf (dict): sbatch parameters

    Returns:
        str: formated template
    """
    user = os.environ.get("USER")
    home = os.environ.get("HOME")
    email = conf.get('email_id', "")
    cpu = conf.get("cpus", 4)
    mem = conf.get("mem", 12)
    time = str(conf.get("time", "2:50:00"))
    slurm_log = conf.get(
        "slurm_log", conf.get("base_dir", f"/scratch/{user}/covi-slurm-%j.out")
    )
    # if slurm_log points at a directory, append the per-job log file pattern
    if "%j.out" not in slurm_log:
        slurm_log = str(Path(slurm_log).resolve() / "covi-slurm-%j.out")
    # make sure the log directory exists (skipped in dev mode)
    if not Path(slurm_log).parent.exists() and not conf.get("dev"):
        Path(slurm_log).parent.mkdir(parents=True)
    env_name = conf.get("env_name", "covid")
    weights = conf.get("weights")
    code_loc = conf.get("code_loc", str(Path(home) / "simulator/src/covid19sim/"))
    ipc = conf.get("ipc", {"frontend": "", "backend": ""})
    use_transformer = conf.get("use_transformer", True)
    # templates expect a lowercase "true"/"false" string
    use_server = str(use_transformer and conf.get("USE_INFERENCE_SERVER", False)).lower()
    workers = cpu - 1
    if "dev" in conf and conf["dev"]:
        # dev mode: echo the resolved sbatch parametrization
        print(
            "Using:\n"
            + "\n".join(
                [
                    " {:10}: {}".format("cpus-per-task", cpu),
                    " {:10}: {}".format("mem", mem),
                    " {:10}: {}".format("time", time),
                    " {:10}: {}".format("slurm_log", slurm_log),
                    " {:10}: {}".format("env_name", env_name),
                    " {:10}: {}".format("code_loc", code_loc),
                    " {:10}: {}".format("weights", weights),
                    " {:10}: {}".format("frontend", ipc["frontend"]),
                    " {:10}: {}".format("backend", ipc["backend"]),
                    " {:10}: {}".format("use_transformer", use_transformer),
                    " {:10}: {}".format("use_server", use_server),
                    " {:10}: {}".format("workers", workers),
                ]
            )
        )
    cpu = f"#SBATCH --cpus-per-task={cpu}"
    mem = f"#SBATCH --mem={mem}GB"
    time = f"#SBATCH --time={time}"
    email = f"#SBATCH --mail-user={email}"
    slurm_log = f"#SBATCH -o {slurm_log}\n#SBATCH -e {slurm_log}"
    frontend = '--frontend="{}"'.format(ipc["frontend"]) if ipc["frontend"] else ""
    backend = '--backend="{}"'.format(ipc["backend"]) if ipc["backend"] else ""
    return template_str.format(
        cpu=cpu,
        mem=mem,
        time=time,
        slurm_log=slurm_log,
        env_name=env_name,
        code_loc=code_loc,
        weights=weights,
        frontend=frontend,
        backend=backend,
        use_server=use_server,
        workers=workers,
        email=email
    )
def get_hydra_args(opts, exclude=frozenset()):
    """
    Serialize `opts` into a hydra command-line override string.

    Args:
        opts (dict): parameter name -> value
        exclude (Container): keys to leave out of the command line

    Returns:
        str: " k1=v1 k2=v2 ..." — one token per kept key, each with a
            leading space; "" when nothing is kept
    """
    # fix: default was a mutable `set()`; a frozenset is safe to share and
    # still supports the only operation performed on it (membership test)
    parts = []
    for k, v in opts.items():
        if k in exclude:
            continue
        if isinstance(v, list):
            # quote list values so the shell passes them as a single argument
            v = f'"{v}"'
        parts.append(f" {k}={v}")
    # join instead of repeated string concatenation
    return "".join(parts)
def printlines():
    """Print a visual separator: two lines of 80 '=' characters."""
    for _ in range(2):
        print("=" * 80)
# Path to the hydra config, relative to this file's directory.
HYDRA_CONF_PATH = "../configs/experiment/config.yaml"


@hydra.main(config_path=HYDRA_CONF_PATH, strict=False)
def main(conf: DictConfig) -> None:
    """
    HOW TO USE

    $ python experiment.py exp_file=experiment n_search=20

    add `dev=True` to just see the commands that would be run, without
    running them

    NOTE: ALL parameters used in run.py may be overwritten from this commandline.
    For instance you can change init_fraction_sick

    $ python experiment.py exp_file=experiment n_search=20 init_fraction_sick=0.1

    NOTE: you may also pass arguments overwriting the default `sbatch` job's
    parametrization like partition, gres, code_loc (=where is the simulator's code),
    env_name (= what conda env to load). For instance:

    $ python experiment.py partition=unkillable gres=gpu:1 env_name=covid-env\
        n_search=20 init_fraction_sick=0.1
    """
    # These will be filtered out when passing arguments to run.py
    RANDOM_SEARCH_SPECIFIC_PARAMS = {
        "n_search",  # number of random iterations
        "n_runs_per_search",  # number of random iterations
        "dev",  # dev-mode: print stuff, don't run them
        "exp_file",  # what experimental parametrization
        "partition",  # sbatch partition to use
        "cpus",  # sbatch number of cpus
        "mem",  # sbatch memory to request
        "time",  # sbatch job upper bound on duration
        "slurm_log",  # sbatch logs destination
        "gres",  # sbatch gres arg, may be nothing or gpu:1
        "env_name",  # conda environment to load
        "code_loc",  # where to find the source code, will cd there
        "weights",  # where to find the transformer's weights
        "infra",  # using Mila or Intel cluster?
        "now_str",  # naming scheme
        "parallel_search",  # run with & at the end instead of ; to run in subshells
        "ipc",  # run with & at the end instead of ; to run in subshells
        "start_index",  # ignore the first runs, to continue an exploration for instance
        "use_transformer",  # defaults to True
        "use_server",  # defaults to True
        "use_tmpdir",  # use SLURM_TMPDIR and copy files to outdir after
        "weights_dir",  # where are the weights
        "base_dir",  # output dir will be base_dir/tracing_method
        "normalization_folder",  # if this is a normalization run
        "exp_name",  # folder name in base_dir => base_dir/exp_name/method/...
        "email_id",  # email id where you can receive notifications regarding jobs (began, completed, failed)
    }

    # move back to original directory because hydra moved
    os.chdir(hydra.utils.get_original_cwd())

    # get command-line arguments as native dict
    overrides = parse_search_configuration(conf)

    # load experimental configuration
    # override with exp_file=<X>
    # where <X> is in configs/exp and is ".yaml"
    exp_file_path = (
        Path(__file__).resolve().parent.parent
        / "configs"
        / "experiment"
        / (overrides.get("exp_file", "randomization") + ".yaml")
    )
    conf = load_search_conf(exp_file_path)
    # override experimental parametrization with the commandline conf
    conf.update(overrides)
    check_conf(conf)

    # expand environment variables in path-like config values
    for k in ["code_loc", "base_dir", "outdir", "weights_dir"]:
        if k in conf and conf[k]:
            conf[k] = env_to_path(conf[k])

    # -------------------------------------
    # -----  Compute Specific Values  -----
    # -------------------------------------
    conf["n_runs_per_search"] = conf.get("n_runs_per_search", 1)

    # n_search == -1 means: derive the total number of runs from the conf
    if conf.get("n_search") == -1:
        total_runs = compute_n_search(conf)
        conf["n_search"] = total_runs // conf["n_runs_per_search"]
    else:
        total_runs = conf["n_runs_per_search"] * conf["n_search"]

    if total_runs % conf["n_runs_per_search"] != 0:
        raise RandomSearchError(
            "n_search ({}) is not divisible by n_runs_per_epoch ({})".format(
                total_runs, conf["n_runs_per_search"]
            )
        )

    # nest outputs under base_dir/exp_name when both are provided
    if "exp_name" in conf:
        if "base_dir" in conf:
            conf["base_dir"] = str(Path(conf["base_dir"]) / conf["exp_name"])
            print(f"Running experiments in base_dir: {conf['base_dir']}")
        else:
            print(f"Ignoring 'exp_name' {conf['exp_name']} as no base_dir was provided")

    print(f"Running {total_runs} scripts")

    conf["now_str"] = now_str()
    infra = conf.get("infra", "mila")
    parallel_search = conf.get("parallel_search", False)
    start_index = conf.get("start_index", 0)
    template_str = load_template(infra)
    use_tmpdir = conf.get("use_tmpdir", False)
    outdir = None
    dev = "dev" in conf and conf["dev"]
    is_tune = conf.get("tune", False)
    # keys whose value is a {"sample": ...} spec are randomly sampled per run
    sampled_keys = [k for k, v in conf.items() if isinstance(v, dict) and "sample" in v]
    sampled_str = "\n".join([f" {k}: {{{k}}}" for k in sampled_keys])

    if is_tune and use_tmpdir:
        raise RandomSearchError("Cannot use tune and $SLURM_TMPDIR together")

    if use_tmpdir and not conf["outdir"]:
        raise RandomSearchError(
            "Using $SLURM_TPMDIR but no `outdir` has been specified"
        )

    home = os.environ["HOME"]

    # keep a copy of the experiment file next to the outputs for reproducibility
    copy_dest = conf["outdir"] if "outdir" in conf else conf["base_dir"]
    if not dev:
        Path(copy_dest).mkdir(parents=True, exist_ok=True)
        shutil.copy(exp_file_path, Path(copy_dest) / exp_file_path.name)

    # run n_search jobs
    printlines()
    old_opts = set()
    run_idx = start_index
    for i in range(conf.get("n_search", 1)):
        print("\nJOB", i)
        ipcf, ipcb = None, None

        # fill template
        if infra == "mila":
            job_str = fill_mila_template(template_str, conf)
        elif infra == "beluga":
            job_str = fill_beluga_template(template_str, conf)
        elif infra == "intel":
            job_str = fill_intel_template(template_str, conf)
        else:
            raise ValueError("Unknown infra " + str(infra))

        # do n_runs_per_search simulations per job
        for k in range(conf.get("n_runs_per_search", 1)):
            skipped = False
            opts = sample_search_conf(conf, run_idx)
            opts = normalize(opts)
            run_idx += 1

            # rewrite APP_UPTAKE for non-tracing methods to avoid redundant experiments
            if not is_app_based_tracing_intervention(opts['intervention']):
                opts['APP_UPTAKE'] = -1

            # de-duplicate: skip parametrizations already sampled in this search
            opts_str = json.dumps(opts, sort_keys=True, cls=NpEncoder)
            if opts_str in old_opts:
                print("\n Ran this job already ... skipping!")
                skipped = True
                continue
            old_opts.add(opts_str)

            extension = ""

            # specify server frontend
            tracing_dict = None
            tracing_name = None
            if isinstance(opts.get("intervention", ""), dict):
                tracing_dict = first_value(opts["intervention"])
                tracing_name = first_key(opts["intervention"])
            use_transformer = (
                tracing_dict is not None
                and "weights" in tracing_dict
                and tracing_dict["weights"]
            )
            use_server = use_transformer and opts.get("USE_INFERENCE_SERVER", False)

            if use_transformer:
                # -------------------------
                # -----  Set Weights  -----
                # -------------------------
                if "weights" not in opts:
                    weights_name = tracing_dict["weights"]
                    weights_name = weights_name.strip()
                    opts["weights"] = str(Path(opts["weights_dir"]) / weights_name)

            if tracing_dict is not None:
                # Create folder name extension based on keys in tracing_method dict
                extensions = sorted(tracing_dict.items())
                extension = "_" + "_".join(map(get_extension, extensions))
                # Add tracing_method dict's keys and values to opts
                for k, v in tracing_dict.items():
                    if k != "weights":
                        if k in opts:
                            print(
                                "Warning, overriding opts[{}]={} to opts[{}]={}".format(
                                    k, opts[k], k, v
                                )
                            )
                        opts[k] = v
                # set true tracing_method
                opts["intervention"] = tracing_name

            # -----------------------------------------------------
            # -----  Inference Server / Transformer Exp Path  -----
            # -----------------------------------------------------
            if use_server:
                # lazily allocate ipc addresses once per job, shared by its runs
                if ipcf is None:
                    ipcf, ipcb = ipc_addresses()
                opts["ipc"] = {"frontend": ipcf, "backend": ipcb}
                opts["INFERENCE_SERVER_ADDRESS"] = f'"{ipcf}"'
            else:
                if opts.get("USE_INFERENCE_SERVER") is not False:
                    opts["USE_INFERENCE_SERVER"] = False
                if use_transformer:
                    opts["TRANSFORMER_EXP_PATH"] = opts["weights"]

            # ----------------------------------------------
            # -----  Set outdir from basedir (if any)  -----
            # ----------------------------------------------
            if not opts.get("outdir"):
                opts["outdir"] = Path(opts["base_dir"]).resolve()
                opts["outdir"] = opts["outdir"] / (opts["intervention"] + extension)
                opts["outdir"] = str(opts["outdir"])
                # NOTE(review): no-op assignment kept as-is
                opts["outdir"] = opts["outdir"]

            # --------------------------------
            # -----  Use SLURM_TMPDIR ?  -----
            # --------------------------------
            if use_tmpdir:
                outdir = str(opts["outdir"])
                if not dev:
                    Path(outdir).resolve().mkdir(parents=True, exist_ok=True)
                # the run writes to the node-local tmpdir; results are copied after
                opts["outdir"] = "$SLURM_TMPDIR"

            # overwrite intervention day if no_intervention
            if opts["intervention"] == "no_intervention":
                opts["INTERVENTION_DAY"] = -1

            # convert params to string command-line args
            exclude = RANDOM_SEARCH_SPECIFIC_PARAMS
            if opts.get("normalization_folder"):
                exclude.add("intervention")

            hydra_args = get_hydra_args(opts, exclude)

            # echo commandlines run in job
            if not dev:
                job_str += f"\necho 'python run.py {hydra_args}'\n"

            command_suffix = "&\nsleep 5;\n" if parallel_search else ";\n"
            # intel doesn't have a log file so let's make one
            if infra == "intel":
                job_out = Path(home) / "job_logs"
                if not dev:
                    job_out.mkdir(exist_ok=True)
                job_out = job_out / f"{now_str()}.out"
                print("Job logs:", str(job_out))
                command_suffix = f" &> {str(job_out)} {command_suffix}"

            # append run command
            job_str += "\n{}{}".format("python run.py" + hydra_args, command_suffix)

            # sample next params

        if skipped:
            continue

        # output in slurm_tmpdir and move zips to original outdir specified
        if use_tmpdir and infra != "intel":
            # data needs to be zipped for it to be transferred
            assert opts["zip_outdir"]
            job_str += f"\ncp $SLURM_TMPDIR/*.zip {outdir}"

        # create temporary sbatch file
        tmp = Path(tempfile.NamedTemporaryFile(suffix=".sh").name)
        # give somewhat meaningful name to t
        tmp = tmp.parent / (Path(opts.get("outdir", "")).name + "_" + tmp.name)
        if not dev:
            with tmp.open("w") as f:
                f.write(job_str)

        # sbatch or bash execution
        if infra in {"beluga", "mila"}:
            command = f"sbatch {str(tmp)}"
        elif infra == "intel":
            command = f"bash {str(tmp)}"

        # dev-mode: don't actually run the command
        if dev:
            print("\n>>> ", command, end="\n\n")
            print(str(tmp))
            print("." * 50)
            print(job_str)
        else:
            # not dev-mode: run it!
            _ = subprocess.call(command.split(), cwd=home)
            print("In", opts["outdir"])
            print("With Sampled Params:")
            print(sampled_str.format(**{k: opts.get(k) for k in sampled_keys}))

        # prints
        print()
        printlines()
# Script entry point; hydra handles command-line parsing inside main().
if __name__ == "__main__":
    main()
|
11504084
|
import os
import time
import pytest
from geth.wrapper import spawn_geth
from geth.utils.dag import is_dag_generated
from geth.utils.timeout import (
Timeout,
)
@pytest.mark.skipif(
    'TEST_DAG_WAIT' not in os.environ,
    reason="'TEST_DAG_WAIT' environment variable is not set",
)
def test_waiting_for_dag_generation(base_dir):
    """Spawn `geth makedag` and poll until the DAG appears under base_dir,
    then check the process exited cleanly."""
    assert not is_dag_generated(base_dir=base_dir)
    command, proc = spawn_geth(dict(
        data_dir=base_dir,
        suffix_args=['makedag', '0', base_dir],
    ))
    assert not is_dag_generated(base_dir=base_dir)
    # DAG generation is slow: allow up to 10 minutes
    with Timeout(600) as timeout:
        while True:
            if is_dag_generated(base_dir=base_dir):
                break
            # bug fix: was `time(0.1)`, which calls the module object and
            # raises TypeError; the intent is to sleep between polls
            time.sleep(0.1)
            timeout.check()
    assert proc.poll() is not None
    assert proc.returncode == 0
|
11504104
|
import config
import typing
from api.services import http
from urllib.parse import quote_plus
# Base URL of the Discord HTTP API.
DISCORD_ENDPOINT = "https://discord.com/api"
# OAuth2 scopes requested by default.
SCOPES = ["identify"]
async def exchange_code(
    *, code: str, scope: str, redirect_uri: str, grant_type: str = "authorization_code"
) -> typing.Tuple[dict, int]:
    """Exchange discord oauth code for access and refresh tokens.

    Returns:
        (parsed JSON response body, HTTP status code). On error Discord
        returns an error payload; callers should check the status.
    """
    async with http.session.post(
        "%s/v6/oauth2/token" % DISCORD_ENDPOINT,
        data=dict(
            code=code,
            scope=scope,
            grant_type=grant_type,
            redirect_uri=redirect_uri,
            client_id=config.discord_client_id(),
            client_secret=config.discord_client_secret(),
        ),
        # token endpoint requires form encoding, not JSON
        headers={"Content-Type": "application/x-www-form-urlencoded"},
    ) as response:
        return await response.json(), response.status
async def get_user(access_token: str) -> dict:
    """Coroutine to fetch User data from discord using the users `access_token`

    Returns the JSON body of /users/@me (an error payload if the token is invalid).
    """
    async with http.session.get(
        "%s/v6/users/@me" % DISCORD_ENDPOINT,
        headers={"Authorization": "Bearer %s" % access_token},
    ) as response:
        return await response.json()
def format_scopes(scopes: typing.List[str]) -> str:
    """Join OAuth2 scopes into the single space-separated string Discord expects."""
    separator = " "
    return separator.join(scopes)
def get_redirect(callback: str, scopes: typing.List[str]) -> str:
    """Generates the correct oauth link depending on our provided arguments.

    Args:
        callback: redirect URI registered with the Discord application;
            it is URL-encoded before being embedded in the link.
        scopes: OAuth2 scopes to request (joined space-separated).

    Returns:
        The full authorization URL to send the user to.
    """
    return (
        "{BASE}/oauth2/authorize?response_type=code"
        "&client_id={client_id}"
        "&scope={scopes}"
        "&redirect_uri={redirect_uri}"
        "&prompt=consent"
    ).format(
        BASE=DISCORD_ENDPOINT,
        scopes=format_scopes(scopes),
        redirect_uri=quote_plus(callback),
        client_id=config.discord_client_id(),
    )
|
11504125
|
from typing import Literal
from pydantic import BaseModel
class MsgShow(BaseModel):
    """Per-field visibility toggles for the picture-info message."""

    title: bool = True
    picID: bool = False
    picWebUrl: bool = True
    page: bool = True
    author: bool = True
    authorID: bool = False
    authorWebUrl: bool = True
    picOriginalUrl: bool = True
    tags: bool = True
class ReplyMsg(BaseModel):
    """Reply texts for the various error/limit situations (user-facing,
    so the string values are kept verbatim)."""

    inputError: str = "要阿拉伯数字哦~"
    notFound: str = "你的xp好奇怪啊"
    tooMuch: str = "爪巴"
    tooSmall: str = "¿"
    closed: str = "没有,爪巴"
    noR18: str = "没有,爪巴"
    # templated with {tag} and {num}
    insufficient: str = "关于{tag}的图片只有{num}张"
    # templated with {time}, {limitCount}, {callDone}, {r_time}
    freqLimit: str = "本群每{time}s能发{limitCount}张色图,已发{callDone}张,离刷新还有{r_time}s"
class Switch(BaseModel):
    """A feature toggle, separately for group chats and temp (private) chats."""

    group: bool = True
    temp: bool = True
class Count(BaseModel):
    """An integer setting with distinct values for group and temp chats.
    Both fields are required (no defaults)."""

    group: int
    temp: int
class Freq(BaseModel):
    """Rate limit: at most `limitCount` calls per `refreshTime` seconds."""

    limitCount: int = 10
    refreshTime: int = 120
class API(BaseModel):
    """Which image-source backends are enabled."""

    lolicon: bool = True
    yuban: bool = True
    pixiv: bool = True
class Setting(BaseModel):
    """Aggregated feature settings for a group."""

    # Fix: these defaults were plain dict literals ({"group": True, ...}).
    # Pydantic does not validate/coerce default values, so the defaults
    # stayed dicts at runtime and attribute access (e.g. `setting.setu.group`)
    # would raise AttributeError. Constructing the sub-models makes the
    # runtime type match the declared annotation; pydantic deep-copies
    # model defaults per instance, so sharing is safe.
    setu: Switch = Switch(group=True, temp=True)
    api: API = API()
    r18: Switch = Switch(group=False, temp=True)
    freq: Freq = Freq()
    # image quality to send back
    quality: Literal["original", "large", "medium"] = "large"
    # whether to @-mention the requester
    at: bool = False
    sentRefreshTime: int = 600
    singleMaximum: Count = Count(group=5, temp=10)
    revokeTime: Count = Count(group=20, temp=0)
class GroupConfig(BaseModel):
    """Top-level per-group configuration document."""

    # user ids allowed to change settings
    admins: list = []
    setting: Setting = Setting()
    setuInfoShow: MsgShow = MsgShow()
    replyMsg: ReplyMsg = ReplyMsg()
|
11504128
|
from framework.fuzzer.fuzzobjects import FuzzRequest
class PluginResult:
    """Finding reported by a plugin: the originating plugin and a description."""

    def __init__(self):
        # source: name of the plugin; issue: human-readable finding
        self.source, self.issue = "", ""
class PluginRequest():
    """A follow-up request queued by a plugin for further fuzzing."""

    def __init__(self):
        # name of the plugin that queued this request
        self.source = ""
        # FuzzRequest to perform (None until populated)
        self.request = None
        # recursion depth of this request
        self.rlevel = 0

    @staticmethod
    def from_fuzzRes(res, url, source):
        """Build a PluginRequest from a fuzz result `res`, targeting `url`,
        one recursion level deeper than `res`."""
        fr = FuzzRequest.from_fuzzRes(res, str(url))
        fr.wf_description = fr.path
        fr.rlevel = res.rlevel + 1
        plreq = PluginRequest()
        plreq.source = source
        plreq.request = fr
        # NOTE(review): rlevel is set on both `fr` and the PluginRequest —
        # presumably both are read downstream; confirm before deduplicating
        plreq.rlevel = res.rlevel + 1
        return plreq
|
11504193
|
from pyrogram import filters
from pyrogram.errors import RPCError
from pyrogram.types import User
from app.config import conf
from app.utils import Client, Message
from app.utils.decorators import doc_args
@Client.on_message(filters.me & filters.command("mention", prefixes="."))
@doc_args("username.optional text")
async def mention_command(client: Client, message: Message):
    """
    Generate mention link for a user by their username, as in some Telegram clients.
    This command lets you specify an <b>optional</b> custom text for the link.
    <b>For example:</b> `<code>.mention @username.Text That Would Appear Instead of User's Name</code>`.
    """
    args = message.get_args(maximum=1)
    if not args:
        # no argument: replace the command with a usage hint that self-deletes
        await message.edit_text(
            "Pass the user you want to text-mention:\n<code>.mention @username.Optional text</code>",
            message_ttl=conf.default_ttl
        )
    else:
        # split "<username>.<custom text>" on the first dot only
        mention_parts = args[0].split(".", maxsplit=1)
        try:
            user: User = await client.get_users(mention_parts[0])
            text = None if len(mention_parts) == 1 else mention_parts[1]
            link = user.mention(text)
            entities = message.entities or message.caption_entities
            if entities:  # Check if there's any styled text in the message.text and apply it
                for entity in entities:
                    if entity.type == "bold":
                        link = f"<b>{link}</b>"
                    elif entity.type == "italic":
                        link = f"<i>{link}</i>"
                    elif entity.type == "strike":
                        link = f"<s>{link}</s>"
            await message.edit_text(link)
        except RPCError:
            # get_users failed — the username does not resolve
            await message.edit_text("Specified username is incorrect.", message_ttl=conf.default_ttl)
|
11504203
|
import tensorflow as tf
from examples.crystal_volume import optimize_crystal_volume as ocv
from tensorflow.keras import layers
from rlmolecule.crystal.builder import CrystalBuilder
from rlmolecule.crystal.crystal_state import CrystalState
from rlmolecule.crystal.preprocessor import CrystalPreprocessor
from rlmolecule.sql import Base, Session
from rlmolecule.sql.run_config import RunConfig
def policy_model_sequential(features: int = 64,
                            num_eles_and_stoich: int = 252,
                            num_crystal_sys: int = 7,
                            num_proto_strc: int = 4170,
                            ) -> tf.keras.Model:
    """Build a policy model over crystal-system and prototype-structure inputs.

    Fixes vs the previous revision:
      * `crystal_sys_class` / `proto_strc_class` were commented out but still
        referenced below (NameError) — the Input layers are defined again.
      * `Sequential.add(layers.Embedding(num_crystal_sys + 1), features,
        input_length=1)` passed invalid extra arguments to `add()`
        (TypeError), and that Sequential model was overwritten anyway —
        the dead branch is removed.

    :param features: size of the hidden layers / embeddings
    :param num_eles_and_stoich: kept for signature compatibility (unused here)
    :param num_crystal_sys: number of crystal systems
    :param num_proto_strc: number of prototype structures
    :return: a Keras model mapping [crystal_sys, proto_strc] to a scalar
    """
    crystal_sys_class = layers.Input(shape=[1], dtype=tf.int64, name='crystal_sys')
    proto_strc_class = layers.Input(shape=[1], dtype=tf.int64, name='proto_strc')
    input_tensors = [crystal_sys_class, proto_strc_class]

    # +1 leaves room for a padding/unknown index
    crystal_sys_embedding = layers.Embedding(
        num_crystal_sys + 1, features, name='crystal_sys_embedding')(crystal_sys_class)
    crystal_sys_output = layers.Dense(features, activation='relu')(crystal_sys_embedding)

    proto_strc_embedding = layers.Embedding(
        num_proto_strc + 1, features, name='proto_strc_embedding')(proto_strc_class)
    proto_strc_output = layers.Dense(features, activation='relu')(proto_strc_embedding)

    # Merge all available features into a single large vector via concatenation
    x = layers.concatenate([crystal_sys_output, proto_strc_output])
    global_state = layers.Dense(features, activation='relu')(x)
    output = layers.Dense(1)(global_state)

    return tf.keras.Model(input_tensors, output, name='policy_model')
def policy_model(features: int = 64,
                 num_eles_and_stoich: int = 252,
                 num_crystal_sys: int = 7,
                 num_proto_strc: int = 4170,
                 ) -> tf.keras.Model:
    """ Constructs a policy model that predicts value, pi_logits from a batch of molecule inputs. Main model used in
    policy training and loading weights

    :param features: Size of network hidden layers
    :param num_eles_and_stoich: vocabulary size of the element/stoichiometry embedding
    :param num_crystal_sys: number of crystal systems
    :param num_proto_strc: number of prototype structures
    :return: The constructed policy model
    """
    # Define inputs
    # 5 conducting ions, 8 anions, 17 framework cations, up to 8 elements in a composition.
    # conducting_ion_class = layers.Input(shape=[None], dtype=tf.int65, name='conducting_ion')
    # anion_class = layers.Input(shape=[None], dtype=tf.int65, name='anion')
    # framework_cation_class = layers.Input(shape=[None], dtype=tf.int65, name='framework_cation')
    # I will include the elements by themselves, and the elements with a stoichiometry e.g., 'Cl', 'Cl6'
    # TODO Many element stoichiometries are not present. For now I will just include all of them
    element_class = layers.Input(shape=[10], dtype=tf.int64, name='eles_and_stoich')
    # 7 crystal systems
    crystal_sys_class = layers.Input(shape=[], dtype=tf.int64, name='crystal_sys')
    # 4170 total prototype structures
    proto_strc_class = layers.Input(shape=[], dtype=tf.int64, name='proto_strc')
    input_tensors = [element_class, crystal_sys_class, proto_strc_class]

    element_embedding = layers.Embedding(
        input_dim=num_eles_and_stoich, output_dim=features,
        input_length=None, name='conducting_embedding')(element_class)
    print(element_embedding.shape)
    # sum the (up to 10) element embeddings into a single composition vector
    element_embedding = layers.Lambda(lambda x: tf.keras.backend.sum(x, axis=-2, keepdims=True),
                                      output_shape=lambda s: (s[-1],))(element_embedding)
    print(element_embedding.shape)
    element_embedding = layers.Reshape((features,))(element_embedding)
    print(element_embedding.shape)
    # embedding_dense = layers.Dense(features, activation='relu')(element_embedding)
    # print(embedding_dense.shape)
    # +1 on input_dim leaves room for the masked/padding index (mask_zero=True)
    crystal_sys_embedding = layers.Embedding(
        input_dim=num_crystal_sys + 1, output_dim=features,
        input_length=1, mask_zero=True, name='crystal_sys_embedding')(crystal_sys_class)
    print(crystal_sys_embedding.shape)
    proto_strc_embedding = layers.Embedding(
        input_dim=num_proto_strc + 1, output_dim=features,
        input_length=1, mask_zero=True, name='proto_strc_embedding')(proto_strc_class)
    print(proto_strc_embedding.shape)

    x = layers.concatenate([element_embedding, crystal_sys_embedding, proto_strc_embedding])
    # x = np.sum()
    # crystal_proto = layers.concatenate([crystal_sys_embedding, proto_strc_embedding])
    # crystal_proto_dense = layers.Dense(features, activation='relu')(crystal_proto)
    # max_pool = layers.GlobalMaxPooling1D()(crystal_proto_dense)
    # x = layers.concatenate(element_embedding + [max_pool])
    # elements_output = layers.Dense(features, activation='relu')(element_embedding)
    # crystal_sys_output = layers.Dense(features, activation='relu')(crystal_sys_embedding)
    # proto_strc_output = layers.Dense(features, activation='relu')(proto_strc_embedding)
    # Merge all available features into a single large vector via concatenation
    # x = layers.concatenate([elements_output, crystal_sys_output, proto_strc_output])
    # x = layers.concatenate([crystal_sys_model.output, proto_strc_model.output])
    global_state = layers.Dense(features, activation='relu')(x)
    output = layers.Dense(1)(global_state)

    return tf.keras.Model(input_tensors, output, name='policy_model')
def test_policy_model(features: int = 64,
                      num_eles_and_stoich: int = 252,
                      num_crystal_sys: int = 7,
                      num_proto_strc: int = 4170,
                      ) -> tf.keras.Model:
    """ Constructs a policy model that predicts value, pi_logits from a batch of molecule inputs. Main model used in
    policy training and loading weights

    Simplified variant: inputs are fixed-width (one-hot-style) vectors and go
    straight through Dense layers instead of Embedding lookups.

    :param features: Size of network hidden layers
    :return: The constructed policy model
    """
    # Define inputs
    # 5 conducting ions, 8 anions, 17 framework cations, up to 8 elements in a composition.
    # I will include the elements by themselves, and the elements with a stoichiometry e.g., 'Cl', 'Cl6'
    # TODO Many element stoichiometries are not present. For now I will just include all of them
    element_class = layers.Input(shape=[num_eles_and_stoich], dtype=tf.int64, name='eles_and_stoich')
    # 7 crystal systems
    crystal_sys_class = layers.Input(shape=[num_crystal_sys], dtype=tf.int64, name='crystal_sys')
    # 4170 total prototype structures
    proto_strc_class = layers.Input(shape=[num_proto_strc], dtype=tf.int64, name='proto_strc')
    input_tensors = [element_class, crystal_sys_class, proto_strc_class]

    # element_embedding = layers.Embedding(
    #     num_eles_and_stoich, features, name='conducting_embedding')(element_class)
    # elements_output = layers.Dense(features, activation='relu')(element_embedding)
    elements_output = layers.Dense(features // 3, activation='relu')(element_class)
    ## TODO don't need an embedding because the number of crystal systems is small(?). Just use a one-hot encoding
    # crystal_sys_embedding = layers.Embedding(
    #     num_crystal_sys, features, name='crystal_sys_embedding')(crystal_sys_class)
    # crystal_sys_output = layers.Dense(features, activation='relu')(crystal_sys_embedding)
    crystal_sys_output = layers.Dense(features // 3, activation='relu')(crystal_sys_class)
    # proto_strc_embedding = layers.Embedding(
    #     num_proto_strc, features, name='proto_strc_embedding')(proto_strc_class)
    # proto_strc_output = layers.Dense(features, activation='relu')(proto_strc_embedding)
    proto_strc_output = layers.Dense(features // 3, activation='relu')(proto_strc_class)

    # Merge all available features into a single large vector via concatenation
    # NOTE(review): axis=0 concatenates along the batch axis — looks like the
    # feature axis was intended; confirm before relying on this variant
    x = layers.Concatenate(axis=0)([elements_output, crystal_sys_output, proto_strc_output])
    global_state = layers.Dense(features, activation='relu')(x)
    output = layers.Dense(1)(global_state)

    return tf.keras.Model(input_tensors, output, name='policy_model')
# --- module-level smoke test: build the problem's policy model and feed it
# --- states walked down the crystal action tree (runs on import)
run_config = RunConfig(None)
ocv.run_config = run_config
engine = run_config.start_engine()
Base.metadata.create_all(engine, checkfirst=True)
Session.configure(bind=engine)
session = Session()
ocv.engine = engine

# now try passing each of these through the policy model to see if they work
# problem = CrystalVolOptimizationProblem(engine)
problem = ocv.create_problem()
# model = policy_model()
model = problem.policy_model()
print(model.summary())

preprocessor = CrystalPreprocessor()
# this state will have elements, composition, crystal system, and structure
# print(model.predict(policy_inputs))
# problem = ocv.create_problem()
# builder = problem.builder
builder = CrystalBuilder()
root = 'root'
state = CrystalState(root)
# print(state.get_next_actions())
state = CrystalState('Li')
print(state)
policy_inputs = preprocessor.construct_feature_matrices(state)
print(policy_inputs)
print(model(policy_inputs))

# walk down the tree, always taking the last available action, and check
# that every visited state's features pass through the model
while state.terminal is False:
    next_actions = state.get_next_actions(builder)
    state = next_actions[-1]
    print(state)
    policy_inputs = preprocessor.construct_feature_matrices(state)
    print(policy_inputs)
    print(model(policy_inputs))
|
11504206
|
from setuptools import setup

# Packaging metadata for the `dnd.py` command-line script
# (author fields are templated placeholders in this copy).
setup(
    name='dnd.py',
    version='1.0',
    description='Do not distract yourself!',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/papachristoumarios/dnd.py',
    scripts=['dnd.py']
)
|
11504279
|
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
def parse_label_map(path_to_labels: str):
'''
- Arguments:
- path_to_labels (str): path to pbtx file
- Returns:
- dict of form { id(int) : label(str)}
'''
with open(path_to_labels, "r") as f:
text = f.read()
entry_pairs = []
a = text.find('item')
while a != -1:
b = text.find('id:', a)
b1 = text.find('\n', b)
index = int(text[b + len("id:"): b1])
c = text.find('display_name:', a)
c1 = max(text.find("\'", c), text.find('\"', c))
c2 = max(text.find("\'", c1 + 1), text.find('\"', c1 + 1))
klass_name = text[c1 + 1: c2]
entry_pairs.append((index, klass_name))
a = text.find('item', c)
return dict(entry_pairs)
|
11504321
|
import os
import subprocess
import sys
import click
from hatch.commands.utils import (
CONTEXT_SETTINGS, echo_failure, echo_info, echo_success, echo_waiting,
echo_warning
)
from hatch.config import get_proper_pip, get_venv_dir
from hatch.env import install_packages
from hatch.utils import (
NEED_SUBPROCESS_SHELL, ON_WINDOWS, get_admin_command, get_requirements_file,
is_project, venv_active
)
from hatch.venv import create_venv, is_venv, venv
@click.command(context_settings=CONTEXT_SETTINGS, short_help='Uninstalls packages')
@click.argument('packages', nargs=-1)
@click.option('-nd', '--no-detect', is_flag=True,
              help=(
                  "Disables the use of a project's dedicated virtual env. "
                  'This is useful if you need to be in a project root but '
                  'wish to not target its virtual env.'
              ))
@click.option('-e', '--env', 'env_name', help='The named virtual env to use.')
@click.option('-g', '--global', 'global_uninstall', is_flag=True,
              help=(
                  'Uninstalls globally, rather than on a per-user basis. This '
                  'has no effect if a virtual env is in use.'
              ))
@click.option('--admin', is_flag=True,
              help=(
                  'When --global is selected, this assumes admin rights are '
                  'already enabled and therefore sudo/runas will not be used.'
              ))
@click.option('-d', '--dev', is_flag=True,
              help='When locating a requirements file, only use the dev version.')
@click.option('-q', '--quiet', is_flag=True, help='Decreases verbosity.')
@click.option('-y', '--yes', is_flag=True,
              help='Confirms the intent to uninstall without a prompt.')
def uninstall(packages, no_detect, env_name, global_uninstall, admin, dev, quiet, yes):
    """If the option --env is supplied, the uninstall will be applied using
    that named virtual env. Unless the option --global is selected, the
    uninstall will only affect the current user. Of course, this will have
    no effect if a virtual env is in use. The desired name of the admin
    user can be set with the `_DEFAULT_ADMIN_` environment variable.
    With no packages selected, this will uninstall using a `requirements.txt`
    or a dev version of that in the current directory.
    If no --env is chosen, this will attempt to detect a project and use its
    virtual env before resorting to the default pip. No project detection
    will occur if a virtual env is active.
    """
    # No packages given: fall back to a requirements file (or its dev
    # variant) in the current working directory.
    if not packages:
        reqs = get_requirements_file(os.getcwd(), dev=dev)
        if not reqs:
            echo_failure('Unable to locate a requirements file.')
            sys.exit(1)
        packages = ['-r', reqs]
    # Windows' `runas` allows only a single argument for the
    # command so we catch this case and turn our command into
    # a string later.
    windows_admin_command = None
    # Pre-answer pip's per-package confirmation prompt.
    if yes: # no cov
        packages = ['-y', *packages]
    # Case 1: uninstall inside an explicitly named virtual env.
    if env_name:
        venv_dir = os.path.join(get_venv_dir(), env_name)
        if not os.path.exists(venv_dir):
            echo_failure('Virtual env named `{}` does not exist.'.format(env_name))
            sys.exit(1)
        with venv(venv_dir):
            command = [get_proper_pip(), 'uninstall', *packages] + (['-q'] if quiet else [])
            echo_waiting('Uninstalling in virtual env `{}`...'.format(env_name))
            result = subprocess.run(command, shell=NEED_SUBPROCESS_SHELL)
    # Case 2: a project was detected; use (and create on demand) its venv.
    elif not venv_active() and not no_detect and is_project():
        venv_dir = os.path.join(os.getcwd(), 'venv')
        if not is_venv(venv_dir):
            echo_info('A project has been detected!')
            echo_waiting('Creating a dedicated virtual env... ', nl=False)
            create_venv(venv_dir)
            echo_success('complete!')
            with venv(venv_dir):
                echo_waiting('Installing this project in the virtual env... ', nl=False)
                install_packages(['-q', '-e', '.'])
                echo_success('complete!')
            # A brand-new venv cannot contain the requested packages.
            echo_warning('New virtual envs have nothing to uninstall, exiting...')
            sys.exit(2)
        with venv(venv_dir):
            command = [get_proper_pip(), 'uninstall', *packages] + (['-q'] if quiet else [])
            echo_waiting('Uninstalling for this project...')
            result = subprocess.run(command, shell=NEED_SUBPROCESS_SHELL)
    # Case 3: plain pip for the user, or system-wide with elevated rights.
    else:
        command = [get_proper_pip(), 'uninstall'] + (['-q'] if quiet else [])
        if not venv_active() and global_uninstall: # no cov
            if not admin:
                if ON_WINDOWS:
                    windows_admin_command = get_admin_command()
                else:
                    command = get_admin_command() + command
        command.extend(packages)
        # runas takes the whole pip invocation as one string argument.
        if windows_admin_command: # no cov
            command = windows_admin_command + [' '.join(command)]
        echo_waiting('Uninstalling...')
        result = subprocess.run(command, shell=NEED_SUBPROCESS_SHELL)
    # Propagate pip's exit status to the caller.
    sys.exit(result.returncode)
|
11504334
|
from termpixels.util import corners_to_box
from termpixels.util import set_ambiguous_is_wide
from termpixels.util import terminal_char_len
from termpixels.util import terminal_len
from termpixels.util import terminal_printable
from termpixels.util import splitlines_print
from termpixels.util import wrap_text
from unicodedata import east_asian_width
import pytest
# some characters with known properties
control_char = "\b"
narrow_char = "A"
wide_char = "ᄀ"
ambiguous_char = "§"
# Sanity-check the Unicode east-asian-width classes the tests below rely on.
assert east_asian_width(narrow_char) == "Na"
assert east_asian_width(wide_char) == "W"
assert east_asian_width(ambiguous_char) == "A"
def test_corners_to_box():
    # corners_to_box turns two inclusive corner points into (x, y, w, h).
    assert corners_to_box(0, 0, 0, 0) == (0, 0, 1, 1)
    assert corners_to_box(0, 0, 1, 1) == (0, 0, 2, 2)
    assert corners_to_box(1, 1, 0, 0) == (0, 0, 2, 2)  # corner order is irrelevant
    assert corners_to_box(5, 6, 7, 8) == (5, 6, 3, 3)
    assert corners_to_box(3, 3, -3, -3) == (-3, -3, 7, 7)  # negative coordinates
def test_terminal_char_len_narrow():
    # Narrow (halfwidth) characters occupy one terminal cell.
    assert terminal_char_len(narrow_char) == 1
def test_terminal_char_len_wide():
    # Wide (fullwidth) characters occupy two cells.
    assert terminal_char_len(wide_char) == 2
def test_terminal_char_len_control():
    # Control characters occupy no cells.
    assert terminal_char_len(control_char) == 0
def test_terminal_char_len_tab():
    # no way to tell tab width without context
    assert terminal_char_len("\t") == None
def test_terminal_char_len_ambiguous():
    # Ambiguous-width characters follow the module-level wide/narrow setting.
    set_ambiguous_is_wide(False)
    assert terminal_char_len(ambiguous_char) == 1
    set_ambiguous_is_wide(True)
    assert terminal_char_len(ambiguous_char) == 2
def test_terminal_len():
    # Total string width must equal the sum of per-character cell widths.
    test_string = "你好 - Hello"
    assert terminal_len(test_string) == sum(terminal_char_len(i) for i in test_string)
def test_terminal_printable():
    # Ordinary narrow and wide characters are printable.
    assert terminal_printable(narrow_char)
    assert terminal_printable(wide_char)
def test_terminal_printable_control():
    # Control characters are not printable.
    assert not terminal_printable(control_char)
def test_splitlines_print_unix():
    # Both LF and bare CR count as line breaks.
    assert splitlines_print("a\nb") == ["a", "b"]
    assert splitlines_print("a\rb") == ["a", "b"]
def test_splitlines_print_windows():
    # CRLF counts as a single line break.
    assert splitlines_print("a\r\nb") == ["a", "b"]
def test_splitlines_print_empty_lines():
    # Consecutive breaks yield empty strings rather than being collapsed.
    assert splitlines_print("a\n\nb") == ["a", "", "b"]
    assert splitlines_print("a\n\r\n\nb") == ["a","", "", "b"]
def test_wrap_text():
    # Wrapping breaks on spaces; the trailing space stays on the broken line.
    assert wrap_text("a b c d e", 3) == "a b\nc d\ne"
    assert wrap_text("alpha beta", 6) == "alpha \nbeta"
def test_wrap_text_fullwidth():
    # Fullwidth characters count as two cells when wrapping.
    assert wrap_text("你好", 2) == "你\n好"
|
11504348
|
import sys
from imutils import paths
import cv2
from PIL import Image
sys.path.insert(0, '/home/ihab/ihabgit/zevision')
import lib.util as predict
# Run object recognition over every image in db_test/ and save annotated
# copies under results/ using the zevision lib.util helpers.
imagePaths = list(paths.list_images("db_test"))
for image in imagePaths:
    # Detect objects in this image file.  The exact structure of `response`
    # is defined by lib.util.recognize_objects — TODO confirm (appears to
    # carry bounding boxes consumed by draw_object_boxes below).
    response = predict.recognize_objects(image)
    print(response)
    print("\n\n\n\n")
    # test_image = Image.open(image)
    test_image = cv2.imread(image)
    # Overlay the detected boxes on the image array.
    test_image = predict.draw_object_boxes(test_image,response)
    # result_image = Image.fromarray(test_image)
    # result_image.save("results/"+image.split('/')[-1])
    predict.save_image(image,test_image,"results/")
|
11504397
|
import argparse
import time
from django.core.management import BaseCommand
from django.conf import settings
import botocore
import boto3
from collections import deque
from voter.utils import process_new_zip, out
# Anonymous S3 client: the NCSBE bucket is public, so request signing is
# disabled entirely and no AWS credentials are needed.
s3client = boto3.client('s3')
s3client.meta.events.register('choose-signer.s3.*', botocore.handlers.disable_signing)
class Command(BaseCommand):
    help = """Fetch historical snapshots of voter data from NCSBE.gov
    E.g.
    no arg: download all available files that we have not already downloaded,
      then exit.
    --loop=N: download all available files that we have not already downloaded,
      then wait N minutes and start over.
    """
    def add_arguments(self, parser):
        # Don't rewrap the text in the help/description:
        parser.formatter_class = argparse.RawDescriptionHelpFormatter
        parser.add_argument(
            '--loop', action='store', type=int, default=0,
            help='After downloading, wait this many minutes and start over. Default is to stop after downloading.'
        )
        parser.add_argument(
            '--all', action='store_true', default=False,
            help='Download all available files. Default is to just download the first one that we have not'
                 'already downloaded.'
        )
        parser.add_argument(
            '--quiet',
            action='store_true',
            dest='quiet',
            help='Do not output updates or progress while running',
        )
    def handle(self, *args, **options):
        # NOTE(review): the --all flag is registered above but never read
        # here — every listed snapshot is processed each pass regardless.
        # Confirm whether --all was meant to gate the loop below.
        output = not options.get('quiet')
        out("Fetching voter files...", output)
        while True:
            # List every snapshot zip in the public NCSBE S3 bucket.
            objects = s3client.list_objects(Bucket='dl.ncsbe.gov', Prefix='data/Snapshots/')
            filename_list = []
            for i in objects['Contents']:
                filename = i['Key'].split('/')[-1]
                ok = filename.endswith('.zip')
                if ok:
                    filename_list.append(filename)
            filename_list = sorted(filename_list)
            snapshots = deque()
            for l in filename_list:
                out(l, output)
                snapshots.append(settings.NCVOTER_HISTORICAL_SNAPSHOT_URL + l.strip())
            # Download/process each snapshot URL in sorted (oldest-first) order.
            while len(snapshots) > 0:
                url = snapshots.popleft()
                process_new_zip(url, settings.NCVOTER_DOWNLOAD_PATH, "ncvoter", output=output)
            if not options['loop']:
                break
            else:  # pragma: no cover (infinite loop)
                minutes = options['loop']
                out("Sleep %d minutes..." % minutes, output)
                time.sleep(60 * minutes)
|
11504480
|
import subprocess
import pandas as pd
# Snakemake script: trim barcodes/primers from reads with cutadapt, or just
# rename the inputs when the primers were already removed upstream (prim_rm).
# na_filter=False keeps empty cells as "" (not NaN) so string concatenation
# of primer + barcode stays safe.
primer_table = pd.read_csv(snakemake.input.primer_t, index_col="Probe",
                           na_filter=False).to_dict("index")

# Key into the primer table for this sample/unit pair.
probe_key = snakemake.wildcards.sample + "_" + snakemake.wildcards.unit

# The specific primers are needed by every cutadapt branch below.
# BUGFIX: previously they were only assigned when bar_removed was False,
# which raised a NameError in the bar_removed=True cutadapt branches.
r1_primer = primer_table[probe_key]["specific_forward_primer"]
r2_primer = primer_table[probe_key]["specific_reverse_primer"]
if not snakemake.params.bar_removed:
    # Barcodes are only trimmed when they were not removed upstream.
    r1_barcode = primer_table[probe_key]["Barcode_forward"]
    r2_barcode = primer_table[probe_key]["Barcode_reverse"]

# cutadapt's stdout goes to the rule's log file.
# NOTE: "-j 0" is passed as a single argv token; cutadapt's argparse still
# parses it as -j with value 0 (use all cores) — kept as-is.
with open(str(snakemake.log), "w") as logfile:
    if snakemake.params.paired_end:
        if snakemake.params.prim_rm:
            # Primers already removed: just move the inputs into place.
            subprocess.call(["mv", snakemake.input[0], snakemake.output[0]])
            subprocess.call(["mv", snakemake.input[1], snakemake.output[1]])
        elif not snakemake.params.bar_removed:
            # Trim "<primer><barcode>" from the 5' end of both mates.
            subprocess.call(["cutadapt", "-j 0", "-m", str(snakemake.params.minlen), "-M", str(snakemake.params.maxlen),
                             "-g", r1_primer + r1_barcode, "-G", r2_primer + r2_barcode, "-o",
                             snakemake.output[0], "-p", snakemake.output[1], snakemake.input[0], snakemake.input[1]], stdout=logfile)
        else:
            # Barcodes already gone: trim only the primers.
            subprocess.call(["cutadapt", "-j 0", "-m", str(snakemake.params.minlen), "-M", str(snakemake.params.maxlen),
                             "-g", r1_primer, "-G", r2_primer, "-o", snakemake.output[0], "-p",
                             snakemake.output[1], snakemake.input[0], snakemake.input[1]], stdout=logfile)
    else:
        # Single-end variants of the same three branches.
        if snakemake.params.prim_rm:
            subprocess.call(["mv", snakemake.input[0], snakemake.output[0]])
        elif not snakemake.params.bar_removed:
            subprocess.call(["cutadapt", "-j 0", "-m", str(snakemake.params.minlen), "-M", str(snakemake.params.maxlen),
                             "-g", r1_primer + r1_barcode, "-o", snakemake.output[0], snakemake.input[0]], stdout=logfile)
        else:
            subprocess.call(["cutadapt", "-j 0", "-m", str(snakemake.params.minlen), "-M", str(snakemake.params.maxlen),
                             "-g", r1_primer, "-o", snakemake.output[0], snakemake.input[0]], stdout=logfile)
|
11504512
|
import re
class TestLang:
    """Tiny parser for a whitespace-separated token language.

    Each token is a run of letters followed by a run of digits; either part
    may be empty (e.g. "ab12", "cd", "34").
    """

    # Letters then digits; both groups may be empty.  Precompiled once
    # instead of recompiling per token.
    _TOKEN = re.compile('([A-Za-z]*)([0-9]*)')

    def __init__(self, test=""):
        self.test = test

    def parse(self):
        """Parse ``self.test`` and return a list of (letters, number) pairs.

        The number element is an int when digits are present, otherwise the
        empty string.  Raises Exception for any token that is not
        letters-then-digits.  (Previously the parsed values were computed
        and silently discarded; returning them is backward-compatible.)
        """
        parsed = []
        for token in self.test.split(' '):
            letters, numbers = self._TOKEN.match(token).groups()
            if letters + numbers != token:
                raise Exception("Bad token: %s" % token)
            if numbers != '':
                numbers = int(numbers)
            parsed.append((letters, numbers))
        return parsed
|
11504519
|
import cv2
import xml.etree.ElementTree as ET
from GeoPointCloud import GeoPointCloud
import json
import math
import numpy as np
import overpy
import time
import tgc_definitions
status_print_duration = 1.0 # Print progress every N seconds
# Per-feature spline style overrides (a JSON-derived dict); set by
# addOSMToTGC from the caller-supplied configuration, None = built-in defaults.
spline_configuration = None
# Returns left, top, right, bottom
def nodeBoundingBox(nds):
    """Axis-aligned bounds of (x, y, z) points as (left, top, right, bottom).

    Uses the x and z components only; y (elevation) is ignored.
    """
    xs = [point[0] for point in nds]
    zs = [point[2] for point in nds]
    return (min(xs), max(zs), max(xs), min(zs))
def shapeCenter(nds):
    """Center (x, z) of the bounding box of the given points."""
    left, top, right, bottom = nodeBoundingBox(nds)
    return ((left + right) / 2.0, (top + bottom) / 2.0)
def getwaypoint(easting, vertical, northing):
    """Build a 2D spline waypoint dict at (easting, northing).

    The control handles (pointOne/pointTwo) start at the origin and are
    positioned later by completeSpline; `vertical` is accepted but unused.
    """
    return {
        "pointOne": {"x": 0.0, "y": 0.0},
        "pointTwo": {"x": 0.0, "y": 0.0},
        "waypoint": {"x": easting, "y": northing},
    }
def getwaypoint3D(x, y, z):
    """Build a 3D point dict with the given coordinates."""
    return {"x": x, "y": y, "z": z}
def getTangentAngle(previous_point, next_point):
    """Angle (radians) of the vector from previous_point to next_point.

    Points are dicts with "x"/"y" entries; values are coerced to float so
    string-valued coordinates also work.
    """
    delta_y = float(next_point["y"]) - float(previous_point["y"])
    delta_x = float(next_point["x"]) - float(previous_point["x"])
    return math.atan2(delta_y, delta_x)
def completeSpline(points, spline_json, handleLength=1.0, is_clockwise=True, tightSplines=True):
    """Fill in the pointOne/pointTwo control handles for every waypoint of
    spline_json, in place.

    Handles are placed relative to the tangent through each waypoint's
    neighbors; tight splines pull them perpendicular and inward so imported
    shapes are neither smoothed nor expanded.  `points` is unused here.
    """
    number_points = len(spline_json["waypoints"])
    for i in range(0, number_points):
        prev_index = i - 1 # Works for negative
        next_index = i + 1
        if next_index == number_points:
            next_index = 0
        p = spline_json["waypoints"][prev_index]["waypoint"]
        t = spline_json["waypoints"][i]["waypoint"]
        n = spline_json["waypoints"][next_index]["waypoint"]
        # Just guessing what these points are and if they are important
        # Set point one and point two to be on the line between the previous and next point, but centered on this point
        angle = getTangentAngle(p, n)
        if tightSplines:
            # Pull the spline handles perpendicular and inside the shape in order to accurately
            # represent the shapes downloaded online. Don't want a lot of expansion or smoothing
            angle_one = angle - 1.1 * math.pi / 2.0
            angle_two = angle - 0.9 * math.pi / 2.0
            # Clockwise splines appear to point inward by default, this is what we want
            if not is_clockwise:
                # Flip handles inwards
                angle_temp = angle_one
                angle_one = angle_two + math.pi
                angle_two = angle_temp + math.pi
        else:
            # Loose, smooth splines
            angle_one = angle + math.pi
            angle_two = angle
        # TODO Use angle to center to guarantee these point inwards? I see them pointing out sometimes
        spline_json["waypoints"][i]["pointOne"]["x"] = t["x"] + handleLength * math.cos(angle_one)
        spline_json["waypoints"][i]["pointOne"]["y"] = t["y"] + handleLength * math.sin(angle_one)
        spline_json["waypoints"][i]["pointTwo"]["x"] = t["x"] + handleLength * math.cos(angle_two)
        spline_json["waypoints"][i]["pointTwo"]["y"] = t["y"] + handleLength * math.sin(angle_two)
def splineIsClockWise(spline_json):
    """Shoelace-style orientation test for a spline's waypoint polygon.

    Returns True when the signed edge sum is non-negative (clockwise).
    https://stackoverflow.com/questions/1165647/how-to-determine-if-a-list-of-polygon-points-are-in-clockwise-order
    """
    pts = [w["waypoint"] for w in spline_json["waypoints"]]
    signed_area = sum(
        (pts[i]["x"] - pts[i - 1]["x"]) * (pts[i]["y"] + pts[i - 1]["y"])
        for i in range(len(pts))
    )
    return signed_area >= 0.0
def shrinkSplineNormals(spline_json, shrink_distance=1.0, is_clockwise=True):
    """Move every waypoint of spline_json inward along its normal by
    shrink_distance (in place) and return spline_json.

    The game widens splines by a fixed amount when rendering, so shrinking
    first keeps the rendered edge on the original outline.  A falsy
    shrink_distance is a no-op.
    """
    if not shrink_distance:
        return spline_json
    number_points = len(spline_json["waypoints"])
    for i in range(0, number_points):
        prev_index = i - 1 # Works for negative
        next_index = i + 1
        if next_index == number_points:
            next_index = 0
        p = spline_json["waypoints"][prev_index]["waypoint"]
        t = spline_json["waypoints"][i]["waypoint"]
        n = spline_json["waypoints"][next_index]["waypoint"]
        tangent_angle = getTangentAngle(p, n)
        # Move the spline points along the normal to the inside of the shape
        # Since the game expands splines by a fixed amount, we need to shrink the shape by a set amount
        normal_angle = tangent_angle - math.pi/2.0
        # Clockwise splines appear to point inward by default, this is what we want
        if not is_clockwise:
            # Flip normal inwards
            normal_angle = normal_angle + math.pi
        # Now shift the spline point by shrink_distance in the direction of normal_angle
        t["x"] += math.cos(normal_angle)*shrink_distance
        t["y"] += math.sin(normal_angle)*shrink_distance
    return spline_json
def newSpline(points, pathWidth=0.01, shrink_distance=None, handleLength=0.5, tightSplines=True, secondarySurface="", secondaryWidth=0.0, spline_json=None):
    """Build a TGC surface-spline dict from a list of TGC points.

    spline_json is an optional per-feature configuration dict whose entries
    override the keyword defaults.  Waypoints are shrunk inward (by half of
    pathWidth unless shrink_distance is given) before the control handles
    are set, because the game widens splines when rendering.
    """
    spline = json.loads('{"surface": 1, \
                    "secondarySurface": 11, \
                    "secondaryWidth": -1.0, \
                    "waypoints": [], \
                    "width": 0.01, \
                    "state": 3, \
                    "ClosedPath": false, \
                    "isClosed": true, \
                    "isFilled": true \
                    }')
    try:
        if spline_json is not None:
            pathWidth = spline_json.get("pathWidth", pathWidth)
            handleLength = spline_json.get("handleLength", handleLength)
            tightSplines = spline_json.get("tightSplines", tightSplines)
            secondarySurface = spline_json.get("secondarySurface", secondarySurface)
            secondaryWidth = spline_json.get("secondaryWidth", secondaryWidth)
    except:
        print("Invalid Spline configuration: " + str(spline_json))
    spline["width"] = pathWidth
    spline["secondarySurface"] = tgc_definitions.featuresToSurfaces.get(secondarySurface, 11)
    spline["secondaryWidth"] = secondaryWidth
    for p in points:
        spline["waypoints"].append(getwaypoint(*p))
    # Determine direction of spline
    is_clockwise = splineIsClockWise(spline)
    # Reduce spline normal distance (move points inwards) by half of width
    # This compensates for the game treating all splines like filled cartpaths
    if shrink_distance is None:
        shrink_distance = pathWidth/2.0
    spline = shrinkSplineNormals(spline, shrink_distance=shrink_distance, is_clockwise=is_clockwise)
    # Now that spline is shrunk, set the handles according to the properties we want
    completeSpline(points, spline, handleLength=handleLength, is_clockwise=is_clockwise, tightSplines=tightSplines)
    return spline
def newBunker(points):
    """Bunker spline: very tight curves with a heavy-rough fringe."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("bunker", None)
    # Very tight shaped to make complex curves
    bunker = newSpline(points, pathWidth=0.01, handleLength=1.0, tightSplines=True, secondarySurface="heavyrough", secondaryWidth=2.5, spline_json=spline_json)
    bunker["surface"] = tgc_definitions.featuresToSurfaces["bunker"]
    return bunker
def newGreen(points):
    """Green spline with a heavy-rough collar."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("green", None)
    green = newSpline(points, pathWidth = 1.7, handleLength=0.2, tightSplines=True, secondarySurface="heavyrough", secondaryWidth=2.5, spline_json=spline_json)
    green["surface"] = tgc_definitions.featuresToSurfaces["green"]
    return green
def newTeeBox(points):
    """Tee box spline; rendered with the green surface."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("teebox", None)
    teebox = newSpline(points, pathWidth = 1.7, handleLength=0.2, tightSplines=True, secondarySurface="heavyrough", secondaryWidth=2.5, spline_json=spline_json)
    teebox["surface"] = tgc_definitions.featuresToSurfaces["green"]
    return teebox
def newFairway(points):
    """Fairway spline: loose/smooth with a rough border."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("fairway", None)
    fw = newSpline(points, pathWidth = 3.0, handleLength=3.0, tightSplines=False, secondarySurface="rough", secondaryWidth=5.0, spline_json=spline_json)
    fw["surface"] = tgc_definitions.featuresToSurfaces["fairway"]
    return fw
def newRough(points):
    """Rough spline; no secondary surface."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("rough", None)
    rh = newSpline(points, pathWidth = 1.7, handleLength=3.0, tightSplines=False, secondarySurface="", secondaryWidth=0.0, spline_json=spline_json)
    # Game outputs secondary as 1
    # Remove with 0 width
    rh["surface"] = tgc_definitions.featuresToSurfaces["rough"]
    return rh
def newHeavyRough(points):
    """Heavy-rough spline; no secondary surface."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("heavyrough", None)
    hr = newSpline(points, pathWidth = 1.7, handleLength=3.0, tightSplines=False, secondarySurface="", secondaryWidth=0.0, spline_json=spline_json)
    # Game outputs secondary as 1
    # Remove with 0 width
    hr["surface"] = tgc_definitions.featuresToSurfaces["heavyrough"]
    return hr
def newCartPath(points, area=False):
    """Cart path spline; open path by default, filled area when area=True."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("cartpath", None)
    pathWidth = 2.0
    shrink_distance = 0.0
    if area:
        shrink_distance = None # Automatic shrink_distance
    cp = newSpline(points, pathWidth=pathWidth, shrink_distance=shrink_distance, handleLength=4.0, tightSplines=False, secondarySurface="", secondaryWidth=0.0, spline_json=spline_json) # Smooth a lot
    # Cartpath is surface 10 (this is the one with Cartpath logo in Designer)
    # Remove secondary with 0 width
    cp["surface"] = tgc_definitions.featuresToSurfaces["cartpath"] # Cartpath, Surface #3
    # 0 is 'not closed' and 3 is 'closed and filled' maybe a bitmask?
    if area:
        cp["state"] = 3
        cp["isClosed"] = True
        cp["isFilled"] = True
    else:
        cp["state"] = 0 # Todo figure out what this means
        cp["isClosed"] = False
        cp["isFilled"] = False
    return cp
def newWalkingPath(points, area=False):
    """Walking path spline; drawn as Surface #1 for visibility."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("walkingpath", None)
    # Minimum width that will render in meters
    pathWidth = 1.7
    shrink_distance = 0.0
    if area:
        shrink_distance = None # Automatic shrink_distance
    wp = newSpline(points, pathWidth=pathWidth, shrink_distance=shrink_distance, handleLength=2.0, tightSplines=False, secondarySurface="rough", secondaryWidth=0.0, spline_json=spline_json)
    # Make walking paths Surface #1 for visibility
    # User can switch to green/fairway/rough depending on taste
    # Remove secondary with 0 width
    wp["surface"] = tgc_definitions.featuresToSurfaces["surface1"]
    if area:
        wp["state"] = 3
        wp["isClosed"] = True
        wp["isFilled"] = True
    else:
        wp["state"] = 0 # Todo figure out what this means
        wp["isClosed"] = False
        wp["isFilled"] = False
    return wp
def newWaterHazard(points, area=True):
    """Water hazard placeholder spline (filled mulch area or smooth creek)."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("placeholder", None)
    # Add placeholder for water hazard.
    # Add spline and fill with black mulch
    if area:
        # No width, only very detailed fill shape
        wh = newSpline(points, pathWidth = 0.01, handleLength=0.2, tightSplines=True, secondarySurface="", secondaryWidth=0.0, spline_json=spline_json)
    else:
        # Make smooth creek or waterway
        wh = newSpline(points, pathWidth=2.0, shrink_distance=0.0, tightSplines=False, secondarySurface="", secondaryWidth=0.0, spline_json=None)
    # Fill as mulch/surface #2 as a placeholder
    wh["surface"] = tgc_definitions.featuresToSurfaces["surface2"]
    if area:
        wh["state"] = 3
        wh["isClosed"] = True
        wh["isFilled"] = True
    else:
        wh["state"] = 0 # Todo figure out what this means
        wh["isClosed"] = False
        wh["isFilled"] = False
    return wh
def newBuilding(points):
    """Building placeholder spline filled with Surface #1."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("placeholder", None)
    # Add placeholder for buildings
    # Add spline and fill with gravel
    # No width, only very detailed fill shape
    b = newSpline(points, pathWidth = 0.01, handleLength=0.2, tightSplines=True, secondarySurface="", secondaryWidth=0.0, spline_json=spline_json)
    # Fill as a placeholder
    b["surface"] = tgc_definitions.featuresToSurfaces["surface1"]
    return b
def newForest(points):
    """Forest (OSM natural:wood) placeholder spline filled with Surface #1."""
    global spline_configuration
    spline_json = None
    if spline_configuration is not None:
        spline_json = spline_configuration.get("placeholder", None)
    # Add placeholder spline for naturaL:wood in OSM
    # Add spline and fill with gravel
    # No width, only very detailed fill shape
    f = newSpline(points, pathWidth = 0.01, handleLength=0.2, tightSplines=True, secondarySurface="", secondaryWidth=0.0, spline_json=spline_json)
    # Fill as a placeholder
    f["surface"] = tgc_definitions.featuresToSurfaces["surface1"]
    return f
def newTree(point):
    """Map a TGC point to an (x, z, radius, height) tree tuple.

    Radius and height are fixed generic values; OSM tree nodes carry no
    size information.
    """
    generic_radius = 7.0
    generic_height = 10.0
    return (point[0], point[2], generic_radius, generic_height)
def addHalfwayPoint(points):
    """Return (first, midpoint, last) for a sequence of 3D points.

    The midpoint is the component-wise average of the first and last
    points; any intermediate points are discarded.
    """
    start = points[0]
    end = points[-1]
    midpoint = tuple((start[axis] + end[axis]) / 2.0 for axis in range(3))
    return (start, midpoint, end)
def newHole(userpar, points):
    """Build a hole JSON from an ordered list of TGC points (tee ... pin).

    Returns None when fewer than 2 points are supplied.  Exactly three
    waypoints are kept (start, one middle waypoint, end) since the game
    supports no more; a 2-point hole gets a synthesized halfway waypoint.
    """
    hole = json.loads('{"waypoints": [], "teePositions": [],"pinPositions": [{"x": 0.0,"y": 0.0,"z": 0.0}],"greenRadius": 0.0,"teeRadius": 0.0,"fairwayRadius": 0.0, \
            "fairwayStart": 0.0,"fairwayEnd": 0.0,"fairwayNoiseScale": -1.0,"roughRadius": 0.0,"heavyRoughRadius": 0.0,"hazardGreenCount": 0.0,"hazardFairwayCount": 0.0, \
            "hazardFairwayPeriod": -1.0,"teeHeight": -1.0, "greenSeed": 206208328, "fairwaySeed": 351286870,"teeTexture": -1, \
            "creatorDefinedPar": -1, "name": "","flagOffset": {"x": 0.0,"y": 0.0},"par": 4}')
    hole["creatorDefinedPar"] = userpar
    if len(points) < 2: # Minimum needed points
        return None
    elif len(points) == 2: # Autogenerated courses put the waypoint halfway between teebox and green.
        points = addHalfwayPoint(points)
    elif len(points) > 3: # Game only supports start point, waypoint, and endpoint
        points = [points[0], points[1], points[-1]]
    for p in points:
        hole["waypoints"].append(getwaypoint3D(p[0], 0.0, p[2]))
    # Single tee at the first point; elevation is left at 0.
    hole["teePositions"].append(getwaypoint3D(points[0][0], 0.0, points[0][2]))
    return hole
def getOSMData(bottom_lat, left_lon, top_lat, right_lon, printf=print):
    """Query the Overpass API for all nodes and ways inside the given
    lat/lon bounding box.

    Returns an overpy result object, or None when the Overpass servers are
    too busy.  printf is used for progress/error reporting.
    """
    op = overpy.Overpass()
    # Order is South, West, North, East
    coord_string = str(bottom_lat) + "," + str(left_lon) + "," + str(top_lat) + "," + str(right_lon)
    try:
        query = "(node(" + coord_string + ");way(" + coord_string + "););out;"
        printf("OpenStreetMap Overpass query: " + query)
        return op.query(query) # Request both nodes and ways for the region of interest using a union
    except overpy.exception.OverPyException:
        printf("OpenStreetMap servers are too busy right now. Try running this tool later.")
        return None
def clearFeatures(course_json):
    """Strip all surface splines and holes from a course dict, in place.

    Splines are cleared so a re-import starts fresh; holes must always be
    cleared because the game crashes when more than 18 are present.
    Returns the same course_json object for chaining.
    """
    course_json.update(surfaceSplines=[], holes=[])
    return course_json
def addOSMToTGC(course_json, geopointcloud, osm_result, x_offset=0.0, y_offset=0.0, options_dict={}, spline_configuration_json=None, printf=print):
    """Convert OpenStreetMap ways/nodes into TGC course splines and holes.

    Mutates course_json (existing splines/holes are cleared) and returns a
    list of trees as (x, z, radius, height) tuples for the caller to place.
    options_dict toggles individual feature types; x/y offsets shift all
    features in TGC coordinates.
    NOTE(review): options_dict is a mutable default argument — safe here
    only because it is never mutated.
    """
    global spline_configuration
    # Ways represent features composed of many lat/long points (nodes)
    # We can convert these directly into the game's splines
    spline_configuration = spline_configuration_json
    # Get terrain bounding box
    ul_enu = geopointcloud.ulENU()
    lr_enu = geopointcloud.lrENU()
    ul_tgc = geopointcloud.enuToTGC(*ul_enu, 0.0)
    lr_tgc = geopointcloud.enuToTGC(*lr_enu, 0.0)
    course_json = clearFeatures(course_json)
    hole_dictionary = dict() # Holes must be ordered by hole_num. Must keep track of return order just in case data doesn't have hole number
    num_ways = len(osm_result.ways)
    last_print_time = time.time()
    for n, way in enumerate(osm_result.ways):
        # Throttled progress reporting.
        if time.time() > last_print_time + status_print_duration:
            last_print_time = time.time()
            printf(str(round(100.0*float(n) / num_ways, 2)) + "% through OpenStreetMap Ways")
        golf_type = way.tags.get("golf", None)
        waterway_type = way.tags.get("waterway", None)
        building_type = way.tags.get("building", None)
        natural_type = way.tags.get("natural", None)
        # Double checking types, but things REALLY slow down if we do the necessary bounding box checks without checking if it's a type we even care about
        if all(v is None for v in [golf_type, waterway_type, building_type, natural_type]):
            continue
        area = False
        try:
            area = "yes" == way.tags.get("area", None)
        except:
            pass
        # Get the shape of this way
        nds = []
        try:
            for node in way.get_nodes(resolve_missing=True): # Allow automatically resolving missing nodes, but this is VERY slow with the API requests, try to request beforehand
                nds.append(geopointcloud.latlonToTGC(node.lat, node.lon, x_offset, y_offset))
        except overpy.exception.OverPyException:
            printf("OpenStreetMap servers are too busy right now. Try running this tool later.")
            return []
        # Check this shapes bounding box against the limits of the terrain, don't draw outside this bounds
        # Left, Top, Right, Bottom
        nbb = nodeBoundingBox(nds)
        if nbb[0] < ul_tgc[0] or nbb[1] > ul_tgc[2] or nbb[2] > lr_tgc[0] or nbb[3] < lr_tgc[2]:
            # Off of map, skip
            continue
        # Dispatch by golf feature type; each option can be disabled via options_dict.
        if golf_type is not None:
            if golf_type == "green" and options_dict.get('green', True):
                course_json["surfaceSplines"].append(newGreen(nds))
            elif golf_type == "bunker" and options_dict.get('bunker', True):
                course_json["surfaceSplines"].append(newBunker(nds))
            elif golf_type == "tee" and options_dict.get('teebox', True):
                course_json["surfaceSplines"].append(newTeeBox(nds))
            elif golf_type == "fairway" and options_dict.get('fairway', True):
                course_json["surfaceSplines"].append(newFairway(nds))
            elif golf_type == "driving_range" and options_dict.get('range', True):
                # Add as fairway
                course_json["surfaceSplines"].append(newFairway(nds))
            elif golf_type == "rough" and options_dict.get('rough', True):
                course_json["surfaceSplines"].append(newRough(nds))
            elif (golf_type == "water_hazard" or golf_type == "lateral_water_hazard") and options_dict.get('water', True):
                course_json["surfaceSplines"].append(newWaterHazard(nds, area=True))
            elif golf_type == "cartpath" and options_dict.get('cartpath', True):
                course_json["surfaceSplines"].append(newCartPath(nds, area=area))
            elif golf_type == "path" and options_dict.get('path', True):
                course_json["surfaceSplines"].append(newWalkingPath(nds, area=area))
            elif golf_type == "clubhouse" and options_dict.get('building', True):
                course_json["surfaceSplines"].append(newBuilding(nds))
            elif golf_type == "hole" and options_dict.get('hole', True):
                # Only add holes for the course we're interested in
                name_filter = options_dict.get('hole_name_filter', None)
                hole_name = way.tags.get("name", "")
                if name_filter is not None:
                    if name_filter.lower() not in hole_name.lower():
                        if hole_name:
                            printf("Skipping Hole with Name: " + hole_name)
                        else:
                            printf("Skipping Unnamed Hole")
                        continue
                try:
                    par = int(way.tags.get("par", -1))
                    hole_num = int(way.tags.get("ref", -1))
                except:
                    printf("ERROR: There is an invalid character saved to OpenStreetMap for par or hole number: " + str(way.tags))
                    par = -1
                    hole_num = -1
                hole = newHole(par, nds)
                if hole is not None:
                    if hole_num == 0:
                        hole_num = len(hole_dictionary) + 1
                    hole_dictionary[hole_num] = hole
            else:
                printf("Skipping: " + golf_type)
        elif waterway_type is not None:
            # Draw these as water hazards no matter what subtype they are
            if options_dict.get('water', True):
                course_json["surfaceSplines"].append(newWaterHazard(nds, area=area))
        elif building_type is not None:
            # Draw these as buildings no matter what subtype they are
            if options_dict.get('building', True):
                course_json["surfaceSplines"].append(newBuilding(nds))
        elif natural_type is not None:
            if natural_type == "wood" and options_dict.get('tree', True):
                course_json["surfaceSplines"].append(newForest(nds))
    # Insert all the found holes
    for key in sorted(hole_dictionary):
        course_json["holes"].append(hole_dictionary[key])
    trees = [] # Trees must be dealt with differently, and are passed up to a higher level. Tree format is (x, z, radius, height)
    if options_dict.get('tree', False): # Trees are currently the only node right now. This takes a lot of time to loop through, so skip if possible
        if not options_dict.get('lidar_trees', False):
            num_nodes = len(osm_result.nodes)
            last_print_time = time.time()
            for n, node in enumerate(osm_result.nodes):
                if time.time() > last_print_time + status_print_duration:
                    last_print_time = time.time()
                    printf(str(round(100.0*float(n) / num_nodes, 2)) + "% done looking for OpenStreetMap Trees")
                natural_type = node.tags.get("natural", None)
                if natural_type == "tree":
                    nd = geopointcloud.latlonToTGC(node.lat, node.lon, x_offset, y_offset)
                    # Check this shapes bounding box against the limits of the terrain, don't draw outside this bounds
                    # Left, Top, Right, Bottom
                    nbb = nodeBoundingBox([nd])
                    if nbb[0] < ul_tgc[0] or nbb[1] > ul_tgc[2] or nbb[2] > lr_tgc[0] or nbb[3] < lr_tgc[2]:
                        # Off of map, skip
                        continue
                    trees.append(newTree(nd))
        else:
            printf("Lidar trees requested: not adding trees from OpenStreetMap")
    # Return the tree list for later use
    return trees
def addOSMFromXML(course_json, xml_data, options_dict={}, printf=print):
    """Parse raw OSM XML and merge its features into course_json.

    Returns (course_json, trees).  The geo projection is derived from the
    first <bounds> element of the XML.
    NOTE(review): if the XML contains no <bounds> element, latmin/latmax/
    lonmin/lonmax are never assigned and pc.addFromLatLon raises
    NameError — confirm callers always supply bounded exports.
    """
    printf("Adding OpenStreetMap from XML")
    op = overpy.Overpass()
    result = op.parse_xml(xml_data)
    printf("Determining the UTM Geo Projection for this area")
    # Find the lat and lon bounding box from the XML directly
    # Can't find the query bounds in overpy
    root = ET.fromstring(xml_data)
    for bounds in root.iter('bounds'):
        latmin = float(bounds.get('minlat'))
        latmax = float(bounds.get('maxlat'))
        lonmin = float(bounds.get('minlon'))
        lonmax = float(bounds.get('maxlon'))
        break
    # Create a basic geopointcloud to handle this projection
    pc = GeoPointCloud()
    pc.addFromLatLon((latmin, lonmin), (latmax, lonmax), printf=printf)
    trees = addOSMToTGC(course_json, pc, result, x_offset=float(options_dict.get('adjust_ew', 0.0)), y_offset=float(options_dict.get('adjust_ns', 0.0)), \
            options_dict=options_dict, printf=printf)
    return course_json, trees
def drawWayOnImage(way, color, im, pc, image_scale, thickness=-1, x_offset=0.0, y_offset=0.0):
    """Rasterize an OSM way as a filled polygon of `color` onto image `im`.

    When thickness > 0 the outline is additionally stroked with that line
    width, which is used to slightly expand shapes (e.g. water) for masking.
    """
    # Collect the way's node coordinates in image space.  resolve_missing lets
    # overpy fetch nodes absent from the local result, but each fetch is an
    # API round-trip and therefore very slow; prefer pre-requesting nodes.
    points = [
        pc.latlonToCV2(node.lat, node.lon, image_scale, x_offset, y_offset)
        for node in way.get_nodes(resolve_missing=True)
    ]
    points = np.array(points)
    # latlonToCV2 yields point order, not pixel order; swap the two columns.
    points[:, [0, 1]] = points[:, [1, 0]]
    points = np.int32([points])  # fillPoly needs an explicit 32-bit cast
    cv2.fillPoly(im, points, color)
    if thickness > 0:
        # fillPoly has no outline-width option, so stroke the boundary separately.
        cv2.polylines(im, points, True, color, thickness, lineType=cv2.LINE_AA)
def addOSMToImage(ways, im, pc, image_scale, x_offset=0.0, y_offset=0.0, printf=print):
    """Draw golf-tagged OSM ways onto image `im` and return the mutated image.

    Bunkers are drawn in a second pass so they sit on top of every other
    surface (e.g. a bunker inside a green) until a proper layer ordering
    is established here.
    """
    # Fill color per golf tag; ways with other (or no) golf tags are skipped.
    golf_colors = {
        "green": (0, 1.0, 0.2),
        "tee": (0, 0.8, 0),
        "water_hazard": (0, 0, 1.0),
        "lateral_water_hazard": (0, 0, 1.0),
        "fairway": (0, 0.75, 0.2),  # same as the previous generic default
    }
    for way in ways:
        golf_type = way.tags.get("golf", None)
        if golf_type not in golf_colors:
            continue
        drawWayOnImage(way, golf_colors[golf_type], im, pc, image_scale,
                       thickness=-1, x_offset=x_offset, y_offset=y_offset)
    # Draw bunkers last on top of all other layers as a hack until proper
    # layer order is established here.  Needed for things like bunkers in
    # greens... :\
    for way in ways:
        if way.tags.get("golf", None) == "bunker":
            color = (0.85, 0.85, 0.7)
            # BUG FIX: offsets were previously passed positionally, so
            # x_offset landed in the `thickness` parameter and y_offset in
            # x_offset; pass them by keyword.
            drawWayOnImage(way, color, im, pc, image_scale,
                           x_offset=x_offset, y_offset=y_offset)
    return im
|
11504550
|
import os
import sys
import torch
import numpy as np
from datetime import datetime
"""
How To:
Example for running from command line:
python <path_to>/ProvablyPowerfulGraphNetworks/main_scripts/main_10fold_experiment.py --config=configs/10fold_config.json --dataset_name=COLLAB
"""
# Change working directory to project's main directory, and add it to path - for library and config usages
project_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
sys.path.append(project_dir)
os.chdir(project_dir)
from data_loader.data_generator import DataGenerator
from models.model_wrapper import ModelWrapper
from trainers.trainer import Trainer
from utils.config import process_config
from utils.dirs import create_dirs
from utils import doc_utils
from utils.utils import get_args
def main():
    """Run the 10-fold experiment described by the JSON config.

    Reads --config/--dataset_name from the command line, then for each of
    config.num_exp experiments trains one model per fold (1..10) and finally
    summarizes the 10-fold results into the summary directory.
    """
    # capture the config path from the run arguments
    # then process the json configuration file
    try:
        args = get_args()
        config = process_config(args.config, args.dataset_name)
    except Exception as e:
        print("missing or invalid arguments {}".format(e))
        # BUG FIX: exit non-zero so shells/CI can detect the failure
        # (previously exited with status 0, signalling success).
        sys.exit(1)
    # os.environ['CUDA_LAUNCH_BLOCKING'] = "1" # TODO uncomment only for CUDA error debugging
    os.environ["CUDA_VISIBLE_DEVICES"] = config.gpu
    # Fixed seeds for reproducibility across runs.
    torch.manual_seed(100)
    np.random.seed(100)
    # torch.backends.cudnn.deterministic = True # can impact performance
    # torch.backends.cudnn.benchmark = False # can impact performance
    print("lr = {0}".format(config.hyperparams.learning_rate))
    print("decay = {0}".format(config.hyperparams.decay_rate))
    print(config.architecture)
    # create the experiments dirs
    create_dirs([config.summary_dir, config.checkpoint_dir])
    doc_utils.doc_used_config(config)
    for exp in range(1, config.num_exp + 1):
        for fold in range(1, 11):
            print("Experiment num = {0}\nFold num = {1}".format(exp, fold))
            # create your data generator for this fold
            config.num_fold = fold
            data = DataGenerator(config)
            # create an instance of the model you want
            model_wrapper = ModelWrapper(config, data)
            # create trainer and pass all the previous components to it
            trainer = Trainer(model_wrapper, data, config)
            # here you train your model
            trainer.train()
    doc_utils.summary_10fold_results(config.summary_dir)
if __name__ == '__main__':
    # Time the whole experiment run, setup included.
    started_at = datetime.now()
    main()
    print('Runtime: {}'.format(datetime.now() - started_at))
|
11504553
|
import testutil
import test_engine
class TestDependencies(test_engine.EngineTestCase):
  # One table whose Sum formula refers to the previous row's Sum via Prev,
  # creating a 3200-cell-deep chain of cell dependencies.
  sample_desc = {
    "SCHEMA": [
      [1, "Table1", [
        [1, "Prev", "Ref:Table1", True, "Table1.lookupOne(id=$id-1)", "", ""],
        [2, "Value", "Numeric", False, "", "", ""],
        [3, "Sum", "Numeric", True, "($Prev.Sum or 0) + $Value", "", ""],
      ]]
    ],
    "DATA": {
      "Table1": [["id", "Value"]] + [[row, row] for row in range(1, 3201)]
    }
  }

  def test_recursive_column_dependencies(self):
    sample = testutil.parse_test_sample(self.sample_desc)
    self.load_sample(sample)
    self.apply_user_action(['Calculate'])

    # Sum is a running total of Value, so row k holds k*(k+1)/2.
    self.assertTableData("Table1", cols="subset", rows="subset", data=[
      ["id", "Value", "Sum"],
      [1, 1, 1],
      [2, 2, 3],
      [3, 3, 6],
      [3200, 3200, 5121600],
    ])

    # Changing the first Value invalidates all 3200 downstream Sum cells one
    # at a time; this used to trigger a recursion error.
    self.update_record("Table1", 1, Value=11)
    self.assertTableData("Table1", cols="subset", rows="subset", data=[
      ["id", "Value", "Sum"],
      [1, 11, 11],
      [2, 2, 13],
      [3, 3, 16],
      [3200, 3200, 5121610],
    ])
|
11504600
|
from streaming.transparency.api.sources import Sources
from streaming.transparency.api.records import Records
from streaming.transparency.api.merkle import MerkleTree
|
11504602
|
import a0
import time
# Resolve the event topic from alephzero config, defaulting to /event_topic.
EVENT_TOPIC = a0.cfg(a0.env.topic(), "/event_topic", str)
print(f"{EVENT_TOPIC} initialized")

publisher = a0.Publisher(f"{EVENT_TOPIC}")
counter = 0
# Publish a numbered message every 10 seconds, forever.
while True:
    msg = f"data {counter}"
    print(msg)
    publisher.pub(msg)
    counter += 1
    time.sleep(10)
|
11504604
|
import math
import torch
import torch.nn as nn
from ..utils import common_functions as c_f
# https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/linear.py
def reset_parameters_helper(weight, bias):
    """Initialize linear-layer parameters the same way torch.nn.Linear does.

    `weight` gets Kaiming-uniform init with a=sqrt(5); `bias`, when present,
    is drawn uniformly from +/- 1/sqrt(fan_in) of the weight tensor.
    """
    nn.init.kaiming_uniform_(weight, a=math.sqrt(5))
    if bias is None:
        return
    fan_in, _ = nn.init._calculate_fan_in_and_fan_out(weight)
    limit = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
    nn.init.uniform_(bias, -limit, limit)
# https://openaccess.thecvf.com/content_CVPR_2020/papers/Lu_Stochastic_Classifiers_for_Unsupervised_Domain_Adaptation_CVPR_2020_paper.pdf
class StochasticLinear(nn.Module):
    """
    Implementation of the stochastic layer from
    [Stochastic Classifiers for Unsupervised Domain Adaptation](https://xiatian-zhu.github.io/papers/LuEtAl_CVPR2020.pdf).
    In ```train()``` mode, it uses random weights and biases
    that are sampled from a learned normal distribution.
    In ```eval()``` mode, the learned mean is used.
    """

    def __init__(self, in_features: int, out_features: int, device=None, dtype=None):
        """
        Arguments:
            in_features: size of each input sample
            out_features: size of each output sample
        """
        super().__init__()
        self.in_features = in_features
        self.out_features = out_features
        factory_kwargs = {"device": device, "dtype": dtype}
        # Mean and std-dev of the weight distribution.  Note the (in, out)
        # layout: forward computes x @ W directly instead of x @ W.T.
        weight_shape = (in_features, out_features)
        self.weight_mean = nn.Parameter(torch.empty(*weight_shape, **factory_kwargs))
        self.weight_sigma = nn.Parameter(torch.empty(*weight_shape, **factory_kwargs))
        self.bias_mean = nn.Parameter(torch.empty(out_features, **factory_kwargs))
        self.bias_sigma = nn.Parameter(torch.empty(out_features, **factory_kwargs))
        self.reset_parameters()

    def reset_parameters(self):
        # Both the mean and sigma tensors are initialized like an ordinary
        # Linear layer's weight/bias pair.
        for w, b in ((self.weight_mean, self.bias_mean),
                     (self.weight_sigma, self.bias_sigma)):
            reset_parameters_helper(w, b)

    def random_sample(self, mean, sigma):
        # Reparameterization trick: mean + sigma * eps with eps ~ N(0, I).
        return mean + sigma * torch.randn_like(sigma)

    def forward(self, x):
        """"""
        if self.training:
            # Sample fresh weights and biases on every forward pass.
            w = self.random_sample(self.weight_mean, self.weight_sigma)
            b = self.random_sample(self.bias_mean, self.bias_sigma)
        else:
            # Deterministic inference: use the learned means.
            w, b = self.weight_mean, self.bias_mean
        return torch.matmul(x, w) + b

    def extra_repr(self):
        """"""
        return c_f.extra_repr(self, ["in_features", "out_features"])
|
11504645
|
import asyncio
import math
from typing import (
Awaitable,
Callable,
Iterable,
Optional,
Sized,
Tuple,
Any,
)
async def as_completed_return_exception(coros: Iterable[Awaitable]):
    """
    Wrapper for asyncio.as_completed. Equivalent return_exceptions=True from asyncio.gather.
    Yields each coroutine's result as it completes; if a coroutine raises,
    the exception instance itself is yielded instead of propagating.
    Args:
        coros: coroutines list.
    Yields:
        Result (or the raised Exception) of each coroutine, in completion order.
    """
    # BUG FIX: the previous annotation `[Iterable[Awaitable], Sized]` was a
    # list literal, not a valid type hint.
    for future in asyncio.as_completed(coros):
        try:
            result = await future
        except Exception as err:  # deliberate: mirrors gather(return_exceptions=True)
            result = err
        yield result
class Progress:
    """No-op stand-in for a tqdm progress bar; only records desc and total."""

    def __init__(self, desc: str, total: int):
        self.desc = desc
        self.total = total

    def refresh(self):
        """No-op; a real tqdm bar would redraw here."""

    def close(self):
        """No-op; a real tqdm bar would release resources here."""

    def update(self):
        """No-op; a real tqdm bar would advance here."""
class Monitor:
    """Tracks total, per-second and retry counts on three progress bars."""

    def __init__(self, rps: float, tqdm: Any = None):
        self._rps = rps
        # Fall back to the no-op Progress bar when no tqdm factory is given.
        bar_factory = tqdm or Progress
        self._pbar_total = bar_factory(desc='Total', total=0)
        self._pbar_rps = bar_factory(desc='Per second', total=0)
        self._pbar_retry = bar_factory(desc='Retry', total=0)

    def __del__(self):
        # Flush and release every bar when the monitor is garbage-collected.
        for bar in (self._pbar_total, self._pbar_rps, self._pbar_retry):
            bar.refresh()
            bar.close()

    def add(self, num: int):
        # Grow the expected total by `num` newly scheduled coroutines.
        self._pbar_total.total += num
        self._pbar_total.refresh()

    def update(self, retry: bool = False):
        # Every attempt counts toward the per-second bar; completions and
        # retries advance separate bars.
        self._pbar_rps.update()
        if retry:
            self._pbar_retry.update()
        else:
            self._pbar_total.update()
class Pulemet:
    """Manage execution speed of coroutines.

    Two semaphores cooperate: a rate gate that a background timer refills
    (``burst`` permits every ``1 / rps`` seconds) and a pool gate that caps
    how many wrapped coroutines run concurrently.
    """
    def __init__(
        self,
        rps: float = 0.1,
        pool_size: Optional[int] = None,
        pbar: Any = None,
    ):
        """
        Gets config parameters.
        Args:
            rps: coroutines per second
            pool_size: working coroutines limit
            pbar: progress bar from tqdm. Example: from tqdm.auto import tqdm; Pulemet(pbar=tqdm())
        """
        # The timer tick rate is kept within [5, 10] Hz; higher requested rps
        # is achieved by releasing several permits ("burst") per tick.
        self._rps_min, self._rps_max = 5, 10
        self._rps, self._burst = self._get_rps_and_burst(rps)
        if pool_size is not None and pool_size < 1:
            raise ValueError('Argument pool_size has to be greater 0')
        self._pool_size = pool_size
        if pool_size is None:
            self._pool_size = int(self._rps * 5) + 10  # 5 seconds accumulating, 10 - bias
        # Rate gate: bounded so idle periods cannot bank more than one burst.
        self._semaphore_time = asyncio.BoundedSemaphore(value=math.ceil(self._burst))
        # Concurrency gate: limits simultaneously running coroutines.
        self._semaphore_pool = asyncio.Semaphore(value=self._pool_size)
        # Background refill task; NOTE(review): requires a running event loop
        # at construction time on modern Python -- confirm usage context.
        self._timer_task = asyncio.ensure_future(self._timer())
        self._pbar = Monitor(rps=rps, tqdm=pbar)
    def __del__(self):
        # Stop the refill task so it does not outlive this instance.
        self._timer_task.cancel()
    def process(self, coros: Iterable[Awaitable]) -> Iterable[Awaitable]:
        """
        Runs _wrap_coro for all coroutines in list.
        Args:
            coros: coroutines list; must also support len().
        Returns:
            New coroutines list
        """
        self._pbar.add(num=len(coros))
        res = [self._wrap_coro(coro) for coro in coros]
        return res
    def process_funcs(
        self,
        coro_func: Callable[..., Awaitable],
        coros_kwargs: Iterable[dict],
        exceptions: Tuple[BaseException, ...],
        exceptions_max: Optional[int] = None,
    ) -> Iterable[Awaitable]:
        """
        Runs _wrap_coro for all coroutines in list.
        Args:
            coro_func: async function
            coros_kwargs: list of kwargs for function, one dict per call
            exceptions: tuple of exception classes to catch and retry on
            exceptions_max: max attempts before re-raising; None retries forever
        Returns:
            New coroutines list
        """
        if hasattr(coros_kwargs, '__len__'):
            self._pbar.add(num=len(coros_kwargs))
        res = [
            self._warp_coro_func(
                coro_func=coro_func,
                exceptions=exceptions,
                exceptions_max=exceptions_max,
                **coro_kwargs,
            )
            for coro_kwargs in coros_kwargs
        ]
        return res
    def _get_rps_and_burst(self, rps: float):
        # For low rps run the timer at exactly rps with burst=1; otherwise
        # search for a (burst, tick-rate) pair whose product best matches rps
        # while keeping the tick rate within [_rps_min, _rps_max].
        if rps <= self._rps_max:
            rps_target, burst = rps, 1
        else:
            burst_max = int(rps / self._rps_min)
            burst_min = math.ceil(rps / self._rps_max)
            res = []
            for burst in range(burst_min, burst_max + 1):
                rps_target = round(rps / burst, 2)
                # Relative error (%) between requested and achieved rate.
                prec = abs(rps - rps_target * burst) / rps * 100
                res.append((burst, rps_target, prec))
                if prec < 0.01:
                    break
            # Pick the combination with the smallest error.
            burst, rps_target, prec = sorted(res, key=lambda x: x[2])[0]
        return rps_target, burst
    async def _timer(self):
        """Release the execution semaphore `burst` times every `1 / rps` seconds."""
        while True:
            await asyncio.sleep(1 / self._rps)
            for ind in range(self._burst):
                try:
                    self._semaphore_time.release()
                except ValueError:
                    # BoundedSemaphore already full -- permits went unused.
                    continue
    async def _wrap_coro(self, coro: Awaitable, update: bool = True) -> Awaitable:
        """
        Waiting for speed and pool size semaphores release then run coroutine.
        Args:
            coro: coroutine
            update: whether to advance the progress bars on completion
        Returns:
            result of the original coroutine
        """
        async with self._semaphore_pool:
            await self._semaphore_time.acquire()
            if update:
                self._pbar.update(retry=False)
            result = await coro
            return result
    async def _warp_coro_func(
        self,
        coro_func: Callable[..., Awaitable],
        exceptions: Tuple[BaseException, ...],
        exceptions_max: Optional[int] = None,
        **coro_kwargs,
    ) -> Awaitable:
        # Retry coro_func on the given exceptions, re-raising after
        # exceptions_max failed attempts (or retrying forever when None).
        # NOTE(review): "_warp_" looks like a typo for "_wrap_"; renaming
        # would touch callers, so it is only flagged here.
        cnt = 0
        while True:
            coro = coro_func(**coro_kwargs)
            try:
                coro = await self._wrap_coro(coro, update=False)
                self._pbar.update(retry=False)
                return coro
            except exceptions as exc:
                cnt += 1
                self._pbar.update(retry=True)
                if exceptions_max is not None and cnt == exceptions_max:
                    raise exc
                continue
|
11504648
|
from .para_sen_routes import TOK_BLUEPRINT
from .para_sen_routes import TOK_BLUEPRINT_wf
from .para_sen_routes import TOK_BLOCK_BLUEPRINT_wf
|
11504686
|
import json
import pytuya
# Specify the smoker and get its status
smoker = pytuya.OutletDevice('<gwID>', '<IP>', '<productKey>')
data = smoker.status()

# Enable debug to see the raw JSON
Debug = False
# Debug = True
if Debug:
    print(json.dumps(data, indent=4))

# Simple if statement to check if the smoker is on
if data['dps']['1']:
    print('RecTec is on')
else:
    print('RecTec is off')

# The following values are based on observation
# dps = '102' & '103' might be swapped
print('Target Temperature: %r' % data['dps']['102'])
print('Current Temperature: %r' % data['dps']['103'])

# When smoker is off (data['dps']['1] = False)
# values of probes might be based on last "on"
print('Probe A Temperature: %r' % data['dps']['105'])
print('Probe B Temperature: %r' % data['dps']['106'])
|
11504698
|
from .panosuncg import PanoSunCG
from .threeD60 import ThreeD60
from .stanford2d3d import Stanford2D3D
from .matterport3d import Matterport3D
|
11504699
|
import RPi.GPIO as GPIO
from time import sleep
import math
class motor_ctrl:
    """Single-motor driver on Raspberry Pi GPIO (BCM numbering).

    Pins in1/in2 select the direction, pin en carries a 1 kHz PWM whose duty
    cycle sets the speed.  controller() maps an integer command code onto a
    (duty cycle, direction) pair and returns an estimated velocity.
    """

    # Command code -> (duty cycle %, direction).  Direction: -1 backward,
    # +1 forward, 0 = leave direction pins unchanged.  Any code outside 0..7
    # behaves like full-speed forward (the original `else` branch).
    _COMMANDS = {
        0: (95, -1),
        1: (85, -1),
        2: (75, -1),
        3: (65, -1),
        4: (2, 0),
        5: (65, 1),
        6: (75, 1),
        7: (85, 1),
    }

    def __init__(self):
        # BCM pin assignments: in1/in2 = direction, en = PWM enable.
        self.in1 = 26
        self.in2 = 19
        self.en = 21
        GPIO.setmode(GPIO.BCM)
        GPIO.setup(self.in1, GPIO.OUT)
        GPIO.setup(self.in2, GPIO.OUT)
        GPIO.setup(self.en, GPIO.OUT)
        GPIO.output(self.in1, GPIO.LOW)
        GPIO.output(self.in2, GPIO.LOW)
        # 1 kHz PWM on the enable pin, started at 25% duty cycle.
        self.p = GPIO.PWM(self.en, 1000)
        self.p.start(25)
        print("\n")
        print("The default speed & direction of motor is LOW & Forward.....")
        print("r-run s-stop f-forward b-backward l-low m-medium h-high e-exit")
        print("\n")
        self.dutyCycle = 0  # last applied duty cycle (%)
        self.vt = 0         # last estimated velocity

    def controller(self, x):
        """Apply command code ``x``; return the estimated velocity.

        Replaces a nine-branch if/elif ladder with a dispatch table; the
        applied duty cycles, pin states and printed messages are unchanged.
        """
        duty, direction = self._COMMANDS.get(x, (95, 1))
        self.dutyCycle = duty
        self.p.ChangeDutyCycle(self.dutyCycle % 100)
        if direction > 0:
            GPIO.output(self.in1, GPIO.HIGH)
            GPIO.output(self.in2, GPIO.LOW)
            print("forward")
        elif direction < 0:
            GPIO.output(self.in1, GPIO.LOW)
            GPIO.output(self.in2, GPIO.HIGH)
            print("backward")
        else:
            print("NO CHANEGE")  # sic -- message kept verbatim from original
        # Empirical duty-cycle-to-velocity factor from the original code.
        self.vt = self.dutyCycle * 0.314
        return self.vt  # change it as velocity
|
11504712
|
import random
from metrics import rmse
import numpy as np
from svd import SVDRecommender
def chunk(xs, n):
    """Randomly partition `xs` into `n` near-equal chunks.

    Items are shuffled, then split so chunk sizes differ by at most one
    (len(xs) % n chunks receive one extra element).

    Args:
        xs: iterable of items to split.
        n: number of chunks to produce.

    Returns:
        List of n lists whose concatenation is a permutation of xs.
    """
    ys = list(xs)
    random.shuffle(ys)
    size, leftover = divmod(len(ys), n)
    # BUG FIX: `xrange` does not exist on Python 3 (this module already uses
    # print() as a function); use range() instead.
    chunks = [ys[size * i: size * (i + 1)] for i in range(n)]
    # Distribute the remaining `leftover` items round-robin.
    edge = size * n
    for i in range(leftover):
        chunks[i % n].append(ys[edge + i])
    return chunks
def cross_val_score(model=None, data=None, cv=10, scorer=rmse):
    """Evaluate `model` with cv-fold cross-validation on `data`.

    Args:
        model: recommender exposing fit(train) / predict(test) and a
            `formatizer` dict whose 'value' key indexes the target column.
        data: array-like of rating rows.
        cv: number of folds.
        scorer: metric callable scorer(y_true, y_pred) -> float.

    Returns:
        Mean score over the cv folds.
    """
    data = np.array(data)
    print(data.shape)
    chunks = chunk(data, cv)
    score = list()
    # BUG FIX: iterate over `cv` folds (was hard-coded to 10) and score with
    # the injected `scorer` (the parameter was ignored in favor of rmse).
    for i in range(cv):
        iter_data = list()
        for j in range(len(chunks)):
            if j != i:
                iter_data.extend(chunks[j])
        pred_data = np.array(chunks[i])
        iter_data = np.array(iter_data)
        model.fit(iter_data)
        pred = model.predict(pred_data)
        score.append(scorer(pred_data[:, model.formatizer['value']], pred))
        print(score[i])
    return np.mean(score)
|
11504728
|
from django.conf import settings
from django.conf.urls import url, include
from django.contrib import admin
from django.conf.urls.static import static
from django.contrib.auth.views import LogoutView
from django.views.generic import RedirectView
from .views import HomeView, instruction_view
from accounts.views import RegisterView, LoginView, LeaderBoardView, ProfileView, NewsView
from market.views import UserTransactionHistoryView
# Site-wide URL routing: top-level pages, auth flows, app includes and admin.
urlpatterns = [
    url(r'^$', HomeView.as_view(), name='home'),
    url(r'^login/$', LoginView.as_view(), name='login'),
    url(r'^register/$', RegisterView.as_view(), name='register'),
    url(r'^logout/$', LogoutView.as_view(), name='logout'),
    # Public profile pages keyed by alphanumeric username.
    url(r'^profile/(?P<username>[a-zA-Z0-9]+)/$', ProfileView.as_view(), name='profile'),
    url(r'^news/$', NewsView.as_view(), name='news'),
    url(r'^account/', include('accounts.urls', namespace='account')),
    # Password-management routes; a bare /accounts/ redirects to /account.
    url(r'^accounts/', include('accounts.passwords.urls')),
    url(r'^accounts/$', RedirectView.as_view(url='/account')),
    url(r'^leaderboard/$', LeaderBoardView.as_view(), name='leaderboard'),
    url(r'^instructions/$', instruction_view, name='instructions'),
    url(r'^stocks/', include('market.urls', namespace='market')),
    url(r'^history/$', UserTransactionHistoryView.as_view(), name='transaction_history'),
    url(r'^admin/', admin.site.urls),
]
# Serve static/media files directly only in development (DEBUG); in
# production the web server is expected to handle these paths.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
11504758
|
import numpy as np
import scipy as sp
import warnings
from sklearn.exceptions import DataConversionWarning
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils.validation import check_is_fitted
from sklearn.utils.extmath import safe_sparse_dot
from sklearn.utils import check_X_y, check_array
from dask import distributed
from dask.distributed import Client, LocalCluster
import dask.dataframe as dd
import dask.array as da
class DaskCholeskySolver(BaseEstimator, RegressorMixin):
    """Out-of-core linear system solver with Dask back-end.
    Parameters
    ----------
    alpha : float, non-negative
        L2 regularization parameter, larger value means stronger effect. The value may be
        increased if the system fails to converge; actual used value stored in `alpha_` parameter.
    batch_size : int
        Batch size for **samples and features**. Computations proceed on square blocks of data.
        For optimal performance, use a number of features that is equal or a bit less than multiple
        of a batch size; e.g. 8912 features with 3000 batch size.
    swap_dir : str
        Directory for temporary storage of Dask data that does not fit in memory. A large and fast
        storage is advised, like a local SSD.
    Attributes
    ----------
    cluster_ : object
        An instance of `dask.distributed.LocalCluster`.
    client_ : object
        Dask client for running computations.
    """
    def __init__(self, alpha=1e-7, batch_size=2000, swap_dir=None):
        # Store constructor args untouched, per scikit-learn estimator rules.
        self.alpha = alpha
        self.batch_size = batch_size
        self.swap_dir = swap_dir
    def _init_dask(self):
        # Spin up a 2-worker local cluster spilling to swap_dir.
        # NOTE(review): newer dask.distributed versions renamed `local_dir`
        # to `local_directory` -- confirm against the pinned dask version.
        self.cluster_ = LocalCluster( n_workers=2, local_dir=self.swap_dir)
        self.client_ = Client(self.cluster_)
        print("Running on:")
        print(self.client_)
    def fit(self, X, y):
        # NOTE(review): stub -- this assigns the *function object*
        # da.random.normal (never called), so no solving happens yet.
        self.W_ = da.random.normal
        return self
    def predict(self, X):
        # NOTE(review): stub -- always returns None instead of predictions.
        return None
class BBvdsnjvlsdnjhbgfndjvksdjkvlndsf(BaseEstimator, RegressorMixin):
    """Incremental ridge regression via accumulated normal equations.

    Maintains X'X and X'Y across `partial_fit` calls, with an explicit bias
    term stored in row/column 0, and solves (X'X)B = X'Y on demand.
    NOTE(review): the class name looks like keyboard mash -- consider renaming.
    """
    def __init__(self, alpha=1e-7):
        # L2 regularization strength, added to the diagonal of X'X
        # (except the bias entry).
        self.alpha = alpha
    def _init_XY(self, X, y):
        """Initialize covariance matrices, including a separate bias term.
        """
        d_in = X.shape[1]
        # Row/column 0 corresponds to the bias; no regularization there.
        self._XtX = np.eye(d_in + 1) * self.alpha
        self._XtX[0, 0] = 0
        if len(y.shape) == 1:
            self._XtY = np.zeros((d_in + 1,))
        else:
            self._XtY = np.zeros((d_in + 1, y.shape[1]))
    @property
    def XtY_(self):
        # Accumulated X'Y (bias row first).
        return self._XtY
    @property
    def XtX_(self):
        # Accumulated X'X (bias row/column first).
        return self._XtX
    @XtY_.setter
    def XtY_(self, value):
        self._XtY = value
    @XtX_.setter
    def XtX_(self, value):
        self._XtX = value
    def _solve(self):
        """Second stage of solution (X'X)B = X'Y using Cholesky decomposition.
        Sets `is_fitted_` to True.
        """
        # assume_a='pos' selects the Cholesky path; X'X + alpha*I is SPD.
        B = sp.linalg.solve(self._XtX, self._XtY, assume_a='pos', overwrite_a=False, overwrite_b=False)
        # Split the combined solution into bias (row 0) and weights.
        self.coef_ = B[1:]
        self.intercept_ = B[0]
        self.is_fitted_ = True
    def _reset(self):
        """Erase solution and data matrices.
        """
        [delattr(self, attr) for attr in ('_XtX', '_XtY', 'coef_', 'intercept_', 'is_fitted_') if hasattr(self, attr)]
    def fit(self, X, y):
        """Solves an L2-regularized linear system like Ridge regression, overwrites any previous solutions.
        """
        self._reset()  # remove old solution
        self.partial_fit(X, y, compute_output_weights=True)
        return self
    def partial_fit(self, X, y, compute_output_weights=True):
        """Update model with a new batch of data.
        Output weight computation can be temporary turned off for faster processing. This will mark model as
        not fit. Enable `compute_output_weights` in the final call to `partial_fit`.
        Parameters
        ----------
        X : {array-like, sparse matrix}, shape=[n_samples, n_features]
            Training input samples
        y : array-like, shape=[n_samples, n_targets]
            Training targets
        compute_output_weights : boolean, optional, default True
            Whether to compute new output weights (coef_, intercept_). Disable this in intermediate `partial_fit`
            steps to run computations faster, then enable in the last call to compute the new solution.
            .. Note::
                Solution can be updated without extra data by setting `X=None` and `y=None`.
        """
        if self.alpha < 0:
            raise ValueError("Regularization parameter alpha must be non-negative.")
        # solution only: re-solve from the already-accumulated matrices
        if X is None and y is None and compute_output_weights:
            self._solve()
            return self
        # validate parameters
        X, y = check_X_y(X, y, accept_sparse=True, multi_output=True, y_numeric=True, ensure_2d=True)
        if len(y.shape) > 1 and y.shape[1] == 1:
            msg = "A column-vector y was passed when a 1d array was expected.\
                   Please change the shape of y to (n_samples, ), for example using ravel()."
            warnings.warn(msg, DataConversionWarning)
        # init temporary data storage on the first batch
        if not hasattr(self, '_XtX'):
            self._init_XY(X, y)
        else:
            if X.shape[1] + 1 != self._XtX.shape[0]:
                n_new, n_old = X.shape[1], self._XtX.shape[0] - 1
                raise ValueError("Number of features %d does not match previous data %d." % (n_new, n_old))
        # compute temporary data: column sums implement the implicit bias
        # column of ones without materializing it.
        X_sum = safe_sparse_dot(X.T, np.ones((X.shape[0],)))
        y_sum = safe_sparse_dot(y.T, np.ones((y.shape[0],)))
        self._XtX[0, 0] += X.shape[0]
        self._XtX[1:, 0] += X_sum
        self._XtX[0, 1:] += X_sum
        self._XtX[1:, 1:] += X.T @ X
        self._XtY[0] += y_sum
        self._XtY[1:] += X.T @ y
        # solve
        if not compute_output_weights:
            # mark as not fitted until the final partial_fit call
            [delattr(self, attr) for attr in ('coef_', 'intercept_', 'is_fitted_') if hasattr(self, attr)]
        else:
            self._solve()
        return self
    def predict(self, X):
        # Fails with NotFittedError unless a solve has happened.
        check_is_fitted(self, 'is_fitted_')
        X = check_array(X, accept_sparse=True)
        return safe_sparse_dot(X, self.coef_, dense_output=True) + self.intercept_
|
11504766
|
import os.path
import argparse
from PIL import Image
from hugsvision.inference.TorchVisionClassifierInference import TorchVisionClassifierInference
# Command-line interface: model directory and input image path.
parser = argparse.ArgumentParser(description='Image classifier')
parser.add_argument('--path', type=str, default="./OUT_TORCHVISION/HAM10000/", help='The model path')
parser.add_argument('--img', type=str, default="/users/ylabrak/datasets/HAM10000/bcc/ISIC_0024331.jpg", help='The input image')
args = parser.parse_args()

# Load the trained TorchVision classifier from disk.
classifier = TorchVisionClassifierInference(
    model_path=args.path,
    # device="cpu",
)

print("Process the image: " + args.img)

# Run the same image through the three inference entry points.
predicted_label = classifier.predict(img_path=args.img)
print("Predicted class:", predicted_label)

probabilities = classifier.predict(img_path=args.img, return_str=False)
print("Vector of probabilities:", probabilities)

image_label = classifier.predict_image(img=Image.open(args.img))
print("Predicted class:", image_label)
|
11504770
|
from skimage.io import imread
from gSLICrPy import __get_CUDA_gSLICr__, CUDA_gSLICr
def main():
    """Segment example.jpg into superpixels via the gSLICr CUDA wrapper."""
    image = imread('./example.jpg')
    height, width = image.shape[:2]
    # RGB -> BGR, then flatten into the uint8 byte buffer the C API expects.
    pixel_buffer = image[:, :, ::-1].flatten().astype('uint8')
    engine = __get_CUDA_gSLICr__()
    CUDA_gSLICr(
        engine,
        pixel_buffer,
        img_size_x=width,
        img_size_y=height,
        n_segs=10,
        spixel_size=20,
        coh_weight=0.6,
        n_iters=50,
        color_space=2,
        segment_color_space=2,
        segment_by_size=True,
        enforce_connectivity=True,
        out_name='example_results',
    )


if __name__ == '__main__':
    main()
|
11504776
|
from __future__ import absolute_import
from __future__ import print_function
import veriloggen
import dataflow_sort
expected_verilog = """
module test
(
);
reg CLK;
reg RST;
reg signed [32-1:0] din0;
reg signed [32-1:0] din1;
reg signed [32-1:0] din2;
reg signed [32-1:0] din3;
reg signed [32-1:0] din4;
reg signed [32-1:0] din5;
reg signed [32-1:0] din6;
reg signed [32-1:0] din7;
wire signed [32-1:0] dout0;
wire signed [32-1:0] dout1;
wire signed [32-1:0] dout7;
wire signed [32-1:0] dout6;
wire signed [32-1:0] dout5;
wire signed [32-1:0] dout4;
wire signed [32-1:0] dout3;
wire signed [32-1:0] dout2;
sort
uut
(
.CLK(CLK),
.RST(RST),
.din0(din0),
.din1(din1),
.din2(din2),
.din3(din3),
.din4(din4),
.din5(din5),
.din6(din6),
.din7(din7),
.dout0(dout0),
.dout1(dout1),
.dout7(dout7),
.dout6(dout6),
.dout5(dout5),
.dout4(dout4),
.dout3(dout3),
.dout2(dout2)
);
reg reset_done;
initial begin
$dumpfile("uut.vcd");
$dumpvars(0, uut);
end
initial begin
CLK = 0;
forever begin
#5 CLK = !CLK;
end
end
initial begin
RST = 0;
din0 = 0;
din1 = 0;
din2 = 0;
din3 = 0;
din4 = 0;
din5 = 0;
din6 = 0;
din7 = 0;
#100;
RST = 1;
#100;
RST = 0;
#1000;
reset_done = 1;
@(posedge CLK);
#1;
#100000;
$finish;
end
reg [32-1:0] fsm;
localparam fsm_init = 0;
localparam fsm_1 = 1;
localparam fsm_2 = 2;
localparam fsm_3 = 3;
localparam fsm_4 = 4;
localparam fsm_5 = 5;
localparam fsm_6 = 6;
localparam fsm_7 = 7;
localparam fsm_8 = 8;
localparam fsm_9 = 9;
localparam fsm_10 = 10;
localparam fsm_11 = 11;
localparam fsm_12 = 12;
localparam fsm_13 = 13;
localparam fsm_14 = 14;
localparam fsm_15 = 15;
localparam fsm_16 = 16;
localparam fsm_17 = 17;
localparam fsm_18 = 18;
localparam fsm_19 = 19;
localparam fsm_20 = 20;
localparam fsm_21 = 21;
localparam fsm_22 = 22;
localparam fsm_23 = 23;
localparam fsm_24 = 24;
localparam fsm_25 = 25;
localparam fsm_26 = 26;
localparam fsm_27 = 27;
localparam fsm_28 = 28;
localparam fsm_29 = 29;
localparam fsm_30 = 30;
localparam fsm_31 = 31;
localparam fsm_32 = 32;
localparam fsm_33 = 33;
localparam fsm_34 = 34;
localparam fsm_35 = 35;
localparam fsm_36 = 36;
localparam fsm_37 = 37;
localparam fsm_38 = 38;
localparam fsm_39 = 39;
localparam fsm_40 = 40;
localparam fsm_41 = 41;
localparam fsm_42 = 42;
localparam fsm_43 = 43;
localparam fsm_44 = 44;
localparam fsm_45 = 45;
localparam fsm_46 = 46;
localparam fsm_47 = 47;
localparam fsm_48 = 48;
localparam fsm_49 = 49;
localparam fsm_50 = 50;
localparam fsm_51 = 51;
localparam fsm_52 = 52;
localparam fsm_53 = 53;
localparam fsm_54 = 54;
localparam fsm_55 = 55;
localparam fsm_56 = 56;
localparam fsm_57 = 57;
localparam fsm_58 = 58;
localparam fsm_59 = 59;
localparam fsm_60 = 60;
localparam fsm_61 = 61;
localparam fsm_62 = 62;
localparam fsm_63 = 63;
localparam fsm_64 = 64;
localparam fsm_65 = 65;
localparam fsm_66 = 66;
always @(posedge CLK) begin
if(RST) begin
fsm <= fsm_init;
end else begin
case(fsm)
fsm_init: begin
if(reset_done) begin
fsm <= fsm_1;
end
end
fsm_1: begin
din0 <= 100;
din1 <= 99;
din2 <= 98;
din3 <= 97;
din4 <= 96;
din5 <= 95;
din6 <= 94;
din7 <= 93;
fsm <= fsm_2;
end
fsm_2: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_3;
end
fsm_3: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_4;
end
fsm_4: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_5;
end
fsm_5: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_6;
end
fsm_6: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_7;
end
fsm_7: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_8;
end
fsm_8: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_9;
end
fsm_9: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_10;
end
fsm_10: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_11;
end
fsm_11: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_12;
end
fsm_12: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_13;
end
fsm_13: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_14;
end
fsm_14: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_15;
end
fsm_15: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_16;
end
fsm_16: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_17;
end
fsm_17: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_18;
end
fsm_18: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_19;
end
fsm_19: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_20;
end
fsm_20: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_21;
end
fsm_21: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_22;
end
fsm_22: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_23;
end
fsm_23: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_24;
end
fsm_24: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_25;
end
fsm_25: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_26;
end
fsm_26: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_27;
end
fsm_27: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_28;
end
fsm_28: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_29;
end
fsm_29: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_30;
end
fsm_30: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_31;
end
fsm_31: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_32;
end
fsm_32: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_33;
end
fsm_33: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_34;
end
fsm_34: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_35;
end
fsm_35: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_36;
end
fsm_36: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_37;
end
fsm_37: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_38;
end
fsm_38: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_39;
end
fsm_39: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_40;
end
fsm_40: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_41;
end
fsm_41: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_42;
end
fsm_42: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_43;
end
fsm_43: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_44;
end
fsm_44: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_45;
end
fsm_45: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_46;
end
fsm_46: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_47;
end
fsm_47: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_48;
end
fsm_48: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_49;
end
fsm_49: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_50;
end
fsm_50: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_51;
end
fsm_51: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_52;
end
fsm_52: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_53;
end
fsm_53: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_54;
end
fsm_54: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_55;
end
fsm_55: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_56;
end
fsm_56: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_57;
end
fsm_57: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_58;
end
fsm_58: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_59;
end
fsm_59: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_60;
end
fsm_60: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_61;
end
fsm_61: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_62;
end
fsm_62: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_63;
end
fsm_63: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_64;
end
fsm_64: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_65;
end
fsm_65: begin
$display("%s = %d", "dout0", dout0);
$display("%s = %d", "dout1", dout1);
$display("%s = %d", "dout2", dout2);
$display("%s = %d", "dout3", dout3);
$display("%s = %d", "dout4", dout4);
$display("%s = %d", "dout5", dout5);
$display("%s = %d", "dout6", dout6);
$display("%s = %d", "dout7", dout7);
$display("----");
fsm <= fsm_66;
end
fsm_66: begin
$finish;
end
endcase
end
end
endmodule
module sort
(
input CLK,
input RST,
input signed [32-1:0] din0,
input signed [32-1:0] din1,
input signed [32-1:0] din2,
input signed [32-1:0] din3,
input signed [32-1:0] din4,
input signed [32-1:0] din5,
input signed [32-1:0] din6,
input signed [32-1:0] din7,
output signed [32-1:0] dout0,
output signed [32-1:0] dout1,
output signed [32-1:0] dout7,
output signed [32-1:0] dout6,
output signed [32-1:0] dout5,
output signed [32-1:0] dout4,
output signed [32-1:0] dout3,
output signed [32-1:0] dout2
);
reg [1-1:0] _dataflow_lessthan_data_8;
reg _dataflow_lessthan_valid_8;
wire _dataflow_lessthan_ready_8;
reg signed [32-1:0] _dataflow__delay_data_92;
reg _dataflow__delay_valid_92;
wire _dataflow__delay_ready_92;
reg signed [32-1:0] _dataflow__delay_data_93;
reg _dataflow__delay_valid_93;
wire _dataflow__delay_ready_93;
reg signed [32-1:0] _dataflow__delay_data_94;
reg _dataflow__delay_valid_94;
wire _dataflow__delay_ready_94;
reg signed [32-1:0] _dataflow__delay_data_98;
reg _dataflow__delay_valid_98;
wire _dataflow__delay_ready_98;
reg signed [32-1:0] _dataflow__delay_data_104;
reg _dataflow__delay_valid_104;
wire _dataflow__delay_ready_104;
reg signed [32-1:0] _dataflow__delay_data_112;
reg _dataflow__delay_valid_112;
wire _dataflow__delay_ready_112;
reg signed [32-1:0] _dataflow__delay_data_122;
reg _dataflow__delay_valid_122;
wire _dataflow__delay_ready_122;
reg signed [32-1:0] _dataflow__delay_data_134;
reg _dataflow__delay_valid_134;
wire _dataflow__delay_ready_134;
reg signed [32-1:0] _dataflow_cond_data_9;
reg _dataflow_cond_valid_9;
wire _dataflow_cond_ready_9;
reg signed [32-1:0] _dataflow_cond_data_10;
reg _dataflow_cond_valid_10;
wire _dataflow_cond_ready_10;
assign _dataflow_lessthan_ready_8 = (_dataflow_cond_ready_9 || !_dataflow_cond_valid_9) && (_dataflow_lessthan_valid_8 && _dataflow__delay_valid_93 && _dataflow__delay_valid_92) && ((_dataflow_cond_ready_10 || !_dataflow_cond_valid_10) && (_dataflow_lessthan_valid_8 && _dataflow__delay_valid_92 && _dataflow__delay_valid_93));
assign _dataflow__delay_ready_92 = (_dataflow_cond_ready_9 || !_dataflow_cond_valid_9) && (_dataflow_lessthan_valid_8 && _dataflow__delay_valid_93 && _dataflow__delay_valid_92) && ((_dataflow_cond_ready_10 || !_dataflow_cond_valid_10) && (_dataflow_lessthan_valid_8 && _dataflow__delay_valid_92 && _dataflow__delay_valid_93));
assign _dataflow__delay_ready_93 = (_dataflow_cond_ready_9 || !_dataflow_cond_valid_9) && (_dataflow_lessthan_valid_8 && _dataflow__delay_valid_93 && _dataflow__delay_valid_92) && ((_dataflow_cond_ready_10 || !_dataflow_cond_valid_10) && (_dataflow_lessthan_valid_8 && _dataflow__delay_valid_92 && _dataflow__delay_valid_93));
reg signed [32-1:0] _dataflow__delay_data_95;
reg _dataflow__delay_valid_95;
wire _dataflow__delay_ready_95;
assign _dataflow__delay_ready_94 = (_dataflow__delay_ready_95 || !_dataflow__delay_valid_95) && _dataflow__delay_valid_94;
reg signed [32-1:0] _dataflow__delay_data_99;
reg _dataflow__delay_valid_99;
wire _dataflow__delay_ready_99;
assign _dataflow__delay_ready_98 = (_dataflow__delay_ready_99 || !_dataflow__delay_valid_99) && _dataflow__delay_valid_98;
reg signed [32-1:0] _dataflow__delay_data_105;
reg _dataflow__delay_valid_105;
wire _dataflow__delay_ready_105;
assign _dataflow__delay_ready_104 = (_dataflow__delay_ready_105 || !_dataflow__delay_valid_105) && _dataflow__delay_valid_104;
reg signed [32-1:0] _dataflow__delay_data_113;
reg _dataflow__delay_valid_113;
wire _dataflow__delay_ready_113;
assign _dataflow__delay_ready_112 = (_dataflow__delay_ready_113 || !_dataflow__delay_valid_113) && _dataflow__delay_valid_112;
reg signed [32-1:0] _dataflow__delay_data_123;
reg _dataflow__delay_valid_123;
wire _dataflow__delay_ready_123;
assign _dataflow__delay_ready_122 = (_dataflow__delay_ready_123 || !_dataflow__delay_valid_123) && _dataflow__delay_valid_122;
reg signed [32-1:0] _dataflow__delay_data_135;
reg _dataflow__delay_valid_135;
wire _dataflow__delay_ready_135;
assign _dataflow__delay_ready_134 = (_dataflow__delay_ready_135 || !_dataflow__delay_valid_135) && _dataflow__delay_valid_134;
reg [1-1:0] _dataflow_lessthan_data_11;
reg _dataflow_lessthan_valid_11;
wire _dataflow_lessthan_ready_11;
reg signed [32-1:0] _dataflow__delay_data_96;
reg _dataflow__delay_valid_96;
wire _dataflow__delay_ready_96;
assign _dataflow__delay_ready_95 = (_dataflow_lessthan_ready_11 || !_dataflow_lessthan_valid_11) && (_dataflow_cond_valid_10 && _dataflow__delay_valid_95) && ((_dataflow__delay_ready_96 || !_dataflow__delay_valid_96) && _dataflow__delay_valid_95);
reg signed [32-1:0] _dataflow__delay_data_97;
reg _dataflow__delay_valid_97;
wire _dataflow__delay_ready_97;
assign _dataflow_cond_ready_10 = (_dataflow_lessthan_ready_11 || !_dataflow_lessthan_valid_11) && (_dataflow_cond_valid_10 && _dataflow__delay_valid_95) && ((_dataflow__delay_ready_97 || !_dataflow__delay_valid_97) && _dataflow_cond_valid_10);
reg signed [32-1:0] _dataflow__delay_data_100;
reg _dataflow__delay_valid_100;
wire _dataflow__delay_ready_100;
assign _dataflow__delay_ready_99 = (_dataflow__delay_ready_100 || !_dataflow__delay_valid_100) && _dataflow__delay_valid_99;
reg signed [32-1:0] _dataflow__delay_data_106;
reg _dataflow__delay_valid_106;
wire _dataflow__delay_ready_106;
assign _dataflow__delay_ready_105 = (_dataflow__delay_ready_106 || !_dataflow__delay_valid_106) && _dataflow__delay_valid_105;
reg signed [32-1:0] _dataflow__delay_data_114;
reg _dataflow__delay_valid_114;
wire _dataflow__delay_ready_114;
assign _dataflow__delay_ready_113 = (_dataflow__delay_ready_114 || !_dataflow__delay_valid_114) && _dataflow__delay_valid_113;
reg signed [32-1:0] _dataflow__delay_data_124;
reg _dataflow__delay_valid_124;
wire _dataflow__delay_ready_124;
assign _dataflow__delay_ready_123 = (_dataflow__delay_ready_124 || !_dataflow__delay_valid_124) && _dataflow__delay_valid_123;
reg signed [32-1:0] _dataflow__delay_data_136;
reg _dataflow__delay_valid_136;
wire _dataflow__delay_ready_136;
assign _dataflow__delay_ready_135 = (_dataflow__delay_ready_136 || !_dataflow__delay_valid_136) && _dataflow__delay_valid_135;
reg signed [32-1:0] _dataflow__delay_data_148;
reg _dataflow__delay_valid_148;
wire _dataflow__delay_ready_148;
assign _dataflow_cond_ready_9 = (_dataflow__delay_ready_148 || !_dataflow__delay_valid_148) && _dataflow_cond_valid_9;
reg signed [32-1:0] _dataflow_cond_data_12;
reg _dataflow_cond_valid_12;
wire _dataflow_cond_ready_12;
reg signed [32-1:0] _dataflow_cond_data_13;
reg _dataflow_cond_valid_13;
wire _dataflow_cond_ready_13;
assign _dataflow_lessthan_ready_11 = (_dataflow_cond_ready_12 || !_dataflow_cond_valid_12) && (_dataflow_lessthan_valid_11 && _dataflow__delay_valid_97 && _dataflow__delay_valid_96) && ((_dataflow_cond_ready_13 || !_dataflow_cond_valid_13) && (_dataflow_lessthan_valid_11 && _dataflow__delay_valid_96 && _dataflow__delay_valid_97));
assign _dataflow__delay_ready_96 = (_dataflow_cond_ready_12 || !_dataflow_cond_valid_12) && (_dataflow_lessthan_valid_11 && _dataflow__delay_valid_97 && _dataflow__delay_valid_96) && ((_dataflow_cond_ready_13 || !_dataflow_cond_valid_13) && (_dataflow_lessthan_valid_11 && _dataflow__delay_valid_96 && _dataflow__delay_valid_97));
assign _dataflow__delay_ready_97 = (_dataflow_cond_ready_12 || !_dataflow_cond_valid_12) && (_dataflow_lessthan_valid_11 && _dataflow__delay_valid_97 && _dataflow__delay_valid_96) && ((_dataflow_cond_ready_13 || !_dataflow_cond_valid_13) && (_dataflow_lessthan_valid_11 && _dataflow__delay_valid_96 && _dataflow__delay_valid_97));
reg signed [32-1:0] _dataflow__delay_data_101;
reg _dataflow__delay_valid_101;
wire _dataflow__delay_ready_101;
assign _dataflow__delay_ready_100 = (_dataflow__delay_ready_101 || !_dataflow__delay_valid_101) && _dataflow__delay_valid_100;
reg signed [32-1:0] _dataflow__delay_data_107;
reg _dataflow__delay_valid_107;
wire _dataflow__delay_ready_107;
assign _dataflow__delay_ready_106 = (_dataflow__delay_ready_107 || !_dataflow__delay_valid_107) && _dataflow__delay_valid_106;
reg signed [32-1:0] _dataflow__delay_data_115;
reg _dataflow__delay_valid_115;
wire _dataflow__delay_ready_115;
assign _dataflow__delay_ready_114 = (_dataflow__delay_ready_115 || !_dataflow__delay_valid_115) && _dataflow__delay_valid_114;
reg signed [32-1:0] _dataflow__delay_data_125;
reg _dataflow__delay_valid_125;
wire _dataflow__delay_ready_125;
assign _dataflow__delay_ready_124 = (_dataflow__delay_ready_125 || !_dataflow__delay_valid_125) && _dataflow__delay_valid_124;
reg signed [32-1:0] _dataflow__delay_data_137;
reg _dataflow__delay_valid_137;
wire _dataflow__delay_ready_137;
assign _dataflow__delay_ready_136 = (_dataflow__delay_ready_137 || !_dataflow__delay_valid_137) && _dataflow__delay_valid_136;
reg signed [32-1:0] _dataflow__delay_data_149;
reg _dataflow__delay_valid_149;
wire _dataflow__delay_ready_149;
assign _dataflow__delay_ready_148 = (_dataflow__delay_ready_149 || !_dataflow__delay_valid_149) && _dataflow__delay_valid_148;
reg [1-1:0] _dataflow_lessthan_data_14;
reg _dataflow_lessthan_valid_14;
wire _dataflow_lessthan_ready_14;
reg [1-1:0] _dataflow_lessthan_data_29;
reg _dataflow_lessthan_valid_29;
wire _dataflow_lessthan_ready_29;
reg signed [32-1:0] _dataflow__delay_data_102;
reg _dataflow__delay_valid_102;
wire _dataflow__delay_ready_102;
assign _dataflow__delay_ready_101 = (_dataflow_lessthan_ready_14 || !_dataflow_lessthan_valid_14) && (_dataflow_cond_valid_13 && _dataflow__delay_valid_101) && ((_dataflow__delay_ready_102 || !_dataflow__delay_valid_102) && _dataflow__delay_valid_101);
reg signed [32-1:0] _dataflow__delay_data_103;
reg _dataflow__delay_valid_103;
wire _dataflow__delay_ready_103;
assign _dataflow_cond_ready_13 = (_dataflow_lessthan_ready_14 || !_dataflow_lessthan_valid_14) && (_dataflow_cond_valid_13 && _dataflow__delay_valid_101) && ((_dataflow__delay_ready_103 || !_dataflow__delay_valid_103) && _dataflow_cond_valid_13);
reg signed [32-1:0] _dataflow__delay_data_108;
reg _dataflow__delay_valid_108;
wire _dataflow__delay_ready_108;
assign _dataflow__delay_ready_107 = (_dataflow__delay_ready_108 || !_dataflow__delay_valid_108) && _dataflow__delay_valid_107;
reg signed [32-1:0] _dataflow__delay_data_116;
reg _dataflow__delay_valid_116;
wire _dataflow__delay_ready_116;
assign _dataflow__delay_ready_115 = (_dataflow__delay_ready_116 || !_dataflow__delay_valid_116) && _dataflow__delay_valid_115;
reg signed [32-1:0] _dataflow__delay_data_126;
reg _dataflow__delay_valid_126;
wire _dataflow__delay_ready_126;
assign _dataflow__delay_ready_125 = (_dataflow__delay_ready_126 || !_dataflow__delay_valid_126) && _dataflow__delay_valid_125;
reg signed [32-1:0] _dataflow__delay_data_138;
reg _dataflow__delay_valid_138;
wire _dataflow__delay_ready_138;
assign _dataflow__delay_ready_137 = (_dataflow__delay_ready_138 || !_dataflow__delay_valid_138) && _dataflow__delay_valid_137;
reg signed [32-1:0] _dataflow__delay_data_150;
reg _dataflow__delay_valid_150;
wire _dataflow__delay_ready_150;
assign _dataflow_cond_ready_12 = (_dataflow_lessthan_ready_29 || !_dataflow_lessthan_valid_29) && (_dataflow__delay_valid_149 && _dataflow_cond_valid_12) && ((_dataflow__delay_ready_150 || !_dataflow__delay_valid_150) && _dataflow_cond_valid_12);
reg signed [32-1:0] _dataflow__delay_data_151;
reg _dataflow__delay_valid_151;
wire _dataflow__delay_ready_151;
assign _dataflow__delay_ready_149 = (_dataflow_lessthan_ready_29 || !_dataflow_lessthan_valid_29) && (_dataflow__delay_valid_149 && _dataflow_cond_valid_12) && ((_dataflow__delay_ready_151 || !_dataflow__delay_valid_151) && _dataflow__delay_valid_149);
reg signed [32-1:0] _dataflow_cond_data_15;
reg _dataflow_cond_valid_15;
wire _dataflow_cond_ready_15;
reg signed [32-1:0] _dataflow_cond_data_16;
reg _dataflow_cond_valid_16;
wire _dataflow_cond_ready_16;
assign _dataflow_lessthan_ready_14 = (_dataflow_cond_ready_15 || !_dataflow_cond_valid_15) && (_dataflow_lessthan_valid_14 && _dataflow__delay_valid_103 && _dataflow__delay_valid_102) && ((_dataflow_cond_ready_16 || !_dataflow_cond_valid_16) && (_dataflow_lessthan_valid_14 && _dataflow__delay_valid_102 && _dataflow__delay_valid_103));
assign _dataflow__delay_ready_102 = (_dataflow_cond_ready_15 || !_dataflow_cond_valid_15) && (_dataflow_lessthan_valid_14 && _dataflow__delay_valid_103 && _dataflow__delay_valid_102) && ((_dataflow_cond_ready_16 || !_dataflow_cond_valid_16) && (_dataflow_lessthan_valid_14 && _dataflow__delay_valid_102 && _dataflow__delay_valid_103));
assign _dataflow__delay_ready_103 = (_dataflow_cond_ready_15 || !_dataflow_cond_valid_15) && (_dataflow_lessthan_valid_14 && _dataflow__delay_valid_103 && _dataflow__delay_valid_102) && ((_dataflow_cond_ready_16 || !_dataflow_cond_valid_16) && (_dataflow_lessthan_valid_14 && _dataflow__delay_valid_102 && _dataflow__delay_valid_103));
reg signed [32-1:0] _dataflow_cond_data_30;
reg _dataflow_cond_valid_30;
wire _dataflow_cond_ready_30;
reg signed [32-1:0] _dataflow_cond_data_31;
reg _dataflow_cond_valid_31;
wire _dataflow_cond_ready_31;
assign _dataflow_lessthan_ready_29 = (_dataflow_cond_ready_30 || !_dataflow_cond_valid_30) && (_dataflow_lessthan_valid_29 && _dataflow__delay_valid_151 && _dataflow__delay_valid_150) && ((_dataflow_cond_ready_31 || !_dataflow_cond_valid_31) && (_dataflow_lessthan_valid_29 && _dataflow__delay_valid_150 && _dataflow__delay_valid_151));
assign _dataflow__delay_ready_150 = (_dataflow_cond_ready_30 || !_dataflow_cond_valid_30) && (_dataflow_lessthan_valid_29 && _dataflow__delay_valid_151 && _dataflow__delay_valid_150) && ((_dataflow_cond_ready_31 || !_dataflow_cond_valid_31) && (_dataflow_lessthan_valid_29 && _dataflow__delay_valid_150 && _dataflow__delay_valid_151));
assign _dataflow__delay_ready_151 = (_dataflow_cond_ready_30 || !_dataflow_cond_valid_30) && (_dataflow_lessthan_valid_29 && _dataflow__delay_valid_151 && _dataflow__delay_valid_150) && ((_dataflow_cond_ready_31 || !_dataflow_cond_valid_31) && (_dataflow_lessthan_valid_29 && _dataflow__delay_valid_150 && _dataflow__delay_valid_151));
reg signed [32-1:0] _dataflow__delay_data_109;
reg _dataflow__delay_valid_109;
wire _dataflow__delay_ready_109;
assign _dataflow__delay_ready_108 = (_dataflow__delay_ready_109 || !_dataflow__delay_valid_109) && _dataflow__delay_valid_108;
reg signed [32-1:0] _dataflow__delay_data_117;
reg _dataflow__delay_valid_117;
wire _dataflow__delay_ready_117;
assign _dataflow__delay_ready_116 = (_dataflow__delay_ready_117 || !_dataflow__delay_valid_117) && _dataflow__delay_valid_116;
reg signed [32-1:0] _dataflow__delay_data_127;
reg _dataflow__delay_valid_127;
wire _dataflow__delay_ready_127;
assign _dataflow__delay_ready_126 = (_dataflow__delay_ready_127 || !_dataflow__delay_valid_127) && _dataflow__delay_valid_126;
reg signed [32-1:0] _dataflow__delay_data_139;
reg _dataflow__delay_valid_139;
wire _dataflow__delay_ready_139;
assign _dataflow__delay_ready_138 = (_dataflow__delay_ready_139 || !_dataflow__delay_valid_139) && _dataflow__delay_valid_138;
reg [1-1:0] _dataflow_lessthan_data_17;
reg _dataflow_lessthan_valid_17;
wire _dataflow_lessthan_ready_17;
reg [1-1:0] _dataflow_lessthan_data_32;
reg _dataflow_lessthan_valid_32;
wire _dataflow_lessthan_ready_32;
reg signed [32-1:0] _dataflow__delay_data_110;
reg _dataflow__delay_valid_110;
wire _dataflow__delay_ready_110;
assign _dataflow__delay_ready_109 = (_dataflow_lessthan_ready_17 || !_dataflow_lessthan_valid_17) && (_dataflow_cond_valid_16 && _dataflow__delay_valid_109) && ((_dataflow__delay_ready_110 || !_dataflow__delay_valid_110) && _dataflow__delay_valid_109);
reg signed [32-1:0] _dataflow__delay_data_111;
reg _dataflow__delay_valid_111;
wire _dataflow__delay_ready_111;
assign _dataflow_cond_ready_16 = (_dataflow_lessthan_ready_17 || !_dataflow_lessthan_valid_17) && (_dataflow_cond_valid_16 && _dataflow__delay_valid_109) && ((_dataflow__delay_ready_111 || !_dataflow__delay_valid_111) && _dataflow_cond_valid_16);
reg signed [32-1:0] _dataflow__delay_data_118;
reg _dataflow__delay_valid_118;
wire _dataflow__delay_ready_118;
assign _dataflow__delay_ready_117 = (_dataflow__delay_ready_118 || !_dataflow__delay_valid_118) && _dataflow__delay_valid_117;
reg signed [32-1:0] _dataflow__delay_data_128;
reg _dataflow__delay_valid_128;
wire _dataflow__delay_ready_128;
assign _dataflow__delay_ready_127 = (_dataflow__delay_ready_128 || !_dataflow__delay_valid_128) && _dataflow__delay_valid_127;
reg signed [32-1:0] _dataflow__delay_data_140;
reg _dataflow__delay_valid_140;
wire _dataflow__delay_ready_140;
assign _dataflow__delay_ready_139 = (_dataflow__delay_ready_140 || !_dataflow__delay_valid_140) && _dataflow__delay_valid_139;
reg signed [32-1:0] _dataflow__delay_data_152;
reg _dataflow__delay_valid_152;
wire _dataflow__delay_ready_152;
assign _dataflow_cond_ready_15 = (_dataflow_lessthan_ready_32 || !_dataflow_lessthan_valid_32) && (_dataflow_cond_valid_31 && _dataflow_cond_valid_15) && ((_dataflow__delay_ready_152 || !_dataflow__delay_valid_152) && _dataflow_cond_valid_15);
reg signed [32-1:0] _dataflow__delay_data_153;
reg _dataflow__delay_valid_153;
wire _dataflow__delay_ready_153;
assign _dataflow_cond_ready_31 = (_dataflow_lessthan_ready_32 || !_dataflow_lessthan_valid_32) && (_dataflow_cond_valid_31 && _dataflow_cond_valid_15) && ((_dataflow__delay_ready_153 || !_dataflow__delay_valid_153) && _dataflow_cond_valid_31);
reg signed [32-1:0] _dataflow__delay_data_162;
reg _dataflow__delay_valid_162;
wire _dataflow__delay_ready_162;
assign _dataflow_cond_ready_30 = (_dataflow__delay_ready_162 || !_dataflow__delay_valid_162) && _dataflow_cond_valid_30;
reg signed [32-1:0] _dataflow_cond_data_18;
reg _dataflow_cond_valid_18;
wire _dataflow_cond_ready_18;
reg signed [32-1:0] _dataflow_cond_data_19;
reg _dataflow_cond_valid_19;
wire _dataflow_cond_ready_19;
assign _dataflow_lessthan_ready_17 = (_dataflow_cond_ready_18 || !_dataflow_cond_valid_18) && (_dataflow_lessthan_valid_17 && _dataflow__delay_valid_111 && _dataflow__delay_valid_110) && ((_dataflow_cond_ready_19 || !_dataflow_cond_valid_19) && (_dataflow_lessthan_valid_17 && _dataflow__delay_valid_110 && _dataflow__delay_valid_111));
assign _dataflow__delay_ready_110 = (_dataflow_cond_ready_18 || !_dataflow_cond_valid_18) && (_dataflow_lessthan_valid_17 && _dataflow__delay_valid_111 && _dataflow__delay_valid_110) && ((_dataflow_cond_ready_19 || !_dataflow_cond_valid_19) && (_dataflow_lessthan_valid_17 && _dataflow__delay_valid_110 && _dataflow__delay_valid_111));
assign _dataflow__delay_ready_111 = (_dataflow_cond_ready_18 || !_dataflow_cond_valid_18) && (_dataflow_lessthan_valid_17 && _dataflow__delay_valid_111 && _dataflow__delay_valid_110) && ((_dataflow_cond_ready_19 || !_dataflow_cond_valid_19) && (_dataflow_lessthan_valid_17 && _dataflow__delay_valid_110 && _dataflow__delay_valid_111));
reg signed [32-1:0] _dataflow_cond_data_33;
reg _dataflow_cond_valid_33;
wire _dataflow_cond_ready_33;
reg signed [32-1:0] _dataflow_cond_data_34;
reg _dataflow_cond_valid_34;
wire _dataflow_cond_ready_34;
assign _dataflow_lessthan_ready_32 = (_dataflow_cond_ready_33 || !_dataflow_cond_valid_33) && (_dataflow_lessthan_valid_32 && _dataflow__delay_valid_153 && _dataflow__delay_valid_152) && ((_dataflow_cond_ready_34 || !_dataflow_cond_valid_34) && (_dataflow_lessthan_valid_32 && _dataflow__delay_valid_152 && _dataflow__delay_valid_153));
assign _dataflow__delay_ready_152 = (_dataflow_cond_ready_33 || !_dataflow_cond_valid_33) && (_dataflow_lessthan_valid_32 && _dataflow__delay_valid_153 && _dataflow__delay_valid_152) && ((_dataflow_cond_ready_34 || !_dataflow_cond_valid_34) && (_dataflow_lessthan_valid_32 && _dataflow__delay_valid_152 && _dataflow__delay_valid_153));
assign _dataflow__delay_ready_153 = (_dataflow_cond_ready_33 || !_dataflow_cond_valid_33) && (_dataflow_lessthan_valid_32 && _dataflow__delay_valid_153 && _dataflow__delay_valid_152) && ((_dataflow_cond_ready_34 || !_dataflow_cond_valid_34) && (_dataflow_lessthan_valid_32 && _dataflow__delay_valid_152 && _dataflow__delay_valid_153));
reg signed [32-1:0] _dataflow__delay_data_119;
reg _dataflow__delay_valid_119;
wire _dataflow__delay_ready_119;
assign _dataflow__delay_ready_118 = (_dataflow__delay_ready_119 || !_dataflow__delay_valid_119) && _dataflow__delay_valid_118;
reg signed [32-1:0] _dataflow__delay_data_129;
reg _dataflow__delay_valid_129;
wire _dataflow__delay_ready_129;
assign _dataflow__delay_ready_128 = (_dataflow__delay_ready_129 || !_dataflow__delay_valid_129) && _dataflow__delay_valid_128;
reg signed [32-1:0] _dataflow__delay_data_141;
reg _dataflow__delay_valid_141;
wire _dataflow__delay_ready_141;
assign _dataflow__delay_ready_140 = (_dataflow__delay_ready_141 || !_dataflow__delay_valid_141) && _dataflow__delay_valid_140;
reg signed [32-1:0] _dataflow__delay_data_163;
reg _dataflow__delay_valid_163;
wire _dataflow__delay_ready_163;
assign _dataflow__delay_ready_162 = (_dataflow__delay_ready_163 || !_dataflow__delay_valid_163) && _dataflow__delay_valid_162;
reg [1-1:0] _dataflow_lessthan_data_20;
reg _dataflow_lessthan_valid_20;
wire _dataflow_lessthan_ready_20;
reg [1-1:0] _dataflow_lessthan_data_35;
reg _dataflow_lessthan_valid_35;
wire _dataflow_lessthan_ready_35;
reg [1-1:0] _dataflow_lessthan_data_47;
reg _dataflow_lessthan_valid_47;
wire _dataflow_lessthan_ready_47;
reg signed [32-1:0] _dataflow__delay_data_120;
reg _dataflow__delay_valid_120;
wire _dataflow__delay_ready_120;
assign _dataflow__delay_ready_119 = (_dataflow_lessthan_ready_20 || !_dataflow_lessthan_valid_20) && (_dataflow_cond_valid_19 && _dataflow__delay_valid_119) && ((_dataflow__delay_ready_120 || !_dataflow__delay_valid_120) && _dataflow__delay_valid_119);
reg signed [32-1:0] _dataflow__delay_data_121;
reg _dataflow__delay_valid_121;
wire _dataflow__delay_ready_121;
assign _dataflow_cond_ready_19 = (_dataflow_lessthan_ready_20 || !_dataflow_lessthan_valid_20) && (_dataflow_cond_valid_19 && _dataflow__delay_valid_119) && ((_dataflow__delay_ready_121 || !_dataflow__delay_valid_121) && _dataflow_cond_valid_19);
reg signed [32-1:0] _dataflow__delay_data_130;
reg _dataflow__delay_valid_130;
wire _dataflow__delay_ready_130;
assign _dataflow__delay_ready_129 = (_dataflow__delay_ready_130 || !_dataflow__delay_valid_130) && _dataflow__delay_valid_129;
reg signed [32-1:0] _dataflow__delay_data_142;
reg _dataflow__delay_valid_142;
wire _dataflow__delay_ready_142;
assign _dataflow__delay_ready_141 = (_dataflow__delay_ready_142 || !_dataflow__delay_valid_142) && _dataflow__delay_valid_141;
reg signed [32-1:0] _dataflow__delay_data_154;
reg _dataflow__delay_valid_154;
wire _dataflow__delay_ready_154;
assign _dataflow_cond_ready_18 = (_dataflow_lessthan_ready_35 || !_dataflow_lessthan_valid_35) && (_dataflow_cond_valid_34 && _dataflow_cond_valid_18) && ((_dataflow__delay_ready_154 || !_dataflow__delay_valid_154) && _dataflow_cond_valid_18);
reg signed [32-1:0] _dataflow__delay_data_155;
reg _dataflow__delay_valid_155;
wire _dataflow__delay_ready_155;
assign _dataflow_cond_ready_34 = (_dataflow_lessthan_ready_35 || !_dataflow_lessthan_valid_35) && (_dataflow_cond_valid_34 && _dataflow_cond_valid_18) && ((_dataflow__delay_ready_155 || !_dataflow__delay_valid_155) && _dataflow_cond_valid_34);
reg signed [32-1:0] _dataflow__delay_data_164;
reg _dataflow__delay_valid_164;
wire _dataflow__delay_ready_164;
assign _dataflow_cond_ready_33 = (_dataflow_lessthan_ready_47 || !_dataflow_lessthan_valid_47) && (_dataflow__delay_valid_163 && _dataflow_cond_valid_33) && ((_dataflow__delay_ready_164 || !_dataflow__delay_valid_164) && _dataflow_cond_valid_33);
reg signed [32-1:0] _dataflow__delay_data_165;
reg _dataflow__delay_valid_165;
wire _dataflow__delay_ready_165;
assign _dataflow__delay_ready_163 = (_dataflow_lessthan_ready_47 || !_dataflow_lessthan_valid_47) && (_dataflow__delay_valid_163 && _dataflow_cond_valid_33) && ((_dataflow__delay_ready_165 || !_dataflow__delay_valid_165) && _dataflow__delay_valid_163);
reg signed [32-1:0] _dataflow_cond_data_21;
reg _dataflow_cond_valid_21;
wire _dataflow_cond_ready_21;
reg signed [32-1:0] _dataflow_cond_data_22;
reg _dataflow_cond_valid_22;
wire _dataflow_cond_ready_22;
assign _dataflow_lessthan_ready_20 = (_dataflow_cond_ready_21 || !_dataflow_cond_valid_21) && (_dataflow_lessthan_valid_20 && _dataflow__delay_valid_121 && _dataflow__delay_valid_120) && ((_dataflow_cond_ready_22 || !_dataflow_cond_valid_22) && (_dataflow_lessthan_valid_20 && _dataflow__delay_valid_120 && _dataflow__delay_valid_121));
assign _dataflow__delay_ready_120 = (_dataflow_cond_ready_21 || !_dataflow_cond_valid_21) && (_dataflow_lessthan_valid_20 && _dataflow__delay_valid_121 && _dataflow__delay_valid_120) && ((_dataflow_cond_ready_22 || !_dataflow_cond_valid_22) && (_dataflow_lessthan_valid_20 && _dataflow__delay_valid_120 && _dataflow__delay_valid_121));
assign _dataflow__delay_ready_121 = (_dataflow_cond_ready_21 || !_dataflow_cond_valid_21) && (_dataflow_lessthan_valid_20 && _dataflow__delay_valid_121 && _dataflow__delay_valid_120) && ((_dataflow_cond_ready_22 || !_dataflow_cond_valid_22) && (_dataflow_lessthan_valid_20 && _dataflow__delay_valid_120 && _dataflow__delay_valid_121));
reg signed [32-1:0] _dataflow_cond_data_36;
reg _dataflow_cond_valid_36;
wire _dataflow_cond_ready_36;
reg signed [32-1:0] _dataflow_cond_data_37;
reg _dataflow_cond_valid_37;
wire _dataflow_cond_ready_37;
assign _dataflow_lessthan_ready_35 = (_dataflow_cond_ready_36 || !_dataflow_cond_valid_36) && (_dataflow_lessthan_valid_35 && _dataflow__delay_valid_155 && _dataflow__delay_valid_154) && ((_dataflow_cond_ready_37 || !_dataflow_cond_valid_37) && (_dataflow_lessthan_valid_35 && _dataflow__delay_valid_154 && _dataflow__delay_valid_155));
assign _dataflow__delay_ready_154 = (_dataflow_cond_ready_36 || !_dataflow_cond_valid_36) && (_dataflow_lessthan_valid_35 && _dataflow__delay_valid_155 && _dataflow__delay_valid_154) && ((_dataflow_cond_ready_37 || !_dataflow_cond_valid_37) && (_dataflow_lessthan_valid_35 && _dataflow__delay_valid_154 && _dataflow__delay_valid_155));
assign _dataflow__delay_ready_155 = (_dataflow_cond_ready_36 || !_dataflow_cond_valid_36) && (_dataflow_lessthan_valid_35 && _dataflow__delay_valid_155 && _dataflow__delay_valid_154) && ((_dataflow_cond_ready_37 || !_dataflow_cond_valid_37) && (_dataflow_lessthan_valid_35 && _dataflow__delay_valid_154 && _dataflow__delay_valid_155));
reg signed [32-1:0] _dataflow_cond_data_48;
reg _dataflow_cond_valid_48;
wire _dataflow_cond_ready_48;
reg signed [32-1:0] _dataflow_cond_data_49;
reg _dataflow_cond_valid_49;
wire _dataflow_cond_ready_49;
assign _dataflow_lessthan_ready_47 = (_dataflow_cond_ready_48 || !_dataflow_cond_valid_48) && (_dataflow_lessthan_valid_47 && _dataflow__delay_valid_165 && _dataflow__delay_valid_164) && ((_dataflow_cond_ready_49 || !_dataflow_cond_valid_49) && (_dataflow_lessthan_valid_47 && _dataflow__delay_valid_164 && _dataflow__delay_valid_165));
assign _dataflow__delay_ready_164 = (_dataflow_cond_ready_48 || !_dataflow_cond_valid_48) && (_dataflow_lessthan_valid_47 && _dataflow__delay_valid_165 && _dataflow__delay_valid_164) && ((_dataflow_cond_ready_49 || !_dataflow_cond_valid_49) && (_dataflow_lessthan_valid_47 && _dataflow__delay_valid_164 && _dataflow__delay_valid_165));
assign _dataflow__delay_ready_165 = (_dataflow_cond_ready_48 || !_dataflow_cond_valid_48) && (_dataflow_lessthan_valid_47 && _dataflow__delay_valid_165 && _dataflow__delay_valid_164) && ((_dataflow_cond_ready_49 || !_dataflow_cond_valid_49) && (_dataflow_lessthan_valid_47 && _dataflow__delay_valid_164 && _dataflow__delay_valid_165));
reg signed [32-1:0] _dataflow__delay_data_131;
reg _dataflow__delay_valid_131;
wire _dataflow__delay_ready_131;
assign _dataflow__delay_ready_130 = (_dataflow__delay_ready_131 || !_dataflow__delay_valid_131) && _dataflow__delay_valid_130;
reg signed [32-1:0] _dataflow__delay_data_143;
reg _dataflow__delay_valid_143;
wire _dataflow__delay_ready_143;
assign _dataflow__delay_ready_142 = (_dataflow__delay_ready_143 || !_dataflow__delay_valid_143) && _dataflow__delay_valid_142;
reg [1-1:0] _dataflow_lessthan_data_23;
reg _dataflow_lessthan_valid_23;
wire _dataflow_lessthan_ready_23;
reg [1-1:0] _dataflow_lessthan_data_38;
reg _dataflow_lessthan_valid_38;
wire _dataflow_lessthan_ready_38;
reg [1-1:0] _dataflow_lessthan_data_50;
reg _dataflow_lessthan_valid_50;
wire _dataflow_lessthan_ready_50;
reg signed [32-1:0] _dataflow__delay_data_132;
reg _dataflow__delay_valid_132;
wire _dataflow__delay_ready_132;
assign _dataflow__delay_ready_131 = (_dataflow_lessthan_ready_23 || !_dataflow_lessthan_valid_23) && (_dataflow_cond_valid_22 && _dataflow__delay_valid_131) && ((_dataflow__delay_ready_132 || !_dataflow__delay_valid_132) && _dataflow__delay_valid_131);
reg signed [32-1:0] _dataflow__delay_data_133;
reg _dataflow__delay_valid_133;
wire _dataflow__delay_ready_133;
assign _dataflow_cond_ready_22 = (_dataflow_lessthan_ready_23 || !_dataflow_lessthan_valid_23) && (_dataflow_cond_valid_22 && _dataflow__delay_valid_131) && ((_dataflow__delay_ready_133 || !_dataflow__delay_valid_133) && _dataflow_cond_valid_22);
reg signed [32-1:0] _dataflow__delay_data_144;
reg _dataflow__delay_valid_144;
wire _dataflow__delay_ready_144;
assign _dataflow__delay_ready_143 = (_dataflow__delay_ready_144 || !_dataflow__delay_valid_144) && _dataflow__delay_valid_143;
reg signed [32-1:0] _dataflow__delay_data_156;
reg _dataflow__delay_valid_156;
wire _dataflow__delay_ready_156;
assign _dataflow_cond_ready_21 = (_dataflow_lessthan_ready_38 || !_dataflow_lessthan_valid_38) && (_dataflow_cond_valid_37 && _dataflow_cond_valid_21) && ((_dataflow__delay_ready_156 || !_dataflow__delay_valid_156) && _dataflow_cond_valid_21);
reg signed [32-1:0] _dataflow__delay_data_157;
reg _dataflow__delay_valid_157;
wire _dataflow__delay_ready_157;
assign _dataflow_cond_ready_37 = (_dataflow_lessthan_ready_38 || !_dataflow_lessthan_valid_38) && (_dataflow_cond_valid_37 && _dataflow_cond_valid_21) && ((_dataflow__delay_ready_157 || !_dataflow__delay_valid_157) && _dataflow_cond_valid_37);
reg signed [32-1:0] _dataflow__delay_data_166;
reg _dataflow__delay_valid_166;
wire _dataflow__delay_ready_166;
assign _dataflow_cond_ready_36 = (_dataflow_lessthan_ready_50 || !_dataflow_lessthan_valid_50) && (_dataflow_cond_valid_49 && _dataflow_cond_valid_36) && ((_dataflow__delay_ready_166 || !_dataflow__delay_valid_166) && _dataflow_cond_valid_36);
reg signed [32-1:0] _dataflow__delay_data_167;
reg _dataflow__delay_valid_167;
wire _dataflow__delay_ready_167;
assign _dataflow_cond_ready_49 = (_dataflow_lessthan_ready_50 || !_dataflow_lessthan_valid_50) && (_dataflow_cond_valid_49 && _dataflow_cond_valid_36) && ((_dataflow__delay_ready_167 || !_dataflow__delay_valid_167) && _dataflow_cond_valid_49);
reg signed [32-1:0] _dataflow__delay_data_174;
reg _dataflow__delay_valid_174;
wire _dataflow__delay_ready_174;
assign _dataflow_cond_ready_48 = (_dataflow__delay_ready_174 || !_dataflow__delay_valid_174) && _dataflow_cond_valid_48;
reg signed [32-1:0] _dataflow_cond_data_24;
reg _dataflow_cond_valid_24;
wire _dataflow_cond_ready_24;
reg signed [32-1:0] _dataflow_cond_data_25;
reg _dataflow_cond_valid_25;
wire _dataflow_cond_ready_25;
assign _dataflow_lessthan_ready_23 = (_dataflow_cond_ready_24 || !_dataflow_cond_valid_24) && (_dataflow_lessthan_valid_23 && _dataflow__delay_valid_133 && _dataflow__delay_valid_132) && ((_dataflow_cond_ready_25 || !_dataflow_cond_valid_25) && (_dataflow_lessthan_valid_23 && _dataflow__delay_valid_132 && _dataflow__delay_valid_133));
assign _dataflow__delay_ready_132 = (_dataflow_cond_ready_24 || !_dataflow_cond_valid_24) && (_dataflow_lessthan_valid_23 && _dataflow__delay_valid_133 && _dataflow__delay_valid_132) && ((_dataflow_cond_ready_25 || !_dataflow_cond_valid_25) && (_dataflow_lessthan_valid_23 && _dataflow__delay_valid_132 && _dataflow__delay_valid_133));
assign _dataflow__delay_ready_133 = (_dataflow_cond_ready_24 || !_dataflow_cond_valid_24) && (_dataflow_lessthan_valid_23 && _dataflow__delay_valid_133 && _dataflow__delay_valid_132) && ((_dataflow_cond_ready_25 || !_dataflow_cond_valid_25) && (_dataflow_lessthan_valid_23 && _dataflow__delay_valid_132 && _dataflow__delay_valid_133));
reg signed [32-1:0] _dataflow_cond_data_39;
reg _dataflow_cond_valid_39;
wire _dataflow_cond_ready_39;
reg signed [32-1:0] _dataflow_cond_data_40;
reg _dataflow_cond_valid_40;
wire _dataflow_cond_ready_40;
assign _dataflow_lessthan_ready_38 = (_dataflow_cond_ready_39 || !_dataflow_cond_valid_39) && (_dataflow_lessthan_valid_38 && _dataflow__delay_valid_157 && _dataflow__delay_valid_156) && ((_dataflow_cond_ready_40 || !_dataflow_cond_valid_40) && (_dataflow_lessthan_valid_38 && _dataflow__delay_valid_156 && _dataflow__delay_valid_157));
assign _dataflow__delay_ready_156 = (_dataflow_cond_ready_39 || !_dataflow_cond_valid_39) && (_dataflow_lessthan_valid_38 && _dataflow__delay_valid_157 && _dataflow__delay_valid_156) && ((_dataflow_cond_ready_40 || !_dataflow_cond_valid_40) && (_dataflow_lessthan_valid_38 && _dataflow__delay_valid_156 && _dataflow__delay_valid_157));
assign _dataflow__delay_ready_157 = (_dataflow_cond_ready_39 || !_dataflow_cond_valid_39) && (_dataflow_lessthan_valid_38 && _dataflow__delay_valid_157 && _dataflow__delay_valid_156) && ((_dataflow_cond_ready_40 || !_dataflow_cond_valid_40) && (_dataflow_lessthan_valid_38 && _dataflow__delay_valid_156 && _dataflow__delay_valid_157));
reg signed [32-1:0] _dataflow_cond_data_51;
reg _dataflow_cond_valid_51;
wire _dataflow_cond_ready_51;
reg signed [32-1:0] _dataflow_cond_data_52;
reg _dataflow_cond_valid_52;
wire _dataflow_cond_ready_52;
assign _dataflow_lessthan_ready_50 = (_dataflow_cond_ready_51 || !_dataflow_cond_valid_51) && (_dataflow_lessthan_valid_50 && _dataflow__delay_valid_167 && _dataflow__delay_valid_166) && ((_dataflow_cond_ready_52 || !_dataflow_cond_valid_52) && (_dataflow_lessthan_valid_50 && _dataflow__delay_valid_166 && _dataflow__delay_valid_167));
assign _dataflow__delay_ready_166 = (_dataflow_cond_ready_51 || !_dataflow_cond_valid_51) && (_dataflow_lessthan_valid_50 && _dataflow__delay_valid_167 && _dataflow__delay_valid_166) && ((_dataflow_cond_ready_52 || !_dataflow_cond_valid_52) && (_dataflow_lessthan_valid_50 && _dataflow__delay_valid_166 && _dataflow__delay_valid_167));
assign _dataflow__delay_ready_167 = (_dataflow_cond_ready_51 || !_dataflow_cond_valid_51) && (_dataflow_lessthan_valid_50 && _dataflow__delay_valid_167 && _dataflow__delay_valid_166) && ((_dataflow_cond_ready_52 || !_dataflow_cond_valid_52) && (_dataflow_lessthan_valid_50 && _dataflow__delay_valid_166 && _dataflow__delay_valid_167));
reg signed [32-1:0] _dataflow__delay_data_145;
reg _dataflow__delay_valid_145;
wire _dataflow__delay_ready_145;
assign _dataflow__delay_ready_144 = (_dataflow__delay_ready_145 || !_dataflow__delay_valid_145) && _dataflow__delay_valid_144;
reg signed [32-1:0] _dataflow__delay_data_175;
reg _dataflow__delay_valid_175;
wire _dataflow__delay_ready_175;
assign _dataflow__delay_ready_174 = (_dataflow__delay_ready_175 || !_dataflow__delay_valid_175) && _dataflow__delay_valid_174;
reg [1-1:0] _dataflow_lessthan_data_26;
reg _dataflow_lessthan_valid_26;
wire _dataflow_lessthan_ready_26;
reg [1-1:0] _dataflow_lessthan_data_41;
reg _dataflow_lessthan_valid_41;
wire _dataflow_lessthan_ready_41;
reg [1-1:0] _dataflow_lessthan_data_53;
reg _dataflow_lessthan_valid_53;
wire _dataflow_lessthan_ready_53;
reg [1-1:0] _dataflow_lessthan_data_62;
reg _dataflow_lessthan_valid_62;
wire _dataflow_lessthan_ready_62;
reg signed [32-1:0] _dataflow__delay_data_146;
reg _dataflow__delay_valid_146;
wire _dataflow__delay_ready_146;
assign _dataflow__delay_ready_145 = (_dataflow_lessthan_ready_26 || !_dataflow_lessthan_valid_26) && (_dataflow_cond_valid_25 && _dataflow__delay_valid_145) && ((_dataflow__delay_ready_146 || !_dataflow__delay_valid_146) && _dataflow__delay_valid_145);
reg signed [32-1:0] _dataflow__delay_data_147;
reg _dataflow__delay_valid_147;
wire _dataflow__delay_ready_147;
assign _dataflow_cond_ready_25 = (_dataflow_lessthan_ready_26 || !_dataflow_lessthan_valid_26) && (_dataflow_cond_valid_25 && _dataflow__delay_valid_145) && ((_dataflow__delay_ready_147 || !_dataflow__delay_valid_147) && _dataflow_cond_valid_25);
reg signed [32-1:0] _dataflow__delay_data_158;
reg _dataflow__delay_valid_158;
wire _dataflow__delay_ready_158;
assign _dataflow_cond_ready_24 = (_dataflow_lessthan_ready_41 || !_dataflow_lessthan_valid_41) && (_dataflow_cond_valid_40 && _dataflow_cond_valid_24) && ((_dataflow__delay_ready_158 || !_dataflow__delay_valid_158) && _dataflow_cond_valid_24);
reg signed [32-1:0] _dataflow__delay_data_159;
reg _dataflow__delay_valid_159;
wire _dataflow__delay_ready_159;
assign _dataflow_cond_ready_40 = (_dataflow_lessthan_ready_41 || !_dataflow_lessthan_valid_41) && (_dataflow_cond_valid_40 && _dataflow_cond_valid_24) && ((_dataflow__delay_ready_159 || !_dataflow__delay_valid_159) && _dataflow_cond_valid_40);
reg signed [32-1:0] _dataflow__delay_data_168;
reg _dataflow__delay_valid_168;
wire _dataflow__delay_ready_168;
assign _dataflow_cond_ready_39 = (_dataflow_lessthan_ready_53 || !_dataflow_lessthan_valid_53) && (_dataflow_cond_valid_52 && _dataflow_cond_valid_39) && ((_dataflow__delay_ready_168 || !_dataflow__delay_valid_168) && _dataflow_cond_valid_39);
reg signed [32-1:0] _dataflow__delay_data_169;
reg _dataflow__delay_valid_169;
wire _dataflow__delay_ready_169;
assign _dataflow_cond_ready_52 = (_dataflow_lessthan_ready_53 || !_dataflow_lessthan_valid_53) && (_dataflow_cond_valid_52 && _dataflow_cond_valid_39) && ((_dataflow__delay_ready_169 || !_dataflow__delay_valid_169) && _dataflow_cond_valid_52);
reg signed [32-1:0] _dataflow__delay_data_176;
reg _dataflow__delay_valid_176;
wire _dataflow__delay_ready_176;
assign _dataflow_cond_ready_51 = (_dataflow_lessthan_ready_62 || !_dataflow_lessthan_valid_62) && (_dataflow__delay_valid_175 && _dataflow_cond_valid_51) && ((_dataflow__delay_ready_176 || !_dataflow__delay_valid_176) && _dataflow_cond_valid_51);
reg signed [32-1:0] _dataflow__delay_data_177;
reg _dataflow__delay_valid_177;
wire _dataflow__delay_ready_177;
assign _dataflow__delay_ready_175 = (_dataflow_lessthan_ready_62 || !_dataflow_lessthan_valid_62) && (_dataflow__delay_valid_175 && _dataflow_cond_valid_51) && ((_dataflow__delay_ready_177 || !_dataflow__delay_valid_177) && _dataflow__delay_valid_175);
reg signed [32-1:0] _dataflow_cond_data_27;
reg _dataflow_cond_valid_27;
wire _dataflow_cond_ready_27;
reg signed [32-1:0] _dataflow_cond_data_28;
reg _dataflow_cond_valid_28;
wire _dataflow_cond_ready_28;
assign _dataflow_lessthan_ready_26 = (_dataflow_cond_ready_27 || !_dataflow_cond_valid_27) && (_dataflow_lessthan_valid_26 && _dataflow__delay_valid_147 && _dataflow__delay_valid_146) && ((_dataflow_cond_ready_28 || !_dataflow_cond_valid_28) && (_dataflow_lessthan_valid_26 && _dataflow__delay_valid_146 && _dataflow__delay_valid_147));
assign _dataflow__delay_ready_146 = (_dataflow_cond_ready_27 || !_dataflow_cond_valid_27) && (_dataflow_lessthan_valid_26 && _dataflow__delay_valid_147 && _dataflow__delay_valid_146) && ((_dataflow_cond_ready_28 || !_dataflow_cond_valid_28) && (_dataflow_lessthan_valid_26 && _dataflow__delay_valid_146 && _dataflow__delay_valid_147));
assign _dataflow__delay_ready_147 = (_dataflow_cond_ready_27 || !_dataflow_cond_valid_27) && (_dataflow_lessthan_valid_26 && _dataflow__delay_valid_147 && _dataflow__delay_valid_146) && ((_dataflow_cond_ready_28 || !_dataflow_cond_valid_28) && (_dataflow_lessthan_valid_26 && _dataflow__delay_valid_146 && _dataflow__delay_valid_147));
reg signed [32-1:0] _dataflow_cond_data_42;
reg _dataflow_cond_valid_42;
wire _dataflow_cond_ready_42;
reg signed [32-1:0] _dataflow_cond_data_43;
reg _dataflow_cond_valid_43;
wire _dataflow_cond_ready_43;
assign _dataflow_lessthan_ready_41 = (_dataflow_cond_ready_42 || !_dataflow_cond_valid_42) && (_dataflow_lessthan_valid_41 && _dataflow__delay_valid_159 && _dataflow__delay_valid_158) && ((_dataflow_cond_ready_43 || !_dataflow_cond_valid_43) && (_dataflow_lessthan_valid_41 && _dataflow__delay_valid_158 && _dataflow__delay_valid_159));
assign _dataflow__delay_ready_158 = (_dataflow_cond_ready_42 || !_dataflow_cond_valid_42) && (_dataflow_lessthan_valid_41 && _dataflow__delay_valid_159 && _dataflow__delay_valid_158) && ((_dataflow_cond_ready_43 || !_dataflow_cond_valid_43) && (_dataflow_lessthan_valid_41 && _dataflow__delay_valid_158 && _dataflow__delay_valid_159));
assign _dataflow__delay_ready_159 = (_dataflow_cond_ready_42 || !_dataflow_cond_valid_42) && (_dataflow_lessthan_valid_41 && _dataflow__delay_valid_159 && _dataflow__delay_valid_158) && ((_dataflow_cond_ready_43 || !_dataflow_cond_valid_43) && (_dataflow_lessthan_valid_41 && _dataflow__delay_valid_158 && _dataflow__delay_valid_159));
reg signed [32-1:0] _dataflow_cond_data_54;
reg _dataflow_cond_valid_54;
wire _dataflow_cond_ready_54;
reg signed [32-1:0] _dataflow_cond_data_55;
reg _dataflow_cond_valid_55;
wire _dataflow_cond_ready_55;
assign _dataflow_lessthan_ready_53 = (_dataflow_cond_ready_54 || !_dataflow_cond_valid_54) && (_dataflow_lessthan_valid_53 && _dataflow__delay_valid_169 && _dataflow__delay_valid_168) && ((_dataflow_cond_ready_55 || !_dataflow_cond_valid_55) && (_dataflow_lessthan_valid_53 && _dataflow__delay_valid_168 && _dataflow__delay_valid_169));
assign _dataflow__delay_ready_168 = (_dataflow_cond_ready_54 || !_dataflow_cond_valid_54) && (_dataflow_lessthan_valid_53 && _dataflow__delay_valid_169 && _dataflow__delay_valid_168) && ((_dataflow_cond_ready_55 || !_dataflow_cond_valid_55) && (_dataflow_lessthan_valid_53 && _dataflow__delay_valid_168 && _dataflow__delay_valid_169));
assign _dataflow__delay_ready_169 = (_dataflow_cond_ready_54 || !_dataflow_cond_valid_54) && (_dataflow_lessthan_valid_53 && _dataflow__delay_valid_169 && _dataflow__delay_valid_168) && ((_dataflow_cond_ready_55 || !_dataflow_cond_valid_55) && (_dataflow_lessthan_valid_53 && _dataflow__delay_valid_168 && _dataflow__delay_valid_169));
reg signed [32-1:0] _dataflow_cond_data_63;
reg _dataflow_cond_valid_63;
wire _dataflow_cond_ready_63;
reg signed [32-1:0] _dataflow_cond_data_64;
reg _dataflow_cond_valid_64;
wire _dataflow_cond_ready_64;
assign _dataflow_lessthan_ready_62 = (_dataflow_cond_ready_63 || !_dataflow_cond_valid_63) && (_dataflow_lessthan_valid_62 && _dataflow__delay_valid_177 && _dataflow__delay_valid_176) && ((_dataflow_cond_ready_64 || !_dataflow_cond_valid_64) && (_dataflow_lessthan_valid_62 && _dataflow__delay_valid_176 && _dataflow__delay_valid_177));
assign _dataflow__delay_ready_176 = (_dataflow_cond_ready_63 || !_dataflow_cond_valid_63) && (_dataflow_lessthan_valid_62 && _dataflow__delay_valid_177 && _dataflow__delay_valid_176) && ((_dataflow_cond_ready_64 || !_dataflow_cond_valid_64) && (_dataflow_lessthan_valid_62 && _dataflow__delay_valid_176 && _dataflow__delay_valid_177));
assign _dataflow__delay_ready_177 = (_dataflow_cond_ready_63 || !_dataflow_cond_valid_63) && (_dataflow_lessthan_valid_62 && _dataflow__delay_valid_177 && _dataflow__delay_valid_176) && ((_dataflow_cond_ready_64 || !_dataflow_cond_valid_64) && (_dataflow_lessthan_valid_62 && _dataflow__delay_valid_176 && _dataflow__delay_valid_177));
reg [1-1:0] _dataflow_lessthan_data_44;
reg _dataflow_lessthan_valid_44;
wire _dataflow_lessthan_ready_44;
reg [1-1:0] _dataflow_lessthan_data_56;
reg _dataflow_lessthan_valid_56;
wire _dataflow_lessthan_ready_56;
reg [1-1:0] _dataflow_lessthan_data_65;
reg _dataflow_lessthan_valid_65;
wire _dataflow_lessthan_ready_65;
reg signed [32-1:0] _dataflow__delay_data_160;
reg _dataflow__delay_valid_160;
wire _dataflow__delay_ready_160;
assign _dataflow_cond_ready_27 = (_dataflow_lessthan_ready_44 || !_dataflow_lessthan_valid_44) && (_dataflow_cond_valid_43 && _dataflow_cond_valid_27) && ((_dataflow__delay_ready_160 || !_dataflow__delay_valid_160) && _dataflow_cond_valid_27);
reg signed [32-1:0] _dataflow__delay_data_161;
reg _dataflow__delay_valid_161;
// ----------------------------------------------------------------------------
// NOTE(review): this section appears to be machine-generated dataflow RTL
// (veriloggen-style `_dataflow_*` naming) — presumably a sorting/compare-
// exchange network; confirm against the generator script before hand-editing.
// Pattern used throughout: each node has a (data, valid, ready) triple.
// A source's `ready` is asserted when every downstream register can accept
// (downstream `ready` OR downstream `!valid`) AND all peer sources feeding
// the same consumer are `valid` — classic valid/ready back-pressure.
// ----------------------------------------------------------------------------
wire _dataflow__delay_ready_161;
// cond_43/cond_27 feed less-than comparator 44 and delay register 161.
assign _dataflow_cond_ready_43 = (_dataflow_lessthan_ready_44 || !_dataflow_lessthan_valid_44) && (_dataflow_cond_valid_43 && _dataflow_cond_valid_27) && ((_dataflow__delay_ready_161 || !_dataflow__delay_valid_161) && _dataflow_cond_valid_43);
reg signed [32-1:0] _dataflow__delay_data_170;
reg _dataflow__delay_valid_170;
wire _dataflow__delay_ready_170;
// cond_42/cond_55 feed less-than comparator 56 and delays 170/171.
assign _dataflow_cond_ready_42 = (_dataflow_lessthan_ready_56 || !_dataflow_lessthan_valid_56) && (_dataflow_cond_valid_55 && _dataflow_cond_valid_42) && ((_dataflow__delay_ready_170 || !_dataflow__delay_valid_170) && _dataflow_cond_valid_42);
reg signed [32-1:0] _dataflow__delay_data_171;
reg _dataflow__delay_valid_171;
wire _dataflow__delay_ready_171;
assign _dataflow_cond_ready_55 = (_dataflow_lessthan_ready_56 || !_dataflow_lessthan_valid_56) && (_dataflow_cond_valid_55 && _dataflow_cond_valid_42) && ((_dataflow__delay_ready_171 || !_dataflow__delay_valid_171) && _dataflow_cond_valid_55);
reg signed [32-1:0] _dataflow__delay_data_178;
reg _dataflow__delay_valid_178;
wire _dataflow__delay_ready_178;
// cond_54/cond_64 feed less-than comparator 65 and delays 178/179.
assign _dataflow_cond_ready_54 = (_dataflow_lessthan_ready_65 || !_dataflow_lessthan_valid_65) && (_dataflow_cond_valid_64 && _dataflow_cond_valid_54) && ((_dataflow__delay_ready_178 || !_dataflow__delay_valid_178) && _dataflow_cond_valid_54);
reg signed [32-1:0] _dataflow__delay_data_179;
reg _dataflow__delay_valid_179;
wire _dataflow__delay_ready_179;
assign _dataflow_cond_ready_64 = (_dataflow_lessthan_ready_65 || !_dataflow_lessthan_valid_65) && (_dataflow_cond_valid_64 && _dataflow_cond_valid_54) && ((_dataflow__delay_ready_179 || !_dataflow__delay_valid_179) && _dataflow_cond_valid_64);
reg signed [32-1:0] _dataflow__delay_data_184;
reg _dataflow__delay_valid_184;
wire _dataflow__delay_ready_184;
// cond_63 and cond_28 only pass through pure delay chains (no comparator fan-in).
assign _dataflow_cond_ready_63 = (_dataflow__delay_ready_184 || !_dataflow__delay_valid_184) && _dataflow_cond_valid_63;
reg signed [32-1:0] _dataflow__delay_data_202;
reg _dataflow__delay_valid_202;
wire _dataflow__delay_ready_202;
assign _dataflow_cond_ready_28 = (_dataflow__delay_ready_202 || !_dataflow__delay_valid_202) && _dataflow_cond_valid_28;
reg signed [32-1:0] _dataflow_cond_data_45;
reg _dataflow_cond_valid_45;
wire _dataflow_cond_ready_45;
reg signed [32-1:0] _dataflow_cond_data_46;
reg _dataflow_cond_valid_46;
wire _dataflow_cond_ready_46;
// Comparator 44 result + delayed operands 160/161 drive the two mux (cond)
// outputs 45/46; ready fans back identically to all three sources so the
// compare-exchange pair advances in lockstep.
assign _dataflow_lessthan_ready_44 = (_dataflow_cond_ready_45 || !_dataflow_cond_valid_45) && (_dataflow_lessthan_valid_44 && _dataflow__delay_valid_161 && _dataflow__delay_valid_160) && ((_dataflow_cond_ready_46 || !_dataflow_cond_valid_46) && (_dataflow_lessthan_valid_44 && _dataflow__delay_valid_160 && _dataflow__delay_valid_161));
assign _dataflow__delay_ready_160 = (_dataflow_cond_ready_45 || !_dataflow_cond_valid_45) && (_dataflow_lessthan_valid_44 && _dataflow__delay_valid_161 && _dataflow__delay_valid_160) && ((_dataflow_cond_ready_46 || !_dataflow_cond_valid_46) && (_dataflow_lessthan_valid_44 && _dataflow__delay_valid_160 && _dataflow__delay_valid_161));
assign _dataflow__delay_ready_161 = (_dataflow_cond_ready_45 || !_dataflow_cond_valid_45) && (_dataflow_lessthan_valid_44 && _dataflow__delay_valid_161 && _dataflow__delay_valid_160) && ((_dataflow_cond_ready_46 || !_dataflow_cond_valid_46) && (_dataflow_lessthan_valid_44 && _dataflow__delay_valid_160 && _dataflow__delay_valid_161));
reg signed [32-1:0] _dataflow_cond_data_57;
reg _dataflow_cond_valid_57;
wire _dataflow_cond_ready_57;
reg signed [32-1:0] _dataflow_cond_data_58;
reg _dataflow_cond_valid_58;
wire _dataflow_cond_ready_58;
// Comparator 56 + delays 170/171 -> cond 57/58 (same lockstep pattern).
assign _dataflow_lessthan_ready_56 = (_dataflow_cond_ready_57 || !_dataflow_cond_valid_57) && (_dataflow_lessthan_valid_56 && _dataflow__delay_valid_171 && _dataflow__delay_valid_170) && ((_dataflow_cond_ready_58 || !_dataflow_cond_valid_58) && (_dataflow_lessthan_valid_56 && _dataflow__delay_valid_170 && _dataflow__delay_valid_171));
assign _dataflow__delay_ready_170 = (_dataflow_cond_ready_57 || !_dataflow_cond_valid_57) && (_dataflow_lessthan_valid_56 && _dataflow__delay_valid_171 && _dataflow__delay_valid_170) && ((_dataflow_cond_ready_58 || !_dataflow_cond_valid_58) && (_dataflow_lessthan_valid_56 && _dataflow__delay_valid_170 && _dataflow__delay_valid_171));
assign _dataflow__delay_ready_171 = (_dataflow_cond_ready_57 || !_dataflow_cond_valid_57) && (_dataflow_lessthan_valid_56 && _dataflow__delay_valid_171 && _dataflow__delay_valid_170) && ((_dataflow_cond_ready_58 || !_dataflow_cond_valid_58) && (_dataflow_lessthan_valid_56 && _dataflow__delay_valid_170 && _dataflow__delay_valid_171));
reg signed [32-1:0] _dataflow_cond_data_66;
reg _dataflow_cond_valid_66;
wire _dataflow_cond_ready_66;
reg signed [32-1:0] _dataflow_cond_data_67;
reg _dataflow_cond_valid_67;
wire _dataflow_cond_ready_67;
// Comparator 65 + delays 178/179 -> cond 66/67.
assign _dataflow_lessthan_ready_65 = (_dataflow_cond_ready_66 || !_dataflow_cond_valid_66) && (_dataflow_lessthan_valid_65 && _dataflow__delay_valid_179 && _dataflow__delay_valid_178) && ((_dataflow_cond_ready_67 || !_dataflow_cond_valid_67) && (_dataflow_lessthan_valid_65 && _dataflow__delay_valid_178 && _dataflow__delay_valid_179));
assign _dataflow__delay_ready_178 = (_dataflow_cond_ready_66 || !_dataflow_cond_valid_66) && (_dataflow_lessthan_valid_65 && _dataflow__delay_valid_179 && _dataflow__delay_valid_178) && ((_dataflow_cond_ready_67 || !_dataflow_cond_valid_67) && (_dataflow_lessthan_valid_65 && _dataflow__delay_valid_178 && _dataflow__delay_valid_179));
assign _dataflow__delay_ready_179 = (_dataflow_cond_ready_66 || !_dataflow_cond_valid_66) && (_dataflow_lessthan_valid_65 && _dataflow__delay_valid_179 && _dataflow__delay_valid_178) && ((_dataflow_cond_ready_67 || !_dataflow_cond_valid_67) && (_dataflow_lessthan_valid_65 && _dataflow__delay_valid_178 && _dataflow__delay_valid_179));
reg signed [32-1:0] _dataflow__delay_data_185;
reg _dataflow__delay_valid_185;
wire _dataflow__delay_ready_185;
// Simple single-sink delay-chain links: ready when next register is free.
assign _dataflow__delay_ready_184 = (_dataflow__delay_ready_185 || !_dataflow__delay_valid_185) && _dataflow__delay_valid_184;
reg signed [32-1:0] _dataflow__delay_data_203;
reg _dataflow__delay_valid_203;
wire _dataflow__delay_ready_203;
assign _dataflow__delay_ready_202 = (_dataflow__delay_ready_203 || !_dataflow__delay_valid_203) && _dataflow__delay_valid_202;
// Next pipeline stage: 1-bit comparison results 59/68/74 and their operand
// delay registers. Same generated ready/valid pattern as the previous stage.
reg [1-1:0] _dataflow_lessthan_data_59;
reg _dataflow_lessthan_valid_59;
wire _dataflow_lessthan_ready_59;
reg [1-1:0] _dataflow_lessthan_data_68;
reg _dataflow_lessthan_valid_68;
wire _dataflow_lessthan_ready_68;
reg [1-1:0] _dataflow_lessthan_data_74;
reg _dataflow_lessthan_valid_74;
wire _dataflow_lessthan_ready_74;
reg signed [32-1:0] _dataflow__delay_data_172;
reg _dataflow__delay_valid_172;
wire _dataflow__delay_ready_172;
// cond_45/cond_58 feed comparator 59 and delays 172/173.
assign _dataflow_cond_ready_45 = (_dataflow_lessthan_ready_59 || !_dataflow_lessthan_valid_59) && (_dataflow_cond_valid_58 && _dataflow_cond_valid_45) && ((_dataflow__delay_ready_172 || !_dataflow__delay_valid_172) && _dataflow_cond_valid_45);
reg signed [32-1:0] _dataflow__delay_data_173;
reg _dataflow__delay_valid_173;
wire _dataflow__delay_ready_173;
assign _dataflow_cond_ready_58 = (_dataflow_lessthan_ready_59 || !_dataflow_lessthan_valid_59) && (_dataflow_cond_valid_58 && _dataflow_cond_valid_45) && ((_dataflow__delay_ready_173 || !_dataflow__delay_valid_173) && _dataflow_cond_valid_58);
reg signed [32-1:0] _dataflow__delay_data_180;
reg _dataflow__delay_valid_180;
wire _dataflow__delay_ready_180;
// cond_57/cond_67 feed comparator 68 and delays 180/181.
assign _dataflow_cond_ready_57 = (_dataflow_lessthan_ready_68 || !_dataflow_lessthan_valid_68) && (_dataflow_cond_valid_67 && _dataflow_cond_valid_57) && ((_dataflow__delay_ready_180 || !_dataflow__delay_valid_180) && _dataflow_cond_valid_57);
reg signed [32-1:0] _dataflow__delay_data_181;
reg _dataflow__delay_valid_181;
wire _dataflow__delay_ready_181;
assign _dataflow_cond_ready_67 = (_dataflow_lessthan_ready_68 || !_dataflow_lessthan_valid_68) && (_dataflow_cond_valid_67 && _dataflow_cond_valid_57) && ((_dataflow__delay_ready_181 || !_dataflow__delay_valid_181) && _dataflow_cond_valid_67);
reg signed [32-1:0] _dataflow__delay_data_186;
reg _dataflow__delay_valid_186;
wire _dataflow__delay_ready_186;
// cond_66 pairs with delay_185 into comparator 74 and delays 186/187.
assign _dataflow_cond_ready_66 = (_dataflow_lessthan_ready_74 || !_dataflow_lessthan_valid_74) && (_dataflow__delay_valid_185 && _dataflow_cond_valid_66) && ((_dataflow__delay_ready_186 || !_dataflow__delay_valid_186) && _dataflow_cond_valid_66);
reg signed [32-1:0] _dataflow__delay_data_187;
reg _dataflow__delay_valid_187;
wire _dataflow__delay_ready_187;
assign _dataflow__delay_ready_185 = (_dataflow_lessthan_ready_74 || !_dataflow_lessthan_valid_74) && (_dataflow__delay_valid_185 && _dataflow_cond_valid_66) && ((_dataflow__delay_ready_187 || !_dataflow__delay_valid_187) && _dataflow__delay_valid_185);
reg signed [32-1:0] _dataflow__delay_data_204;
reg _dataflow__delay_valid_204;
wire _dataflow__delay_ready_204;
assign _dataflow__delay_ready_203 = (_dataflow__delay_ready_204 || !_dataflow__delay_valid_204) && _dataflow__delay_valid_203;
reg signed [32-1:0] _dataflow__delay_data_214;
reg _dataflow__delay_valid_214;
wire _dataflow__delay_ready_214;
// cond_46 exits the comparison network here into a pure delay chain.
assign _dataflow_cond_ready_46 = (_dataflow__delay_ready_214 || !_dataflow__delay_valid_214) && _dataflow_cond_valid_46;
reg signed [32-1:0] _dataflow_cond_data_60;
reg _dataflow_cond_valid_60;
wire _dataflow_cond_ready_60;
reg signed [32-1:0] _dataflow_cond_data_61;
reg _dataflow_cond_valid_61;
wire _dataflow_cond_ready_61;
// Comparator 59 + delays 172/173 -> cond 60/61 (lockstep ready fan-back).
assign _dataflow_lessthan_ready_59 = (_dataflow_cond_ready_60 || !_dataflow_cond_valid_60) && (_dataflow_lessthan_valid_59 && _dataflow__delay_valid_173 && _dataflow__delay_valid_172) && ((_dataflow_cond_ready_61 || !_dataflow_cond_valid_61) && (_dataflow_lessthan_valid_59 && _dataflow__delay_valid_172 && _dataflow__delay_valid_173));
assign _dataflow__delay_ready_172 = (_dataflow_cond_ready_60 || !_dataflow_cond_valid_60) && (_dataflow_lessthan_valid_59 && _dataflow__delay_valid_173 && _dataflow__delay_valid_172) && ((_dataflow_cond_ready_61 || !_dataflow_cond_valid_61) && (_dataflow_lessthan_valid_59 && _dataflow__delay_valid_172 && _dataflow__delay_valid_173));
assign _dataflow__delay_ready_173 = (_dataflow_cond_ready_60 || !_dataflow_cond_valid_60) && (_dataflow_lessthan_valid_59 && _dataflow__delay_valid_173 && _dataflow__delay_valid_172) && ((_dataflow_cond_ready_61 || !_dataflow_cond_valid_61) && (_dataflow_lessthan_valid_59 && _dataflow__delay_valid_172 && _dataflow__delay_valid_173));
reg signed [32-1:0] _dataflow_cond_data_69;
reg _dataflow_cond_valid_69;
wire _dataflow_cond_ready_69;
reg signed [32-1:0] _dataflow_cond_data_70;
reg _dataflow_cond_valid_70;
wire _dataflow_cond_ready_70;
// Comparator 68 + delays 180/181 -> cond 69/70.
assign _dataflow_lessthan_ready_68 = (_dataflow_cond_ready_69 || !_dataflow_cond_valid_69) && (_dataflow_lessthan_valid_68 && _dataflow__delay_valid_181 && _dataflow__delay_valid_180) && ((_dataflow_cond_ready_70 || !_dataflow_cond_valid_70) && (_dataflow_lessthan_valid_68 && _dataflow__delay_valid_180 && _dataflow__delay_valid_181));
assign _dataflow__delay_ready_180 = (_dataflow_cond_ready_69 || !_dataflow_cond_valid_69) && (_dataflow_lessthan_valid_68 && _dataflow__delay_valid_181 && _dataflow__delay_valid_180) && ((_dataflow_cond_ready_70 || !_dataflow_cond_valid_70) && (_dataflow_lessthan_valid_68 && _dataflow__delay_valid_180 && _dataflow__delay_valid_181));
assign _dataflow__delay_ready_181 = (_dataflow_cond_ready_69 || !_dataflow_cond_valid_69) && (_dataflow_lessthan_valid_68 && _dataflow__delay_valid_181 && _dataflow__delay_valid_180) && ((_dataflow_cond_ready_70 || !_dataflow_cond_valid_70) && (_dataflow_lessthan_valid_68 && _dataflow__delay_valid_180 && _dataflow__delay_valid_181));
reg signed [32-1:0] _dataflow_cond_data_75;
reg _dataflow_cond_valid_75;
wire _dataflow_cond_ready_75;
reg signed [32-1:0] _dataflow_cond_data_76;
reg _dataflow_cond_valid_76;
wire _dataflow_cond_ready_76;
// Comparator 74 + delays 186/187 -> cond 75/76.
assign _dataflow_lessthan_ready_74 = (_dataflow_cond_ready_75 || !_dataflow_cond_valid_75) && (_dataflow_lessthan_valid_74 && _dataflow__delay_valid_187 && _dataflow__delay_valid_186) && ((_dataflow_cond_ready_76 || !_dataflow_cond_valid_76) && (_dataflow_lessthan_valid_74 && _dataflow__delay_valid_186 && _dataflow__delay_valid_187));
assign _dataflow__delay_ready_186 = (_dataflow_cond_ready_75 || !_dataflow_cond_valid_75) && (_dataflow_lessthan_valid_74 && _dataflow__delay_valid_187 && _dataflow__delay_valid_186) && ((_dataflow_cond_ready_76 || !_dataflow_cond_valid_76) && (_dataflow_lessthan_valid_74 && _dataflow__delay_valid_186 && _dataflow__delay_valid_187));
assign _dataflow__delay_ready_187 = (_dataflow_cond_ready_75 || !_dataflow_cond_valid_75) && (_dataflow_lessthan_valid_74 && _dataflow__delay_valid_187 && _dataflow__delay_valid_186) && ((_dataflow_cond_ready_76 || !_dataflow_cond_valid_76) && (_dataflow_lessthan_valid_74 && _dataflow__delay_valid_186 && _dataflow__delay_valid_187));
reg signed [32-1:0] _dataflow__delay_data_205;
reg _dataflow__delay_valid_205;
wire _dataflow__delay_ready_205;
assign _dataflow__delay_ready_204 = (_dataflow__delay_ready_205 || !_dataflow__delay_valid_205) && _dataflow__delay_valid_204;
reg signed [32-1:0] _dataflow__delay_data_215;
reg _dataflow__delay_valid_215;
wire _dataflow__delay_ready_215;
assign _dataflow__delay_ready_214 = (_dataflow__delay_ready_215 || !_dataflow__delay_valid_215) && _dataflow__delay_valid_214;
// Next stage: comparison results 71/77 plus operand delays; identical
// generated ready/valid pattern.
reg [1-1:0] _dataflow_lessthan_data_71;
reg _dataflow_lessthan_valid_71;
wire _dataflow_lessthan_ready_71;
reg [1-1:0] _dataflow_lessthan_data_77;
reg _dataflow_lessthan_valid_77;
wire _dataflow_lessthan_ready_77;
reg signed [32-1:0] _dataflow__delay_data_182;
reg _dataflow__delay_valid_182;
wire _dataflow__delay_ready_182;
// cond_60/cond_70 feed comparator 71 and delays 182/183.
assign _dataflow_cond_ready_60 = (_dataflow_lessthan_ready_71 || !_dataflow_lessthan_valid_71) && (_dataflow_cond_valid_70 && _dataflow_cond_valid_60) && ((_dataflow__delay_ready_182 || !_dataflow__delay_valid_182) && _dataflow_cond_valid_60);
reg signed [32-1:0] _dataflow__delay_data_183;
reg _dataflow__delay_valid_183;
wire _dataflow__delay_ready_183;
assign _dataflow_cond_ready_70 = (_dataflow_lessthan_ready_71 || !_dataflow_lessthan_valid_71) && (_dataflow_cond_valid_70 && _dataflow_cond_valid_60) && ((_dataflow__delay_ready_183 || !_dataflow__delay_valid_183) && _dataflow_cond_valid_70);
reg signed [32-1:0] _dataflow__delay_data_188;
reg _dataflow__delay_valid_188;
wire _dataflow__delay_ready_188;
// cond_69/cond_76 feed comparator 77 and delays 188/189.
assign _dataflow_cond_ready_69 = (_dataflow_lessthan_ready_77 || !_dataflow_lessthan_valid_77) && (_dataflow_cond_valid_76 && _dataflow_cond_valid_69) && ((_dataflow__delay_ready_188 || !_dataflow__delay_valid_188) && _dataflow_cond_valid_69);
reg signed [32-1:0] _dataflow__delay_data_189;
reg _dataflow__delay_valid_189;
wire _dataflow__delay_ready_189;
assign _dataflow_cond_ready_76 = (_dataflow_lessthan_ready_77 || !_dataflow_lessthan_valid_77) && (_dataflow_cond_valid_76 && _dataflow_cond_valid_69) && ((_dataflow__delay_ready_189 || !_dataflow__delay_valid_189) && _dataflow_cond_valid_76);
reg signed [32-1:0] _dataflow__delay_data_192;
reg _dataflow__delay_valid_192;
wire _dataflow__delay_ready_192;
// cond_75 exits into a pure delay chain (192 -> 193 ...).
assign _dataflow_cond_ready_75 = (_dataflow__delay_ready_192 || !_dataflow__delay_valid_192) && _dataflow_cond_valid_75;
reg signed [32-1:0] _dataflow__delay_data_206;
reg _dataflow__delay_valid_206;
wire _dataflow__delay_ready_206;
assign _dataflow__delay_ready_205 = (_dataflow__delay_ready_206 || !_dataflow__delay_valid_206) && _dataflow__delay_valid_205;
reg signed [32-1:0] _dataflow__delay_data_216;
reg _dataflow__delay_valid_216;
wire _dataflow__delay_ready_216;
assign _dataflow__delay_ready_215 = (_dataflow__delay_ready_216 || !_dataflow__delay_valid_216) && _dataflow__delay_valid_215;
reg signed [32-1:0] _dataflow__delay_data_224;
reg _dataflow__delay_valid_224;
wire _dataflow__delay_ready_224;
// cond_61 exits into a pure delay chain (224 -> 225 ...).
assign _dataflow_cond_ready_61 = (_dataflow__delay_ready_224 || !_dataflow__delay_valid_224) && _dataflow_cond_valid_61;
reg signed [32-1:0] _dataflow_cond_data_72;
reg _dataflow_cond_valid_72;
wire _dataflow_cond_ready_72;
reg signed [32-1:0] _dataflow_cond_data_73;
reg _dataflow_cond_valid_73;
wire _dataflow_cond_ready_73;
// Comparator 71 + delays 182/183 -> cond 72/73.
assign _dataflow_lessthan_ready_71 = (_dataflow_cond_ready_72 || !_dataflow_cond_valid_72) && (_dataflow_lessthan_valid_71 && _dataflow__delay_valid_183 && _dataflow__delay_valid_182) && ((_dataflow_cond_ready_73 || !_dataflow_cond_valid_73) && (_dataflow_lessthan_valid_71 && _dataflow__delay_valid_182 && _dataflow__delay_valid_183));
assign _dataflow__delay_ready_182 = (_dataflow_cond_ready_72 || !_dataflow_cond_valid_72) && (_dataflow_lessthan_valid_71 && _dataflow__delay_valid_183 && _dataflow__delay_valid_182) && ((_dataflow_cond_ready_73 || !_dataflow_cond_valid_73) && (_dataflow_lessthan_valid_71 && _dataflow__delay_valid_182 && _dataflow__delay_valid_183));
assign _dataflow__delay_ready_183 = (_dataflow_cond_ready_72 || !_dataflow_cond_valid_72) && (_dataflow_lessthan_valid_71 && _dataflow__delay_valid_183 && _dataflow__delay_valid_182) && ((_dataflow_cond_ready_73 || !_dataflow_cond_valid_73) && (_dataflow_lessthan_valid_71 && _dataflow__delay_valid_182 && _dataflow__delay_valid_183));
reg signed [32-1:0] _dataflow_cond_data_78;
reg _dataflow_cond_valid_78;
wire _dataflow_cond_ready_78;
reg signed [32-1:0] _dataflow_cond_data_79;
reg _dataflow_cond_valid_79;
wire _dataflow_cond_ready_79;
// Comparator 77 + delays 188/189 -> cond 78/79.
assign _dataflow_lessthan_ready_77 = (_dataflow_cond_ready_78 || !_dataflow_cond_valid_78) && (_dataflow_lessthan_valid_77 && _dataflow__delay_valid_189 && _dataflow__delay_valid_188) && ((_dataflow_cond_ready_79 || !_dataflow_cond_valid_79) && (_dataflow_lessthan_valid_77 && _dataflow__delay_valid_188 && _dataflow__delay_valid_189));
assign _dataflow__delay_ready_188 = (_dataflow_cond_ready_78 || !_dataflow_cond_valid_78) && (_dataflow_lessthan_valid_77 && _dataflow__delay_valid_189 && _dataflow__delay_valid_188) && ((_dataflow_cond_ready_79 || !_dataflow_cond_valid_79) && (_dataflow_lessthan_valid_77 && _dataflow__delay_valid_188 && _dataflow__delay_valid_189));
assign _dataflow__delay_ready_189 = (_dataflow_cond_ready_78 || !_dataflow_cond_valid_78) && (_dataflow_lessthan_valid_77 && _dataflow__delay_valid_189 && _dataflow__delay_valid_188) && ((_dataflow_cond_ready_79 || !_dataflow_cond_valid_79) && (_dataflow_lessthan_valid_77 && _dataflow__delay_valid_188 && _dataflow__delay_valid_189));
reg signed [32-1:0] _dataflow__delay_data_193;
reg _dataflow__delay_valid_193;
wire _dataflow__delay_ready_193;
assign _dataflow__delay_ready_192 = (_dataflow__delay_ready_193 || !_dataflow__delay_valid_193) && _dataflow__delay_valid_192;
reg signed [32-1:0] _dataflow__delay_data_207;
reg _dataflow__delay_valid_207;
wire _dataflow__delay_ready_207;
assign _dataflow__delay_ready_206 = (_dataflow__delay_ready_207 || !_dataflow__delay_valid_207) && _dataflow__delay_valid_206;
reg signed [32-1:0] _dataflow__delay_data_217;
reg _dataflow__delay_valid_217;
wire _dataflow__delay_ready_217;
assign _dataflow__delay_ready_216 = (_dataflow__delay_ready_217 || !_dataflow__delay_valid_217) && _dataflow__delay_valid_216;
reg signed [32-1:0] _dataflow__delay_data_225;
reg _dataflow__delay_valid_225;
wire _dataflow__delay_ready_225;
assign _dataflow__delay_ready_224 = (_dataflow__delay_ready_225 || !_dataflow__delay_valid_225) && _dataflow__delay_valid_224;
// Next stage: comparison results 80/83 plus operand delays.
reg [1-1:0] _dataflow_lessthan_data_80;
reg _dataflow_lessthan_valid_80;
wire _dataflow_lessthan_ready_80;
reg [1-1:0] _dataflow_lessthan_data_83;
reg _dataflow_lessthan_valid_83;
wire _dataflow_lessthan_ready_83;
reg signed [32-1:0] _dataflow__delay_data_190;
reg _dataflow__delay_valid_190;
wire _dataflow__delay_ready_190;
// cond_72/cond_79 feed comparator 80 and delays 190/191.
assign _dataflow_cond_ready_72 = (_dataflow_lessthan_ready_80 || !_dataflow_lessthan_valid_80) && (_dataflow_cond_valid_79 && _dataflow_cond_valid_72) && ((_dataflow__delay_ready_190 || !_dataflow__delay_valid_190) && _dataflow_cond_valid_72);
reg signed [32-1:0] _dataflow__delay_data_191;
reg _dataflow__delay_valid_191;
wire _dataflow__delay_ready_191;
assign _dataflow_cond_ready_79 = (_dataflow_lessthan_ready_80 || !_dataflow_lessthan_valid_80) && (_dataflow_cond_valid_79 && _dataflow_cond_valid_72) && ((_dataflow__delay_ready_191 || !_dataflow__delay_valid_191) && _dataflow_cond_valid_79);
reg signed [32-1:0] _dataflow__delay_data_194;
reg _dataflow__delay_valid_194;
wire _dataflow__delay_ready_194;
// cond_78 pairs with delay_193 into comparator 83 and delays 194/195.
assign _dataflow_cond_ready_78 = (_dataflow_lessthan_ready_83 || !_dataflow_lessthan_valid_83) && (_dataflow__delay_valid_193 && _dataflow_cond_valid_78) && ((_dataflow__delay_ready_194 || !_dataflow__delay_valid_194) && _dataflow_cond_valid_78);
reg signed [32-1:0] _dataflow__delay_data_195;
reg _dataflow__delay_valid_195;
wire _dataflow__delay_ready_195;
assign _dataflow__delay_ready_193 = (_dataflow_lessthan_ready_83 || !_dataflow_lessthan_valid_83) && (_dataflow__delay_valid_193 && _dataflow_cond_valid_78) && ((_dataflow__delay_ready_195 || !_dataflow__delay_valid_195) && _dataflow__delay_valid_193);
reg signed [32-1:0] _dataflow__delay_data_208;
reg _dataflow__delay_valid_208;
wire _dataflow__delay_ready_208;
assign _dataflow__delay_ready_207 = (_dataflow__delay_ready_208 || !_dataflow__delay_valid_208) && _dataflow__delay_valid_207;
reg signed [32-1:0] _dataflow__delay_data_218;
reg _dataflow__delay_valid_218;
wire _dataflow__delay_ready_218;
assign _dataflow__delay_ready_217 = (_dataflow__delay_ready_218 || !_dataflow__delay_valid_218) && _dataflow__delay_valid_217;
reg signed [32-1:0] _dataflow__delay_data_226;
reg _dataflow__delay_valid_226;
wire _dataflow__delay_ready_226;
assign _dataflow__delay_ready_225 = (_dataflow__delay_ready_226 || !_dataflow__delay_valid_226) && _dataflow__delay_valid_225;
reg signed [32-1:0] _dataflow__delay_data_232;
reg _dataflow__delay_valid_232;
wire _dataflow__delay_ready_232;
// cond_73 exits into a pure delay chain (232 -> 233 ...).
assign _dataflow_cond_ready_73 = (_dataflow__delay_ready_232 || !_dataflow__delay_valid_232) && _dataflow_cond_valid_73;
reg signed [32-1:0] _dataflow_cond_data_81;
reg _dataflow_cond_valid_81;
wire _dataflow_cond_ready_81;
reg signed [32-1:0] _dataflow_cond_data_82;
reg _dataflow_cond_valid_82;
wire _dataflow_cond_ready_82;
// Comparator 80 + delays 190/191 -> cond 81/82.
assign _dataflow_lessthan_ready_80 = (_dataflow_cond_ready_81 || !_dataflow_cond_valid_81) && (_dataflow_lessthan_valid_80 && _dataflow__delay_valid_191 && _dataflow__delay_valid_190) && ((_dataflow_cond_ready_82 || !_dataflow_cond_valid_82) && (_dataflow_lessthan_valid_80 && _dataflow__delay_valid_190 && _dataflow__delay_valid_191));
assign _dataflow__delay_ready_190 = (_dataflow_cond_ready_81 || !_dataflow_cond_valid_81) && (_dataflow_lessthan_valid_80 && _dataflow__delay_valid_191 && _dataflow__delay_valid_190) && ((_dataflow_cond_ready_82 || !_dataflow_cond_valid_82) && (_dataflow_lessthan_valid_80 && _dataflow__delay_valid_190 && _dataflow__delay_valid_191));
assign _dataflow__delay_ready_191 = (_dataflow_cond_ready_81 || !_dataflow_cond_valid_81) && (_dataflow_lessthan_valid_80 && _dataflow__delay_valid_191 && _dataflow__delay_valid_190) && ((_dataflow_cond_ready_82 || !_dataflow_cond_valid_82) && (_dataflow_lessthan_valid_80 && _dataflow__delay_valid_190 && _dataflow__delay_valid_191));
reg signed [32-1:0] _dataflow_cond_data_84;
reg _dataflow_cond_valid_84;
wire _dataflow_cond_ready_84;
reg signed [32-1:0] _dataflow_cond_data_85;
reg _dataflow_cond_valid_85;
wire _dataflow_cond_ready_85;
// Comparator 83 + delays 194/195 -> cond 84/85.
assign _dataflow_lessthan_ready_83 = (_dataflow_cond_ready_84 || !_dataflow_cond_valid_84) && (_dataflow_lessthan_valid_83 && _dataflow__delay_valid_195 && _dataflow__delay_valid_194) && ((_dataflow_cond_ready_85 || !_dataflow_cond_valid_85) && (_dataflow_lessthan_valid_83 && _dataflow__delay_valid_194 && _dataflow__delay_valid_195));
assign _dataflow__delay_ready_194 = (_dataflow_cond_ready_84 || !_dataflow_cond_valid_84) && (_dataflow_lessthan_valid_83 && _dataflow__delay_valid_195 && _dataflow__delay_valid_194) && ((_dataflow_cond_ready_85 || !_dataflow_cond_valid_85) && (_dataflow_lessthan_valid_83 && _dataflow__delay_valid_194 && _dataflow__delay_valid_195));
assign _dataflow__delay_ready_195 = (_dataflow_cond_ready_84 || !_dataflow_cond_valid_84) && (_dataflow_lessthan_valid_83 && _dataflow__delay_valid_195 && _dataflow__delay_valid_194) && ((_dataflow_cond_ready_85 || !_dataflow_cond_valid_85) && (_dataflow_lessthan_valid_83 && _dataflow__delay_valid_194 && _dataflow__delay_valid_195));
reg signed [32-1:0] _dataflow__delay_data_209;
reg _dataflow__delay_valid_209;
wire _dataflow__delay_ready_209;
assign _dataflow__delay_ready_208 = (_dataflow__delay_ready_209 || !_dataflow__delay_valid_209) && _dataflow__delay_valid_208;
reg signed [32-1:0] _dataflow__delay_data_219;
reg _dataflow__delay_valid_219;
wire _dataflow__delay_ready_219;
assign _dataflow__delay_ready_218 = (_dataflow__delay_ready_219 || !_dataflow__delay_valid_219) && _dataflow__delay_valid_218;
reg signed [32-1:0] _dataflow__delay_data_227;
reg _dataflow__delay_valid_227;
wire _dataflow__delay_ready_227;
assign _dataflow__delay_ready_226 = (_dataflow__delay_ready_227 || !_dataflow__delay_valid_227) && _dataflow__delay_valid_226;
reg signed [32-1:0] _dataflow__delay_data_233;
reg _dataflow__delay_valid_233;
wire _dataflow__delay_ready_233;
assign _dataflow__delay_ready_232 = (_dataflow__delay_ready_233 || !_dataflow__delay_valid_233) && _dataflow__delay_valid_232;
// Next stage: single comparison result 86 plus operand delays; the network
// narrows toward the final outputs here.
reg [1-1:0] _dataflow_lessthan_data_86;
reg _dataflow_lessthan_valid_86;
wire _dataflow_lessthan_ready_86;
reg signed [32-1:0] _dataflow__delay_data_196;
reg _dataflow__delay_valid_196;
wire _dataflow__delay_ready_196;
// cond_81/cond_85 feed comparator 86 and delays 196/197.
assign _dataflow_cond_ready_81 = (_dataflow_lessthan_ready_86 || !_dataflow_lessthan_valid_86) && (_dataflow_cond_valid_85 && _dataflow_cond_valid_81) && ((_dataflow__delay_ready_196 || !_dataflow__delay_valid_196) && _dataflow_cond_valid_81);
reg signed [32-1:0] _dataflow__delay_data_197;
reg _dataflow__delay_valid_197;
wire _dataflow__delay_ready_197;
assign _dataflow_cond_ready_85 = (_dataflow_lessthan_ready_86 || !_dataflow_lessthan_valid_86) && (_dataflow_cond_valid_85 && _dataflow_cond_valid_81) && ((_dataflow__delay_ready_197 || !_dataflow__delay_valid_197) && _dataflow_cond_valid_85);
reg signed [32-1:0] _dataflow__delay_data_198;
reg _dataflow__delay_valid_198;
wire _dataflow__delay_ready_198;
// cond_84 exits into a delay chain (198 -> 199 ...).
assign _dataflow_cond_ready_84 = (_dataflow__delay_ready_198 || !_dataflow__delay_valid_198) && _dataflow_cond_valid_84;
reg signed [32-1:0] _dataflow__delay_data_210;
reg _dataflow__delay_valid_210;
wire _dataflow__delay_ready_210;
assign _dataflow__delay_ready_209 = (_dataflow__delay_ready_210 || !_dataflow__delay_valid_210) && _dataflow__delay_valid_209;
reg signed [32-1:0] _dataflow__delay_data_220;
reg _dataflow__delay_valid_220;
wire _dataflow__delay_ready_220;
assign _dataflow__delay_ready_219 = (_dataflow__delay_ready_220 || !_dataflow__delay_valid_220) && _dataflow__delay_valid_219;
reg signed [32-1:0] _dataflow__delay_data_228;
reg _dataflow__delay_valid_228;
wire _dataflow__delay_ready_228;
assign _dataflow__delay_ready_227 = (_dataflow__delay_ready_228 || !_dataflow__delay_valid_228) && _dataflow__delay_valid_227;
reg signed [32-1:0] _dataflow__delay_data_234;
reg _dataflow__delay_valid_234;
wire _dataflow__delay_ready_234;
assign _dataflow__delay_ready_233 = (_dataflow__delay_ready_234 || !_dataflow__delay_valid_234) && _dataflow__delay_valid_233;
reg signed [32-1:0] _dataflow__delay_data_238;
reg _dataflow__delay_valid_238;
wire _dataflow__delay_ready_238;
// cond_82 exits into a delay chain (238 -> 239 ...).
assign _dataflow_cond_ready_82 = (_dataflow__delay_ready_238 || !_dataflow__delay_valid_238) && _dataflow_cond_valid_82;
reg signed [32-1:0] _dataflow_cond_data_87;
reg _dataflow_cond_valid_87;
wire _dataflow_cond_ready_87;
reg signed [32-1:0] _dataflow_cond_data_88;
reg _dataflow_cond_valid_88;
wire _dataflow_cond_ready_88;
// Comparator 86 + delays 196/197 -> cond 87/88.
assign _dataflow_lessthan_ready_86 = (_dataflow_cond_ready_87 || !_dataflow_cond_valid_87) && (_dataflow_lessthan_valid_86 && _dataflow__delay_valid_197 && _dataflow__delay_valid_196) && ((_dataflow_cond_ready_88 || !_dataflow_cond_valid_88) && (_dataflow_lessthan_valid_86 && _dataflow__delay_valid_196 && _dataflow__delay_valid_197));
assign _dataflow__delay_ready_196 = (_dataflow_cond_ready_87 || !_dataflow_cond_valid_87) && (_dataflow_lessthan_valid_86 && _dataflow__delay_valid_197 && _dataflow__delay_valid_196) && ((_dataflow_cond_ready_88 || !_dataflow_cond_valid_88) && (_dataflow_lessthan_valid_86 && _dataflow__delay_valid_196 && _dataflow__delay_valid_197));
assign _dataflow__delay_ready_197 = (_dataflow_cond_ready_87 || !_dataflow_cond_valid_87) && (_dataflow_lessthan_valid_86 && _dataflow__delay_valid_197 && _dataflow__delay_valid_196) && ((_dataflow_cond_ready_88 || !_dataflow_cond_valid_88) && (_dataflow_lessthan_valid_86 && _dataflow__delay_valid_196 && _dataflow__delay_valid_197));
reg signed [32-1:0] _dataflow__delay_data_199;
reg _dataflow__delay_valid_199;
wire _dataflow__delay_ready_199;
assign _dataflow__delay_ready_198 = (_dataflow__delay_ready_199 || !_dataflow__delay_valid_199) && _dataflow__delay_valid_198;
reg signed [32-1:0] _dataflow__delay_data_211;
reg _dataflow__delay_valid_211;
wire _dataflow__delay_ready_211;
assign _dataflow__delay_ready_210 = (_dataflow__delay_ready_211 || !_dataflow__delay_valid_211) && _dataflow__delay_valid_210;
reg signed [32-1:0] _dataflow__delay_data_221;
reg _dataflow__delay_valid_221;
wire _dataflow__delay_ready_221;
assign _dataflow__delay_ready_220 = (_dataflow__delay_ready_221 || !_dataflow__delay_valid_221) && _dataflow__delay_valid_220;
reg signed [32-1:0] _dataflow__delay_data_229;
reg _dataflow__delay_valid_229;
wire _dataflow__delay_ready_229;
assign _dataflow__delay_ready_228 = (_dataflow__delay_ready_229 || !_dataflow__delay_valid_229) && _dataflow__delay_valid_228;
reg signed [32-1:0] _dataflow__delay_data_235;
reg _dataflow__delay_valid_235;
wire _dataflow__delay_ready_235;
assign _dataflow__delay_ready_234 = (_dataflow__delay_ready_235 || !_dataflow__delay_valid_235) && _dataflow__delay_valid_234;
reg signed [32-1:0] _dataflow__delay_data_239;
reg _dataflow__delay_valid_239;
wire _dataflow__delay_ready_239;
assign _dataflow__delay_ready_238 = (_dataflow__delay_ready_239 || !_dataflow__delay_valid_239) && _dataflow__delay_valid_238;
// Final comparison stage: result 89 selects cond 90/91 (which drive the
// dout0/dout1 ports below); remaining delay chains align the other outputs.
reg [1-1:0] _dataflow_lessthan_data_89;
reg _dataflow_lessthan_valid_89;
wire _dataflow_lessthan_ready_89;
reg signed [32-1:0] _dataflow__delay_data_200;
reg _dataflow__delay_valid_200;
wire _dataflow__delay_ready_200;
// delay_199 pairs with cond_87 into comparator 89 and delays 200/201.
assign _dataflow__delay_ready_199 = (_dataflow_lessthan_ready_89 || !_dataflow_lessthan_valid_89) && (_dataflow__delay_valid_199 && _dataflow_cond_valid_87) && ((_dataflow__delay_ready_200 || !_dataflow__delay_valid_200) && _dataflow__delay_valid_199);
reg signed [32-1:0] _dataflow__delay_data_201;
reg _dataflow__delay_valid_201;
wire _dataflow__delay_ready_201;
assign _dataflow_cond_ready_87 = (_dataflow_lessthan_ready_89 || !_dataflow_lessthan_valid_89) && (_dataflow__delay_valid_199 && _dataflow_cond_valid_87) && ((_dataflow__delay_ready_201 || !_dataflow__delay_valid_201) && _dataflow_cond_valid_87);
reg signed [32-1:0] _dataflow__delay_data_212;
reg _dataflow__delay_valid_212;
wire _dataflow__delay_ready_212;
assign _dataflow__delay_ready_211 = (_dataflow__delay_ready_212 || !_dataflow__delay_valid_212) && _dataflow__delay_valid_211;
reg signed [32-1:0] _dataflow__delay_data_222;
reg _dataflow__delay_valid_222;
wire _dataflow__delay_ready_222;
assign _dataflow__delay_ready_221 = (_dataflow__delay_ready_222 || !_dataflow__delay_valid_222) && _dataflow__delay_valid_221;
reg signed [32-1:0] _dataflow__delay_data_230;
reg _dataflow__delay_valid_230;
wire _dataflow__delay_ready_230;
assign _dataflow__delay_ready_229 = (_dataflow__delay_ready_230 || !_dataflow__delay_valid_230) && _dataflow__delay_valid_229;
reg signed [32-1:0] _dataflow__delay_data_236;
reg _dataflow__delay_valid_236;
wire _dataflow__delay_ready_236;
assign _dataflow__delay_ready_235 = (_dataflow__delay_ready_236 || !_dataflow__delay_valid_236) && _dataflow__delay_valid_235;
reg signed [32-1:0] _dataflow__delay_data_240;
reg _dataflow__delay_valid_240;
wire _dataflow__delay_ready_240;
assign _dataflow__delay_ready_239 = (_dataflow__delay_ready_240 || !_dataflow__delay_valid_240) && _dataflow__delay_valid_239;
reg signed [32-1:0] _dataflow__delay_data_242;
reg _dataflow__delay_valid_242;
wire _dataflow__delay_ready_242;
// cond_88 exits into a delay chain (242 -> 243).
assign _dataflow_cond_ready_88 = (_dataflow__delay_ready_242 || !_dataflow__delay_valid_242) && _dataflow_cond_valid_88;
reg signed [32-1:0] _dataflow_cond_data_90;
reg _dataflow_cond_valid_90;
wire _dataflow_cond_ready_90;
reg signed [32-1:0] _dataflow_cond_data_91;
reg _dataflow_cond_valid_91;
wire _dataflow_cond_ready_91;
// Comparator 89 + delays 200/201 -> cond 90/91 (final output pair).
assign _dataflow_lessthan_ready_89 = (_dataflow_cond_ready_90 || !_dataflow_cond_valid_90) && (_dataflow_lessthan_valid_89 && _dataflow__delay_valid_200 && _dataflow__delay_valid_201) && ((_dataflow_cond_ready_91 || !_dataflow_cond_valid_91) && (_dataflow_lessthan_valid_89 && _dataflow__delay_valid_201 && _dataflow__delay_valid_200));
assign _dataflow__delay_ready_201 = (_dataflow_cond_ready_90 || !_dataflow_cond_valid_90) && (_dataflow_lessthan_valid_89 && _dataflow__delay_valid_200 && _dataflow__delay_valid_201) && ((_dataflow_cond_ready_91 || !_dataflow_cond_valid_91) && (_dataflow_lessthan_valid_89 && _dataflow__delay_valid_201 && _dataflow__delay_valid_200));
assign _dataflow__delay_ready_200 = (_dataflow_cond_ready_90 || !_dataflow_cond_valid_90) && (_dataflow_lessthan_valid_89 && _dataflow__delay_valid_200 && _dataflow__delay_valid_201) && ((_dataflow_cond_ready_91 || !_dataflow_cond_valid_91) && (_dataflow_lessthan_valid_89 && _dataflow__delay_valid_201 && _dataflow__delay_valid_200));
reg signed [32-1:0] _dataflow__delay_data_213;
reg _dataflow__delay_valid_213;
wire _dataflow__delay_ready_213;
assign _dataflow__delay_ready_212 = (_dataflow__delay_ready_213 || !_dataflow__delay_valid_213) && _dataflow__delay_valid_212;
reg signed [32-1:0] _dataflow__delay_data_223;
reg _dataflow__delay_valid_223;
wire _dataflow__delay_ready_223;
assign _dataflow__delay_ready_222 = (_dataflow__delay_ready_223 || !_dataflow__delay_valid_223) && _dataflow__delay_valid_222;
reg signed [32-1:0] _dataflow__delay_data_231;
reg _dataflow__delay_valid_231;
wire _dataflow__delay_ready_231;
assign _dataflow__delay_ready_230 = (_dataflow__delay_ready_231 || !_dataflow__delay_valid_231) && _dataflow__delay_valid_230;
reg signed [32-1:0] _dataflow__delay_data_237;
reg _dataflow__delay_valid_237;
wire _dataflow__delay_ready_237;
assign _dataflow__delay_ready_236 = (_dataflow__delay_ready_237 || !_dataflow__delay_valid_237) && _dataflow__delay_valid_236;
reg signed [32-1:0] _dataflow__delay_data_241;
reg _dataflow__delay_valid_241;
wire _dataflow__delay_ready_241;
assign _dataflow__delay_ready_240 = (_dataflow__delay_ready_241 || !_dataflow__delay_valid_241) && _dataflow__delay_valid_240;
reg signed [32-1:0] _dataflow__delay_data_243;
reg _dataflow__delay_valid_243;
wire _dataflow__delay_ready_243;
assign _dataflow__delay_ready_242 = (_dataflow__delay_ready_243 || !_dataflow__delay_valid_243) && _dataflow__delay_valid_242;
assign dout0 = _dataflow_cond_data_90;
assign _dataflow_cond_ready_90 = 1;
assign dout1 = _dataflow_cond_data_91;
assign _dataflow_cond_ready_91 = 1;
assign dout7 = _dataflow__delay_data_213;
assign _dataflow__delay_ready_213 = 1;
assign dout6 = _dataflow__delay_data_223;
assign _dataflow__delay_ready_223 = 1;
assign dout5 = _dataflow__delay_data_231;
assign _dataflow__delay_ready_231 = 1;
assign dout4 = _dataflow__delay_data_237;
assign _dataflow__delay_ready_237 = 1;
assign dout3 = _dataflow__delay_data_241;
assign _dataflow__delay_ready_241 = 1;
assign dout2 = _dataflow__delay_data_243;
assign _dataflow__delay_ready_243 = 1;
always @(posedge CLK) begin
if(RST) begin
_dataflow_lessthan_data_8 <= 0;
_dataflow_lessthan_valid_8 <= 0;
_dataflow__delay_data_92 <= 0;
_dataflow__delay_valid_92 <= 0;
_dataflow__delay_data_93 <= 0;
_dataflow__delay_valid_93 <= 0;
_dataflow__delay_data_94 <= 0;
_dataflow__delay_valid_94 <= 0;
_dataflow__delay_data_98 <= 0;
_dataflow__delay_valid_98 <= 0;
_dataflow__delay_data_104 <= 0;
_dataflow__delay_valid_104 <= 0;
_dataflow__delay_data_112 <= 0;
_dataflow__delay_valid_112 <= 0;
_dataflow__delay_data_122 <= 0;
_dataflow__delay_valid_122 <= 0;
_dataflow__delay_data_134 <= 0;
_dataflow__delay_valid_134 <= 0;
_dataflow_cond_data_9 <= 0;
_dataflow_cond_valid_9 <= 0;
_dataflow_cond_data_10 <= 0;
_dataflow_cond_valid_10 <= 0;
_dataflow__delay_data_95 <= 0;
_dataflow__delay_valid_95 <= 0;
_dataflow__delay_data_99 <= 0;
_dataflow__delay_valid_99 <= 0;
_dataflow__delay_data_105 <= 0;
_dataflow__delay_valid_105 <= 0;
_dataflow__delay_data_113 <= 0;
_dataflow__delay_valid_113 <= 0;
_dataflow__delay_data_123 <= 0;
_dataflow__delay_valid_123 <= 0;
_dataflow__delay_data_135 <= 0;
_dataflow__delay_valid_135 <= 0;
_dataflow_lessthan_data_11 <= 0;
_dataflow_lessthan_valid_11 <= 0;
_dataflow__delay_data_96 <= 0;
_dataflow__delay_valid_96 <= 0;
_dataflow__delay_data_97 <= 0;
_dataflow__delay_valid_97 <= 0;
_dataflow__delay_data_100 <= 0;
_dataflow__delay_valid_100 <= 0;
_dataflow__delay_data_106 <= 0;
_dataflow__delay_valid_106 <= 0;
_dataflow__delay_data_114 <= 0;
_dataflow__delay_valid_114 <= 0;
_dataflow__delay_data_124 <= 0;
_dataflow__delay_valid_124 <= 0;
_dataflow__delay_data_136 <= 0;
_dataflow__delay_valid_136 <= 0;
_dataflow__delay_data_148 <= 0;
_dataflow__delay_valid_148 <= 0;
_dataflow_cond_data_12 <= 0;
_dataflow_cond_valid_12 <= 0;
_dataflow_cond_data_13 <= 0;
_dataflow_cond_valid_13 <= 0;
_dataflow__delay_data_101 <= 0;
_dataflow__delay_valid_101 <= 0;
_dataflow__delay_data_107 <= 0;
_dataflow__delay_valid_107 <= 0;
_dataflow__delay_data_115 <= 0;
_dataflow__delay_valid_115 <= 0;
_dataflow__delay_data_125 <= 0;
_dataflow__delay_valid_125 <= 0;
_dataflow__delay_data_137 <= 0;
_dataflow__delay_valid_137 <= 0;
_dataflow__delay_data_149 <= 0;
_dataflow__delay_valid_149 <= 0;
_dataflow_lessthan_data_14 <= 0;
_dataflow_lessthan_valid_14 <= 0;
_dataflow_lessthan_data_29 <= 0;
_dataflow_lessthan_valid_29 <= 0;
_dataflow__delay_data_102 <= 0;
_dataflow__delay_valid_102 <= 0;
_dataflow__delay_data_103 <= 0;
_dataflow__delay_valid_103 <= 0;
_dataflow__delay_data_108 <= 0;
_dataflow__delay_valid_108 <= 0;
_dataflow__delay_data_116 <= 0;
_dataflow__delay_valid_116 <= 0;
_dataflow__delay_data_126 <= 0;
_dataflow__delay_valid_126 <= 0;
_dataflow__delay_data_138 <= 0;
_dataflow__delay_valid_138 <= 0;
_dataflow__delay_data_150 <= 0;
_dataflow__delay_valid_150 <= 0;
_dataflow__delay_data_151 <= 0;
_dataflow__delay_valid_151 <= 0;
_dataflow_cond_data_15 <= 0;
_dataflow_cond_valid_15 <= 0;
_dataflow_cond_data_16 <= 0;
_dataflow_cond_valid_16 <= 0;
_dataflow_cond_data_30 <= 0;
_dataflow_cond_valid_30 <= 0;
_dataflow_cond_data_31 <= 0;
_dataflow_cond_valid_31 <= 0;
_dataflow__delay_data_109 <= 0;
_dataflow__delay_valid_109 <= 0;
_dataflow__delay_data_117 <= 0;
_dataflow__delay_valid_117 <= 0;
_dataflow__delay_data_127 <= 0;
_dataflow__delay_valid_127 <= 0;
_dataflow__delay_data_139 <= 0;
_dataflow__delay_valid_139 <= 0;
_dataflow_lessthan_data_17 <= 0;
_dataflow_lessthan_valid_17 <= 0;
_dataflow_lessthan_data_32 <= 0;
_dataflow_lessthan_valid_32 <= 0;
_dataflow__delay_data_110 <= 0;
_dataflow__delay_valid_110 <= 0;
_dataflow__delay_data_111 <= 0;
_dataflow__delay_valid_111 <= 0;
_dataflow__delay_data_118 <= 0;
_dataflow__delay_valid_118 <= 0;
_dataflow__delay_data_128 <= 0;
_dataflow__delay_valid_128 <= 0;
_dataflow__delay_data_140 <= 0;
_dataflow__delay_valid_140 <= 0;
_dataflow__delay_data_152 <= 0;
_dataflow__delay_valid_152 <= 0;
_dataflow__delay_data_153 <= 0;
_dataflow__delay_valid_153 <= 0;
_dataflow__delay_data_162 <= 0;
_dataflow__delay_valid_162 <= 0;
_dataflow_cond_data_18 <= 0;
_dataflow_cond_valid_18 <= 0;
_dataflow_cond_data_19 <= 0;
_dataflow_cond_valid_19 <= 0;
_dataflow_cond_data_33 <= 0;
_dataflow_cond_valid_33 <= 0;
_dataflow_cond_data_34 <= 0;
_dataflow_cond_valid_34 <= 0;
_dataflow__delay_data_119 <= 0;
_dataflow__delay_valid_119 <= 0;
_dataflow__delay_data_129 <= 0;
_dataflow__delay_valid_129 <= 0;
_dataflow__delay_data_141 <= 0;
_dataflow__delay_valid_141 <= 0;
_dataflow__delay_data_163 <= 0;
_dataflow__delay_valid_163 <= 0;
_dataflow_lessthan_data_20 <= 0;
_dataflow_lessthan_valid_20 <= 0;
_dataflow_lessthan_data_35 <= 0;
_dataflow_lessthan_valid_35 <= 0;
_dataflow_lessthan_data_47 <= 0;
_dataflow_lessthan_valid_47 <= 0;
_dataflow__delay_data_120 <= 0;
_dataflow__delay_valid_120 <= 0;
_dataflow__delay_data_121 <= 0;
_dataflow__delay_valid_121 <= 0;
_dataflow__delay_data_130 <= 0;
_dataflow__delay_valid_130 <= 0;
_dataflow__delay_data_142 <= 0;
_dataflow__delay_valid_142 <= 0;
_dataflow__delay_data_154 <= 0;
_dataflow__delay_valid_154 <= 0;
_dataflow__delay_data_155 <= 0;
_dataflow__delay_valid_155 <= 0;
_dataflow__delay_data_164 <= 0;
_dataflow__delay_valid_164 <= 0;
_dataflow__delay_data_165 <= 0;
_dataflow__delay_valid_165 <= 0;
_dataflow_cond_data_21 <= 0;
_dataflow_cond_valid_21 <= 0;
_dataflow_cond_data_22 <= 0;
_dataflow_cond_valid_22 <= 0;
_dataflow_cond_data_36 <= 0;
_dataflow_cond_valid_36 <= 0;
_dataflow_cond_data_37 <= 0;
_dataflow_cond_valid_37 <= 0;
_dataflow_cond_data_48 <= 0;
_dataflow_cond_valid_48 <= 0;
_dataflow_cond_data_49 <= 0;
_dataflow_cond_valid_49 <= 0;
_dataflow__delay_data_131 <= 0;
_dataflow__delay_valid_131 <= 0;
_dataflow__delay_data_143 <= 0;
_dataflow__delay_valid_143 <= 0;
_dataflow_lessthan_data_23 <= 0;
_dataflow_lessthan_valid_23 <= 0;
_dataflow_lessthan_data_38 <= 0;
_dataflow_lessthan_valid_38 <= 0;
_dataflow_lessthan_data_50 <= 0;
_dataflow_lessthan_valid_50 <= 0;
_dataflow__delay_data_132 <= 0;
_dataflow__delay_valid_132 <= 0;
_dataflow__delay_data_133 <= 0;
_dataflow__delay_valid_133 <= 0;
_dataflow__delay_data_144 <= 0;
_dataflow__delay_valid_144 <= 0;
_dataflow__delay_data_156 <= 0;
_dataflow__delay_valid_156 <= 0;
_dataflow__delay_data_157 <= 0;
_dataflow__delay_valid_157 <= 0;
_dataflow__delay_data_166 <= 0;
_dataflow__delay_valid_166 <= 0;
_dataflow__delay_data_167 <= 0;
_dataflow__delay_valid_167 <= 0;
_dataflow__delay_data_174 <= 0;
_dataflow__delay_valid_174 <= 0;
_dataflow_cond_data_24 <= 0;
_dataflow_cond_valid_24 <= 0;
_dataflow_cond_data_25 <= 0;
_dataflow_cond_valid_25 <= 0;
_dataflow_cond_data_39 <= 0;
_dataflow_cond_valid_39 <= 0;
_dataflow_cond_data_40 <= 0;
_dataflow_cond_valid_40 <= 0;
_dataflow_cond_data_51 <= 0;
_dataflow_cond_valid_51 <= 0;
_dataflow_cond_data_52 <= 0;
_dataflow_cond_valid_52 <= 0;
_dataflow__delay_data_145 <= 0;
_dataflow__delay_valid_145 <= 0;
_dataflow__delay_data_175 <= 0;
_dataflow__delay_valid_175 <= 0;
_dataflow_lessthan_data_26 <= 0;
_dataflow_lessthan_valid_26 <= 0;
_dataflow_lessthan_data_41 <= 0;
_dataflow_lessthan_valid_41 <= 0;
_dataflow_lessthan_data_53 <= 0;
_dataflow_lessthan_valid_53 <= 0;
_dataflow_lessthan_data_62 <= 0;
_dataflow_lessthan_valid_62 <= 0;
_dataflow__delay_data_146 <= 0;
_dataflow__delay_valid_146 <= 0;
_dataflow__delay_data_147 <= 0;
_dataflow__delay_valid_147 <= 0;
_dataflow__delay_data_158 <= 0;
_dataflow__delay_valid_158 <= 0;
_dataflow__delay_data_159 <= 0;
_dataflow__delay_valid_159 <= 0;
_dataflow__delay_data_168 <= 0;
_dataflow__delay_valid_168 <= 0;
_dataflow__delay_data_169 <= 0;
_dataflow__delay_valid_169 <= 0;
_dataflow__delay_data_176 <= 0;
_dataflow__delay_valid_176 <= 0;
_dataflow__delay_data_177 <= 0;
_dataflow__delay_valid_177 <= 0;
_dataflow_cond_data_27 <= 0;
_dataflow_cond_valid_27 <= 0;
_dataflow_cond_data_28 <= 0;
_dataflow_cond_valid_28 <= 0;
_dataflow_cond_data_42 <= 0;
_dataflow_cond_valid_42 <= 0;
_dataflow_cond_data_43 <= 0;
_dataflow_cond_valid_43 <= 0;
_dataflow_cond_data_54 <= 0;
_dataflow_cond_valid_54 <= 0;
_dataflow_cond_data_55 <= 0;
_dataflow_cond_valid_55 <= 0;
_dataflow_cond_data_63 <= 0;
_dataflow_cond_valid_63 <= 0;
_dataflow_cond_data_64 <= 0;
_dataflow_cond_valid_64 <= 0;
_dataflow_lessthan_data_44 <= 0;
_dataflow_lessthan_valid_44 <= 0;
_dataflow_lessthan_data_56 <= 0;
_dataflow_lessthan_valid_56 <= 0;
_dataflow_lessthan_data_65 <= 0;
_dataflow_lessthan_valid_65 <= 0;
_dataflow__delay_data_160 <= 0;
_dataflow__delay_valid_160 <= 0;
_dataflow__delay_data_161 <= 0;
_dataflow__delay_valid_161 <= 0;
_dataflow__delay_data_170 <= 0;
_dataflow__delay_valid_170 <= 0;
_dataflow__delay_data_171 <= 0;
_dataflow__delay_valid_171 <= 0;
_dataflow__delay_data_178 <= 0;
_dataflow__delay_valid_178 <= 0;
_dataflow__delay_data_179 <= 0;
_dataflow__delay_valid_179 <= 0;
_dataflow__delay_data_184 <= 0;
_dataflow__delay_valid_184 <= 0;
_dataflow__delay_data_202 <= 0;
_dataflow__delay_valid_202 <= 0;
_dataflow_cond_data_45 <= 0;
_dataflow_cond_valid_45 <= 0;
_dataflow_cond_data_46 <= 0;
_dataflow_cond_valid_46 <= 0;
_dataflow_cond_data_57 <= 0;
_dataflow_cond_valid_57 <= 0;
_dataflow_cond_data_58 <= 0;
_dataflow_cond_valid_58 <= 0;
_dataflow_cond_data_66 <= 0;
_dataflow_cond_valid_66 <= 0;
_dataflow_cond_data_67 <= 0;
_dataflow_cond_valid_67 <= 0;
_dataflow__delay_data_185 <= 0;
_dataflow__delay_valid_185 <= 0;
_dataflow__delay_data_203 <= 0;
_dataflow__delay_valid_203 <= 0;
_dataflow_lessthan_data_59 <= 0;
_dataflow_lessthan_valid_59 <= 0;
_dataflow_lessthan_data_68 <= 0;
_dataflow_lessthan_valid_68 <= 0;
_dataflow_lessthan_data_74 <= 0;
_dataflow_lessthan_valid_74 <= 0;
_dataflow__delay_data_172 <= 0;
_dataflow__delay_valid_172 <= 0;
_dataflow__delay_data_173 <= 0;
_dataflow__delay_valid_173 <= 0;
_dataflow__delay_data_180 <= 0;
_dataflow__delay_valid_180 <= 0;
_dataflow__delay_data_181 <= 0;
_dataflow__delay_valid_181 <= 0;
_dataflow__delay_data_186 <= 0;
_dataflow__delay_valid_186 <= 0;
_dataflow__delay_data_187 <= 0;
_dataflow__delay_valid_187 <= 0;
_dataflow__delay_data_204 <= 0;
_dataflow__delay_valid_204 <= 0;
_dataflow__delay_data_214 <= 0;
_dataflow__delay_valid_214 <= 0;
_dataflow_cond_data_60 <= 0;
_dataflow_cond_valid_60 <= 0;
_dataflow_cond_data_61 <= 0;
_dataflow_cond_valid_61 <= 0;
_dataflow_cond_data_69 <= 0;
_dataflow_cond_valid_69 <= 0;
_dataflow_cond_data_70 <= 0;
_dataflow_cond_valid_70 <= 0;
_dataflow_cond_data_75 <= 0;
_dataflow_cond_valid_75 <= 0;
_dataflow_cond_data_76 <= 0;
_dataflow_cond_valid_76 <= 0;
_dataflow__delay_data_205 <= 0;
_dataflow__delay_valid_205 <= 0;
_dataflow__delay_data_215 <= 0;
_dataflow__delay_valid_215 <= 0;
_dataflow_lessthan_data_71 <= 0;
_dataflow_lessthan_valid_71 <= 0;
_dataflow_lessthan_data_77 <= 0;
_dataflow_lessthan_valid_77 <= 0;
_dataflow__delay_data_182 <= 0;
_dataflow__delay_valid_182 <= 0;
_dataflow__delay_data_183 <= 0;
_dataflow__delay_valid_183 <= 0;
_dataflow__delay_data_188 <= 0;
_dataflow__delay_valid_188 <= 0;
_dataflow__delay_data_189 <= 0;
_dataflow__delay_valid_189 <= 0;
_dataflow__delay_data_192 <= 0;
_dataflow__delay_valid_192 <= 0;
_dataflow__delay_data_206 <= 0;
_dataflow__delay_valid_206 <= 0;
_dataflow__delay_data_216 <= 0;
_dataflow__delay_valid_216 <= 0;
_dataflow__delay_data_224 <= 0;
_dataflow__delay_valid_224 <= 0;
_dataflow_cond_data_72 <= 0;
_dataflow_cond_valid_72 <= 0;
_dataflow_cond_data_73 <= 0;
_dataflow_cond_valid_73 <= 0;
_dataflow_cond_data_78 <= 0;
_dataflow_cond_valid_78 <= 0;
_dataflow_cond_data_79 <= 0;
_dataflow_cond_valid_79 <= 0;
_dataflow__delay_data_193 <= 0;
_dataflow__delay_valid_193 <= 0;
_dataflow__delay_data_207 <= 0;
_dataflow__delay_valid_207 <= 0;
_dataflow__delay_data_217 <= 0;
_dataflow__delay_valid_217 <= 0;
_dataflow__delay_data_225 <= 0;
_dataflow__delay_valid_225 <= 0;
_dataflow_lessthan_data_80 <= 0;
_dataflow_lessthan_valid_80 <= 0;
_dataflow_lessthan_data_83 <= 0;
_dataflow_lessthan_valid_83 <= 0;
_dataflow__delay_data_190 <= 0;
_dataflow__delay_valid_190 <= 0;
_dataflow__delay_data_191 <= 0;
_dataflow__delay_valid_191 <= 0;
_dataflow__delay_data_194 <= 0;
_dataflow__delay_valid_194 <= 0;
_dataflow__delay_data_195 <= 0;
_dataflow__delay_valid_195 <= 0;
_dataflow__delay_data_208 <= 0;
_dataflow__delay_valid_208 <= 0;
_dataflow__delay_data_218 <= 0;
_dataflow__delay_valid_218 <= 0;
_dataflow__delay_data_226 <= 0;
_dataflow__delay_valid_226 <= 0;
_dataflow__delay_data_232 <= 0;
_dataflow__delay_valid_232 <= 0;
_dataflow_cond_data_81 <= 0;
_dataflow_cond_valid_81 <= 0;
_dataflow_cond_data_82 <= 0;
_dataflow_cond_valid_82 <= 0;
_dataflow_cond_data_84 <= 0;
_dataflow_cond_valid_84 <= 0;
_dataflow_cond_data_85 <= 0;
_dataflow_cond_valid_85 <= 0;
_dataflow__delay_data_209 <= 0;
_dataflow__delay_valid_209 <= 0;
_dataflow__delay_data_219 <= 0;
_dataflow__delay_valid_219 <= 0;
_dataflow__delay_data_227 <= 0;
_dataflow__delay_valid_227 <= 0;
_dataflow__delay_data_233 <= 0;
_dataflow__delay_valid_233 <= 0;
_dataflow_lessthan_data_86 <= 0;
_dataflow_lessthan_valid_86 <= 0;
_dataflow__delay_data_196 <= 0;
_dataflow__delay_valid_196 <= 0;
_dataflow__delay_data_197 <= 0;
_dataflow__delay_valid_197 <= 0;
_dataflow__delay_data_198 <= 0;
_dataflow__delay_valid_198 <= 0;
_dataflow__delay_data_210 <= 0;
_dataflow__delay_valid_210 <= 0;
_dataflow__delay_data_220 <= 0;
_dataflow__delay_valid_220 <= 0;
_dataflow__delay_data_228 <= 0;
_dataflow__delay_valid_228 <= 0;
_dataflow__delay_data_234 <= 0;
_dataflow__delay_valid_234 <= 0;
_dataflow__delay_data_238 <= 0;
_dataflow__delay_valid_238 <= 0;
_dataflow_cond_data_87 <= 0;
_dataflow_cond_valid_87 <= 0;
_dataflow_cond_data_88 <= 0;
_dataflow_cond_valid_88 <= 0;
_dataflow__delay_data_199 <= 0;
_dataflow__delay_valid_199 <= 0;
_dataflow__delay_data_211 <= 0;
_dataflow__delay_valid_211 <= 0;
_dataflow__delay_data_221 <= 0;
_dataflow__delay_valid_221 <= 0;
_dataflow__delay_data_229 <= 0;
_dataflow__delay_valid_229 <= 0;
_dataflow__delay_data_235 <= 0;
_dataflow__delay_valid_235 <= 0;
_dataflow__delay_data_239 <= 0;
_dataflow__delay_valid_239 <= 0;
_dataflow_lessthan_data_89 <= 0;
_dataflow_lessthan_valid_89 <= 0;
_dataflow__delay_data_200 <= 0;
_dataflow__delay_valid_200 <= 0;
_dataflow__delay_data_201 <= 0;
_dataflow__delay_valid_201 <= 0;
_dataflow__delay_data_212 <= 0;
_dataflow__delay_valid_212 <= 0;
_dataflow__delay_data_222 <= 0;
_dataflow__delay_valid_222 <= 0;
_dataflow__delay_data_230 <= 0;
_dataflow__delay_valid_230 <= 0;
_dataflow__delay_data_236 <= 0;
_dataflow__delay_valid_236 <= 0;
_dataflow__delay_data_240 <= 0;
_dataflow__delay_valid_240 <= 0;
_dataflow__delay_data_242 <= 0;
_dataflow__delay_valid_242 <= 0;
_dataflow_cond_data_90 <= 0;
_dataflow_cond_valid_90 <= 0;
_dataflow_cond_data_91 <= 0;
_dataflow_cond_valid_91 <= 0;
_dataflow__delay_data_213 <= 0;
_dataflow__delay_valid_213 <= 0;
_dataflow__delay_data_223 <= 0;
_dataflow__delay_valid_223 <= 0;
_dataflow__delay_data_231 <= 0;
_dataflow__delay_valid_231 <= 0;
_dataflow__delay_data_237 <= 0;
_dataflow__delay_valid_237 <= 0;
_dataflow__delay_data_241 <= 0;
_dataflow__delay_valid_241 <= 0;
_dataflow__delay_data_243 <= 0;
_dataflow__delay_valid_243 <= 0;
end else begin
if((_dataflow_lessthan_ready_8 || !_dataflow_lessthan_valid_8) && 1 && 1) begin
_dataflow_lessthan_data_8 <= din0 < din1;
end
if(_dataflow_lessthan_valid_8 && _dataflow_lessthan_ready_8) begin
_dataflow_lessthan_valid_8 <= 0;
end
if((_dataflow_lessthan_ready_8 || !_dataflow_lessthan_valid_8) && 1) begin
_dataflow_lessthan_valid_8 <= 1;
end
if((_dataflow__delay_ready_92 || !_dataflow__delay_valid_92) && 1 && 1) begin
_dataflow__delay_data_92 <= din1;
end
if(_dataflow__delay_valid_92 && _dataflow__delay_ready_92) begin
_dataflow__delay_valid_92 <= 0;
end
if((_dataflow__delay_ready_92 || !_dataflow__delay_valid_92) && 1) begin
_dataflow__delay_valid_92 <= 1;
end
if((_dataflow__delay_ready_93 || !_dataflow__delay_valid_93) && 1 && 1) begin
_dataflow__delay_data_93 <= din0;
end
if(_dataflow__delay_valid_93 && _dataflow__delay_ready_93) begin
_dataflow__delay_valid_93 <= 0;
end
if((_dataflow__delay_ready_93 || !_dataflow__delay_valid_93) && 1) begin
_dataflow__delay_valid_93 <= 1;
end
if((_dataflow__delay_ready_94 || !_dataflow__delay_valid_94) && 1 && 1) begin
_dataflow__delay_data_94 <= din2;
end
if(_dataflow__delay_valid_94 && _dataflow__delay_ready_94) begin
_dataflow__delay_valid_94 <= 0;
end
if((_dataflow__delay_ready_94 || !_dataflow__delay_valid_94) && 1) begin
_dataflow__delay_valid_94 <= 1;
end
if((_dataflow__delay_ready_98 || !_dataflow__delay_valid_98) && 1 && 1) begin
_dataflow__delay_data_98 <= din3;
end
if(_dataflow__delay_valid_98 && _dataflow__delay_ready_98) begin
_dataflow__delay_valid_98 <= 0;
end
if((_dataflow__delay_ready_98 || !_dataflow__delay_valid_98) && 1) begin
_dataflow__delay_valid_98 <= 1;
end
if((_dataflow__delay_ready_104 || !_dataflow__delay_valid_104) && 1 && 1) begin
_dataflow__delay_data_104 <= din4;
end
if(_dataflow__delay_valid_104 && _dataflow__delay_ready_104) begin
_dataflow__delay_valid_104 <= 0;
end
if((_dataflow__delay_ready_104 || !_dataflow__delay_valid_104) && 1) begin
_dataflow__delay_valid_104 <= 1;
end
if((_dataflow__delay_ready_112 || !_dataflow__delay_valid_112) && 1 && 1) begin
_dataflow__delay_data_112 <= din5;
end
if(_dataflow__delay_valid_112 && _dataflow__delay_ready_112) begin
_dataflow__delay_valid_112 <= 0;
end
if((_dataflow__delay_ready_112 || !_dataflow__delay_valid_112) && 1) begin
_dataflow__delay_valid_112 <= 1;
end
if((_dataflow__delay_ready_122 || !_dataflow__delay_valid_122) && 1 && 1) begin
_dataflow__delay_data_122 <= din6;
end
if(_dataflow__delay_valid_122 && _dataflow__delay_ready_122) begin
_dataflow__delay_valid_122 <= 0;
end
if((_dataflow__delay_ready_122 || !_dataflow__delay_valid_122) && 1) begin
_dataflow__delay_valid_122 <= 1;
end
if((_dataflow__delay_ready_134 || !_dataflow__delay_valid_134) && 1 && 1) begin
_dataflow__delay_data_134 <= din7;
end
if(_dataflow__delay_valid_134 && _dataflow__delay_ready_134) begin
_dataflow__delay_valid_134 <= 0;
end
if((_dataflow__delay_ready_134 || !_dataflow__delay_valid_134) && 1) begin
_dataflow__delay_valid_134 <= 1;
end
if((_dataflow_cond_ready_9 || !_dataflow_cond_valid_9) && (_dataflow_lessthan_ready_8 && _dataflow__delay_ready_93 && _dataflow__delay_ready_92) && (_dataflow_lessthan_valid_8 && _dataflow__delay_valid_93 && _dataflow__delay_valid_92)) begin
_dataflow_cond_data_9 <= (_dataflow_lessthan_data_8)? _dataflow__delay_data_93 : _dataflow__delay_data_92;
end
if(_dataflow_cond_valid_9 && _dataflow_cond_ready_9) begin
_dataflow_cond_valid_9 <= 0;
end
if((_dataflow_cond_ready_9 || !_dataflow_cond_valid_9) && (_dataflow_lessthan_ready_8 && _dataflow__delay_ready_93 && _dataflow__delay_ready_92)) begin
_dataflow_cond_valid_9 <= _dataflow_lessthan_valid_8 && _dataflow__delay_valid_93 && _dataflow__delay_valid_92;
end
if((_dataflow_cond_ready_10 || !_dataflow_cond_valid_10) && (_dataflow_lessthan_ready_8 && _dataflow__delay_ready_92 && _dataflow__delay_ready_93) && (_dataflow_lessthan_valid_8 && _dataflow__delay_valid_92 && _dataflow__delay_valid_93)) begin
_dataflow_cond_data_10 <= (_dataflow_lessthan_data_8)? _dataflow__delay_data_92 : _dataflow__delay_data_93;
end
if(_dataflow_cond_valid_10 && _dataflow_cond_ready_10) begin
_dataflow_cond_valid_10 <= 0;
end
if((_dataflow_cond_ready_10 || !_dataflow_cond_valid_10) && (_dataflow_lessthan_ready_8 && _dataflow__delay_ready_92 && _dataflow__delay_ready_93)) begin
_dataflow_cond_valid_10 <= _dataflow_lessthan_valid_8 && _dataflow__delay_valid_92 && _dataflow__delay_valid_93;
end
if((_dataflow__delay_ready_95 || !_dataflow__delay_valid_95) && _dataflow__delay_ready_94 && _dataflow__delay_valid_94) begin
_dataflow__delay_data_95 <= _dataflow__delay_data_94;
end
if(_dataflow__delay_valid_95 && _dataflow__delay_ready_95) begin
_dataflow__delay_valid_95 <= 0;
end
if((_dataflow__delay_ready_95 || !_dataflow__delay_valid_95) && _dataflow__delay_ready_94) begin
_dataflow__delay_valid_95 <= _dataflow__delay_valid_94;
end
if((_dataflow__delay_ready_99 || !_dataflow__delay_valid_99) && _dataflow__delay_ready_98 && _dataflow__delay_valid_98) begin
_dataflow__delay_data_99 <= _dataflow__delay_data_98;
end
if(_dataflow__delay_valid_99 && _dataflow__delay_ready_99) begin
_dataflow__delay_valid_99 <= 0;
end
if((_dataflow__delay_ready_99 || !_dataflow__delay_valid_99) && _dataflow__delay_ready_98) begin
_dataflow__delay_valid_99 <= _dataflow__delay_valid_98;
end
if((_dataflow__delay_ready_105 || !_dataflow__delay_valid_105) && _dataflow__delay_ready_104 && _dataflow__delay_valid_104) begin
_dataflow__delay_data_105 <= _dataflow__delay_data_104;
end
if(_dataflow__delay_valid_105 && _dataflow__delay_ready_105) begin
_dataflow__delay_valid_105 <= 0;
end
if((_dataflow__delay_ready_105 || !_dataflow__delay_valid_105) && _dataflow__delay_ready_104) begin
_dataflow__delay_valid_105 <= _dataflow__delay_valid_104;
end
if((_dataflow__delay_ready_113 || !_dataflow__delay_valid_113) && _dataflow__delay_ready_112 && _dataflow__delay_valid_112) begin
_dataflow__delay_data_113 <= _dataflow__delay_data_112;
end
if(_dataflow__delay_valid_113 && _dataflow__delay_ready_113) begin
_dataflow__delay_valid_113 <= 0;
end
if((_dataflow__delay_ready_113 || !_dataflow__delay_valid_113) && _dataflow__delay_ready_112) begin
_dataflow__delay_valid_113 <= _dataflow__delay_valid_112;
end
if((_dataflow__delay_ready_123 || !_dataflow__delay_valid_123) && _dataflow__delay_ready_122 && _dataflow__delay_valid_122) begin
_dataflow__delay_data_123 <= _dataflow__delay_data_122;
end
if(_dataflow__delay_valid_123 && _dataflow__delay_ready_123) begin
_dataflow__delay_valid_123 <= 0;
end
if((_dataflow__delay_ready_123 || !_dataflow__delay_valid_123) && _dataflow__delay_ready_122) begin
_dataflow__delay_valid_123 <= _dataflow__delay_valid_122;
end
if((_dataflow__delay_ready_135 || !_dataflow__delay_valid_135) && _dataflow__delay_ready_134 && _dataflow__delay_valid_134) begin
_dataflow__delay_data_135 <= _dataflow__delay_data_134;
end
if(_dataflow__delay_valid_135 && _dataflow__delay_ready_135) begin
_dataflow__delay_valid_135 <= 0;
end
if((_dataflow__delay_ready_135 || !_dataflow__delay_valid_135) && _dataflow__delay_ready_134) begin
_dataflow__delay_valid_135 <= _dataflow__delay_valid_134;
end
if((_dataflow_lessthan_ready_11 || !_dataflow_lessthan_valid_11) && (_dataflow_cond_ready_10 && _dataflow__delay_ready_95) && (_dataflow_cond_valid_10 && _dataflow__delay_valid_95)) begin
_dataflow_lessthan_data_11 <= _dataflow_cond_data_10 < _dataflow__delay_data_95;
end
if(_dataflow_lessthan_valid_11 && _dataflow_lessthan_ready_11) begin
_dataflow_lessthan_valid_11 <= 0;
end
if((_dataflow_lessthan_ready_11 || !_dataflow_lessthan_valid_11) && (_dataflow_cond_ready_10 && _dataflow__delay_ready_95)) begin
_dataflow_lessthan_valid_11 <= _dataflow_cond_valid_10 && _dataflow__delay_valid_95;
end
if((_dataflow__delay_ready_96 || !_dataflow__delay_valid_96) && _dataflow__delay_ready_95 && _dataflow__delay_valid_95) begin
_dataflow__delay_data_96 <= _dataflow__delay_data_95;
end
if(_dataflow__delay_valid_96 && _dataflow__delay_ready_96) begin
_dataflow__delay_valid_96 <= 0;
end
if((_dataflow__delay_ready_96 || !_dataflow__delay_valid_96) && _dataflow__delay_ready_95) begin
_dataflow__delay_valid_96 <= _dataflow__delay_valid_95;
end
if((_dataflow__delay_ready_97 || !_dataflow__delay_valid_97) && _dataflow_cond_ready_10 && _dataflow_cond_valid_10) begin
_dataflow__delay_data_97 <= _dataflow_cond_data_10;
end
if(_dataflow__delay_valid_97 && _dataflow__delay_ready_97) begin
_dataflow__delay_valid_97 <= 0;
end
if((_dataflow__delay_ready_97 || !_dataflow__delay_valid_97) && _dataflow_cond_ready_10) begin
_dataflow__delay_valid_97 <= _dataflow_cond_valid_10;
end
if((_dataflow__delay_ready_100 || !_dataflow__delay_valid_100) && _dataflow__delay_ready_99 && _dataflow__delay_valid_99) begin
_dataflow__delay_data_100 <= _dataflow__delay_data_99;
end
if(_dataflow__delay_valid_100 && _dataflow__delay_ready_100) begin
_dataflow__delay_valid_100 <= 0;
end
if((_dataflow__delay_ready_100 || !_dataflow__delay_valid_100) && _dataflow__delay_ready_99) begin
_dataflow__delay_valid_100 <= _dataflow__delay_valid_99;
end
if((_dataflow__delay_ready_106 || !_dataflow__delay_valid_106) && _dataflow__delay_ready_105 && _dataflow__delay_valid_105) begin
_dataflow__delay_data_106 <= _dataflow__delay_data_105;
end
if(_dataflow__delay_valid_106 && _dataflow__delay_ready_106) begin
_dataflow__delay_valid_106 <= 0;
end
if((_dataflow__delay_ready_106 || !_dataflow__delay_valid_106) && _dataflow__delay_ready_105) begin
_dataflow__delay_valid_106 <= _dataflow__delay_valid_105;
end
if((_dataflow__delay_ready_114 || !_dataflow__delay_valid_114) && _dataflow__delay_ready_113 && _dataflow__delay_valid_113) begin
_dataflow__delay_data_114 <= _dataflow__delay_data_113;
end
if(_dataflow__delay_valid_114 && _dataflow__delay_ready_114) begin
_dataflow__delay_valid_114 <= 0;
end
if((_dataflow__delay_ready_114 || !_dataflow__delay_valid_114) && _dataflow__delay_ready_113) begin
_dataflow__delay_valid_114 <= _dataflow__delay_valid_113;
end
if((_dataflow__delay_ready_124 || !_dataflow__delay_valid_124) && _dataflow__delay_ready_123 && _dataflow__delay_valid_123) begin
_dataflow__delay_data_124 <= _dataflow__delay_data_123;
end
if(_dataflow__delay_valid_124 && _dataflow__delay_ready_124) begin
_dataflow__delay_valid_124 <= 0;
end
if((_dataflow__delay_ready_124 || !_dataflow__delay_valid_124) && _dataflow__delay_ready_123) begin
_dataflow__delay_valid_124 <= _dataflow__delay_valid_123;
end
if((_dataflow__delay_ready_136 || !_dataflow__delay_valid_136) && _dataflow__delay_ready_135 && _dataflow__delay_valid_135) begin
_dataflow__delay_data_136 <= _dataflow__delay_data_135;
end
if(_dataflow__delay_valid_136 && _dataflow__delay_ready_136) begin
_dataflow__delay_valid_136 <= 0;
end
if((_dataflow__delay_ready_136 || !_dataflow__delay_valid_136) && _dataflow__delay_ready_135) begin
_dataflow__delay_valid_136 <= _dataflow__delay_valid_135;
end
if((_dataflow__delay_ready_148 || !_dataflow__delay_valid_148) && _dataflow_cond_ready_9 && _dataflow_cond_valid_9) begin
_dataflow__delay_data_148 <= _dataflow_cond_data_9;
end
if(_dataflow__delay_valid_148 && _dataflow__delay_ready_148) begin
_dataflow__delay_valid_148 <= 0;
end
if((_dataflow__delay_ready_148 || !_dataflow__delay_valid_148) && _dataflow_cond_ready_9) begin
_dataflow__delay_valid_148 <= _dataflow_cond_valid_9;
end
if((_dataflow_cond_ready_12 || !_dataflow_cond_valid_12) && (_dataflow_lessthan_ready_11 && _dataflow__delay_ready_97 && _dataflow__delay_ready_96) && (_dataflow_lessthan_valid_11 && _dataflow__delay_valid_97 && _dataflow__delay_valid_96)) begin
_dataflow_cond_data_12 <= (_dataflow_lessthan_data_11)? _dataflow__delay_data_97 : _dataflow__delay_data_96;
end
if(_dataflow_cond_valid_12 && _dataflow_cond_ready_12) begin
_dataflow_cond_valid_12 <= 0;
end
if((_dataflow_cond_ready_12 || !_dataflow_cond_valid_12) && (_dataflow_lessthan_ready_11 && _dataflow__delay_ready_97 && _dataflow__delay_ready_96)) begin
_dataflow_cond_valid_12 <= _dataflow_lessthan_valid_11 && _dataflow__delay_valid_97 && _dataflow__delay_valid_96;
end
if((_dataflow_cond_ready_13 || !_dataflow_cond_valid_13) && (_dataflow_lessthan_ready_11 && _dataflow__delay_ready_96 && _dataflow__delay_ready_97) && (_dataflow_lessthan_valid_11 && _dataflow__delay_valid_96 && _dataflow__delay_valid_97)) begin
_dataflow_cond_data_13 <= (_dataflow_lessthan_data_11)? _dataflow__delay_data_96 : _dataflow__delay_data_97;
end
if(_dataflow_cond_valid_13 && _dataflow_cond_ready_13) begin
_dataflow_cond_valid_13 <= 0;
end
if((_dataflow_cond_ready_13 || !_dataflow_cond_valid_13) && (_dataflow_lessthan_ready_11 && _dataflow__delay_ready_96 && _dataflow__delay_ready_97)) begin
_dataflow_cond_valid_13 <= _dataflow_lessthan_valid_11 && _dataflow__delay_valid_96 && _dataflow__delay_valid_97;
end
if((_dataflow__delay_ready_101 || !_dataflow__delay_valid_101) && _dataflow__delay_ready_100 && _dataflow__delay_valid_100) begin
_dataflow__delay_data_101 <= _dataflow__delay_data_100;
end
if(_dataflow__delay_valid_101 && _dataflow__delay_ready_101) begin
_dataflow__delay_valid_101 <= 0;
end
if((_dataflow__delay_ready_101 || !_dataflow__delay_valid_101) && _dataflow__delay_ready_100) begin
_dataflow__delay_valid_101 <= _dataflow__delay_valid_100;
end
if((_dataflow__delay_ready_107 || !_dataflow__delay_valid_107) && _dataflow__delay_ready_106 && _dataflow__delay_valid_106) begin
_dataflow__delay_data_107 <= _dataflow__delay_data_106;
end
if(_dataflow__delay_valid_107 && _dataflow__delay_ready_107) begin
_dataflow__delay_valid_107 <= 0;
end
if((_dataflow__delay_ready_107 || !_dataflow__delay_valid_107) && _dataflow__delay_ready_106) begin
_dataflow__delay_valid_107 <= _dataflow__delay_valid_106;
end
if((_dataflow__delay_ready_115 || !_dataflow__delay_valid_115) && _dataflow__delay_ready_114 && _dataflow__delay_valid_114) begin
_dataflow__delay_data_115 <= _dataflow__delay_data_114;
end
if(_dataflow__delay_valid_115 && _dataflow__delay_ready_115) begin
_dataflow__delay_valid_115 <= 0;
end
if((_dataflow__delay_ready_115 || !_dataflow__delay_valid_115) && _dataflow__delay_ready_114) begin
_dataflow__delay_valid_115 <= _dataflow__delay_valid_114;
end
if((_dataflow__delay_ready_125 || !_dataflow__delay_valid_125) && _dataflow__delay_ready_124 && _dataflow__delay_valid_124) begin
_dataflow__delay_data_125 <= _dataflow__delay_data_124;
end
if(_dataflow__delay_valid_125 && _dataflow__delay_ready_125) begin
_dataflow__delay_valid_125 <= 0;
end
if((_dataflow__delay_ready_125 || !_dataflow__delay_valid_125) && _dataflow__delay_ready_124) begin
_dataflow__delay_valid_125 <= _dataflow__delay_valid_124;
end
if((_dataflow__delay_ready_137 || !_dataflow__delay_valid_137) && _dataflow__delay_ready_136 && _dataflow__delay_valid_136) begin
_dataflow__delay_data_137 <= _dataflow__delay_data_136;
end
if(_dataflow__delay_valid_137 && _dataflow__delay_ready_137) begin
_dataflow__delay_valid_137 <= 0;
end
if((_dataflow__delay_ready_137 || !_dataflow__delay_valid_137) && _dataflow__delay_ready_136) begin
_dataflow__delay_valid_137 <= _dataflow__delay_valid_136;
end
if((_dataflow__delay_ready_149 || !_dataflow__delay_valid_149) && _dataflow__delay_ready_148 && _dataflow__delay_valid_148) begin
_dataflow__delay_data_149 <= _dataflow__delay_data_148;
end
if(_dataflow__delay_valid_149 && _dataflow__delay_ready_149) begin
_dataflow__delay_valid_149 <= 0;
end
if((_dataflow__delay_ready_149 || !_dataflow__delay_valid_149) && _dataflow__delay_ready_148) begin
_dataflow__delay_valid_149 <= _dataflow__delay_valid_148;
end
if((_dataflow_lessthan_ready_14 || !_dataflow_lessthan_valid_14) && (_dataflow_cond_ready_13 && _dataflow__delay_ready_101) && (_dataflow_cond_valid_13 && _dataflow__delay_valid_101)) begin
_dataflow_lessthan_data_14 <= _dataflow_cond_data_13 < _dataflow__delay_data_101;
end
if(_dataflow_lessthan_valid_14 && _dataflow_lessthan_ready_14) begin
_dataflow_lessthan_valid_14 <= 0;
end
if((_dataflow_lessthan_ready_14 || !_dataflow_lessthan_valid_14) && (_dataflow_cond_ready_13 && _dataflow__delay_ready_101)) begin
_dataflow_lessthan_valid_14 <= _dataflow_cond_valid_13 && _dataflow__delay_valid_101;
end
if((_dataflow_lessthan_ready_29 || !_dataflow_lessthan_valid_29) && (_dataflow__delay_ready_149 && _dataflow_cond_ready_12) && (_dataflow__delay_valid_149 && _dataflow_cond_valid_12)) begin
_dataflow_lessthan_data_29 <= _dataflow__delay_data_149 < _dataflow_cond_data_12;
end
if(_dataflow_lessthan_valid_29 && _dataflow_lessthan_ready_29) begin
_dataflow_lessthan_valid_29 <= 0;
end
if((_dataflow_lessthan_ready_29 || !_dataflow_lessthan_valid_29) && (_dataflow__delay_ready_149 && _dataflow_cond_ready_12)) begin
_dataflow_lessthan_valid_29 <= _dataflow__delay_valid_149 && _dataflow_cond_valid_12;
end
if((_dataflow__delay_ready_102 || !_dataflow__delay_valid_102) && _dataflow__delay_ready_101 && _dataflow__delay_valid_101) begin
_dataflow__delay_data_102 <= _dataflow__delay_data_101;
end
if(_dataflow__delay_valid_102 && _dataflow__delay_ready_102) begin
_dataflow__delay_valid_102 <= 0;
end
if((_dataflow__delay_ready_102 || !_dataflow__delay_valid_102) && _dataflow__delay_ready_101) begin
_dataflow__delay_valid_102 <= _dataflow__delay_valid_101;
end
if((_dataflow__delay_ready_103 || !_dataflow__delay_valid_103) && _dataflow_cond_ready_13 && _dataflow_cond_valid_13) begin
_dataflow__delay_data_103 <= _dataflow_cond_data_13;
end
if(_dataflow__delay_valid_103 && _dataflow__delay_ready_103) begin
_dataflow__delay_valid_103 <= 0;
end
if((_dataflow__delay_ready_103 || !_dataflow__delay_valid_103) && _dataflow_cond_ready_13) begin
_dataflow__delay_valid_103 <= _dataflow_cond_valid_13;
end
if((_dataflow__delay_ready_108 || !_dataflow__delay_valid_108) && _dataflow__delay_ready_107 && _dataflow__delay_valid_107) begin
_dataflow__delay_data_108 <= _dataflow__delay_data_107;
end
if(_dataflow__delay_valid_108 && _dataflow__delay_ready_108) begin
_dataflow__delay_valid_108 <= 0;
end
if((_dataflow__delay_ready_108 || !_dataflow__delay_valid_108) && _dataflow__delay_ready_107) begin
_dataflow__delay_valid_108 <= _dataflow__delay_valid_107;
end
if((_dataflow__delay_ready_116 || !_dataflow__delay_valid_116) && _dataflow__delay_ready_115 && _dataflow__delay_valid_115) begin
_dataflow__delay_data_116 <= _dataflow__delay_data_115;
end
if(_dataflow__delay_valid_116 && _dataflow__delay_ready_116) begin
_dataflow__delay_valid_116 <= 0;
end
if((_dataflow__delay_ready_116 || !_dataflow__delay_valid_116) && _dataflow__delay_ready_115) begin
_dataflow__delay_valid_116 <= _dataflow__delay_valid_115;
end
if((_dataflow__delay_ready_126 || !_dataflow__delay_valid_126) && _dataflow__delay_ready_125 && _dataflow__delay_valid_125) begin
_dataflow__delay_data_126 <= _dataflow__delay_data_125;
end
if(_dataflow__delay_valid_126 && _dataflow__delay_ready_126) begin
_dataflow__delay_valid_126 <= 0;
end
if((_dataflow__delay_ready_126 || !_dataflow__delay_valid_126) && _dataflow__delay_ready_125) begin
_dataflow__delay_valid_126 <= _dataflow__delay_valid_125;
end
if((_dataflow__delay_ready_138 || !_dataflow__delay_valid_138) && _dataflow__delay_ready_137 && _dataflow__delay_valid_137) begin
_dataflow__delay_data_138 <= _dataflow__delay_data_137;
end
if(_dataflow__delay_valid_138 && _dataflow__delay_ready_138) begin
_dataflow__delay_valid_138 <= 0;
end
if((_dataflow__delay_ready_138 || !_dataflow__delay_valid_138) && _dataflow__delay_ready_137) begin
_dataflow__delay_valid_138 <= _dataflow__delay_valid_137;
end
if((_dataflow__delay_ready_150 || !_dataflow__delay_valid_150) && _dataflow_cond_ready_12 && _dataflow_cond_valid_12) begin
_dataflow__delay_data_150 <= _dataflow_cond_data_12;
end
if(_dataflow__delay_valid_150 && _dataflow__delay_ready_150) begin
_dataflow__delay_valid_150 <= 0;
end
if((_dataflow__delay_ready_150 || !_dataflow__delay_valid_150) && _dataflow_cond_ready_12) begin
_dataflow__delay_valid_150 <= _dataflow_cond_valid_12;
end
if((_dataflow__delay_ready_151 || !_dataflow__delay_valid_151) && _dataflow__delay_ready_149 && _dataflow__delay_valid_149) begin
_dataflow__delay_data_151 <= _dataflow__delay_data_149;
end
if(_dataflow__delay_valid_151 && _dataflow__delay_ready_151) begin
_dataflow__delay_valid_151 <= 0;
end
if((_dataflow__delay_ready_151 || !_dataflow__delay_valid_151) && _dataflow__delay_ready_149) begin
_dataflow__delay_valid_151 <= _dataflow__delay_valid_149;
end
if((_dataflow_cond_ready_15 || !_dataflow_cond_valid_15) && (_dataflow_lessthan_ready_14 && _dataflow__delay_ready_103 && _dataflow__delay_ready_102) && (_dataflow_lessthan_valid_14 && _dataflow__delay_valid_103 && _dataflow__delay_valid_102)) begin
_dataflow_cond_data_15 <= (_dataflow_lessthan_data_14)? _dataflow__delay_data_103 : _dataflow__delay_data_102;
end
if(_dataflow_cond_valid_15 && _dataflow_cond_ready_15) begin
_dataflow_cond_valid_15 <= 0;
end
if((_dataflow_cond_ready_15 || !_dataflow_cond_valid_15) && (_dataflow_lessthan_ready_14 && _dataflow__delay_ready_103 && _dataflow__delay_ready_102)) begin
_dataflow_cond_valid_15 <= _dataflow_lessthan_valid_14 && _dataflow__delay_valid_103 && _dataflow__delay_valid_102;
end
if((_dataflow_cond_ready_16 || !_dataflow_cond_valid_16) && (_dataflow_lessthan_ready_14 && _dataflow__delay_ready_102 && _dataflow__delay_ready_103) && (_dataflow_lessthan_valid_14 && _dataflow__delay_valid_102 && _dataflow__delay_valid_103)) begin
_dataflow_cond_data_16 <= (_dataflow_lessthan_data_14)? _dataflow__delay_data_102 : _dataflow__delay_data_103;
end
if(_dataflow_cond_valid_16 && _dataflow_cond_ready_16) begin
_dataflow_cond_valid_16 <= 0;
end
if((_dataflow_cond_ready_16 || !_dataflow_cond_valid_16) && (_dataflow_lessthan_ready_14 && _dataflow__delay_ready_102 && _dataflow__delay_ready_103)) begin
_dataflow_cond_valid_16 <= _dataflow_lessthan_valid_14 && _dataflow__delay_valid_102 && _dataflow__delay_valid_103;
end
if((_dataflow_cond_ready_30 || !_dataflow_cond_valid_30) && (_dataflow_lessthan_ready_29 && _dataflow__delay_ready_151 && _dataflow__delay_ready_150) && (_dataflow_lessthan_valid_29 && _dataflow__delay_valid_151 && _dataflow__delay_valid_150)) begin
_dataflow_cond_data_30 <= (_dataflow_lessthan_data_29)? _dataflow__delay_data_151 : _dataflow__delay_data_150;
end
if(_dataflow_cond_valid_30 && _dataflow_cond_ready_30) begin
_dataflow_cond_valid_30 <= 0;
end
if((_dataflow_cond_ready_30 || !_dataflow_cond_valid_30) && (_dataflow_lessthan_ready_29 && _dataflow__delay_ready_151 && _dataflow__delay_ready_150)) begin
_dataflow_cond_valid_30 <= _dataflow_lessthan_valid_29 && _dataflow__delay_valid_151 && _dataflow__delay_valid_150;
end
if((_dataflow_cond_ready_31 || !_dataflow_cond_valid_31) && (_dataflow_lessthan_ready_29 && _dataflow__delay_ready_150 && _dataflow__delay_ready_151) && (_dataflow_lessthan_valid_29 && _dataflow__delay_valid_150 && _dataflow__delay_valid_151)) begin
_dataflow_cond_data_31 <= (_dataflow_lessthan_data_29)? _dataflow__delay_data_150 : _dataflow__delay_data_151;
end
if(_dataflow_cond_valid_31 && _dataflow_cond_ready_31) begin
_dataflow_cond_valid_31 <= 0;
end
if((_dataflow_cond_ready_31 || !_dataflow_cond_valid_31) && (_dataflow_lessthan_ready_29 && _dataflow__delay_ready_150 && _dataflow__delay_ready_151)) begin
_dataflow_cond_valid_31 <= _dataflow_lessthan_valid_29 && _dataflow__delay_valid_150 && _dataflow__delay_valid_151;
end
if((_dataflow__delay_ready_109 || !_dataflow__delay_valid_109) && _dataflow__delay_ready_108 && _dataflow__delay_valid_108) begin
_dataflow__delay_data_109 <= _dataflow__delay_data_108;
end
if(_dataflow__delay_valid_109 && _dataflow__delay_ready_109) begin
_dataflow__delay_valid_109 <= 0;
end
if((_dataflow__delay_ready_109 || !_dataflow__delay_valid_109) && _dataflow__delay_ready_108) begin
_dataflow__delay_valid_109 <= _dataflow__delay_valid_108;
end
if((_dataflow__delay_ready_117 || !_dataflow__delay_valid_117) && _dataflow__delay_ready_116 && _dataflow__delay_valid_116) begin
_dataflow__delay_data_117 <= _dataflow__delay_data_116;
end
if(_dataflow__delay_valid_117 && _dataflow__delay_ready_117) begin
_dataflow__delay_valid_117 <= 0;
end
if((_dataflow__delay_ready_117 || !_dataflow__delay_valid_117) && _dataflow__delay_ready_116) begin
_dataflow__delay_valid_117 <= _dataflow__delay_valid_116;
end
if((_dataflow__delay_ready_127 || !_dataflow__delay_valid_127) && _dataflow__delay_ready_126 && _dataflow__delay_valid_126) begin
_dataflow__delay_data_127 <= _dataflow__delay_data_126;
end
if(_dataflow__delay_valid_127 && _dataflow__delay_ready_127) begin
_dataflow__delay_valid_127 <= 0;
end
if((_dataflow__delay_ready_127 || !_dataflow__delay_valid_127) && _dataflow__delay_ready_126) begin
_dataflow__delay_valid_127 <= _dataflow__delay_valid_126;
end
if((_dataflow__delay_ready_139 || !_dataflow__delay_valid_139) && _dataflow__delay_ready_138 && _dataflow__delay_valid_138) begin
_dataflow__delay_data_139 <= _dataflow__delay_data_138;
end
if(_dataflow__delay_valid_139 && _dataflow__delay_ready_139) begin
_dataflow__delay_valid_139 <= 0;
end
if((_dataflow__delay_ready_139 || !_dataflow__delay_valid_139) && _dataflow__delay_ready_138) begin
_dataflow__delay_valid_139 <= _dataflow__delay_valid_138;
end
if((_dataflow_lessthan_ready_17 || !_dataflow_lessthan_valid_17) && (_dataflow_cond_ready_16 && _dataflow__delay_ready_109) && (_dataflow_cond_valid_16 && _dataflow__delay_valid_109)) begin
_dataflow_lessthan_data_17 <= _dataflow_cond_data_16 < _dataflow__delay_data_109;
end
if(_dataflow_lessthan_valid_17 && _dataflow_lessthan_ready_17) begin
_dataflow_lessthan_valid_17 <= 0;
end
if((_dataflow_lessthan_ready_17 || !_dataflow_lessthan_valid_17) && (_dataflow_cond_ready_16 && _dataflow__delay_ready_109)) begin
_dataflow_lessthan_valid_17 <= _dataflow_cond_valid_16 && _dataflow__delay_valid_109;
end
if((_dataflow_lessthan_ready_32 || !_dataflow_lessthan_valid_32) && (_dataflow_cond_ready_31 && _dataflow_cond_ready_15) && (_dataflow_cond_valid_31 && _dataflow_cond_valid_15)) begin
_dataflow_lessthan_data_32 <= _dataflow_cond_data_31 < _dataflow_cond_data_15;
end
if(_dataflow_lessthan_valid_32 && _dataflow_lessthan_ready_32) begin
_dataflow_lessthan_valid_32 <= 0;
end
if((_dataflow_lessthan_ready_32 || !_dataflow_lessthan_valid_32) && (_dataflow_cond_ready_31 && _dataflow_cond_ready_15)) begin
_dataflow_lessthan_valid_32 <= _dataflow_cond_valid_31 && _dataflow_cond_valid_15;
end
if((_dataflow__delay_ready_110 || !_dataflow__delay_valid_110) && _dataflow__delay_ready_109 && _dataflow__delay_valid_109) begin
_dataflow__delay_data_110 <= _dataflow__delay_data_109;
end
if(_dataflow__delay_valid_110 && _dataflow__delay_ready_110) begin
_dataflow__delay_valid_110 <= 0;
end
if((_dataflow__delay_ready_110 || !_dataflow__delay_valid_110) && _dataflow__delay_ready_109) begin
_dataflow__delay_valid_110 <= _dataflow__delay_valid_109;
end
if((_dataflow__delay_ready_111 || !_dataflow__delay_valid_111) && _dataflow_cond_ready_16 && _dataflow_cond_valid_16) begin
_dataflow__delay_data_111 <= _dataflow_cond_data_16;
end
if(_dataflow__delay_valid_111 && _dataflow__delay_ready_111) begin
_dataflow__delay_valid_111 <= 0;
end
if((_dataflow__delay_ready_111 || !_dataflow__delay_valid_111) && _dataflow_cond_ready_16) begin
_dataflow__delay_valid_111 <= _dataflow_cond_valid_16;
end
if((_dataflow__delay_ready_118 || !_dataflow__delay_valid_118) && _dataflow__delay_ready_117 && _dataflow__delay_valid_117) begin
_dataflow__delay_data_118 <= _dataflow__delay_data_117;
end
if(_dataflow__delay_valid_118 && _dataflow__delay_ready_118) begin
_dataflow__delay_valid_118 <= 0;
end
if((_dataflow__delay_ready_118 || !_dataflow__delay_valid_118) && _dataflow__delay_ready_117) begin
_dataflow__delay_valid_118 <= _dataflow__delay_valid_117;
end
if((_dataflow__delay_ready_128 || !_dataflow__delay_valid_128) && _dataflow__delay_ready_127 && _dataflow__delay_valid_127) begin
_dataflow__delay_data_128 <= _dataflow__delay_data_127;
end
if(_dataflow__delay_valid_128 && _dataflow__delay_ready_128) begin
_dataflow__delay_valid_128 <= 0;
end
if((_dataflow__delay_ready_128 || !_dataflow__delay_valid_128) && _dataflow__delay_ready_127) begin
_dataflow__delay_valid_128 <= _dataflow__delay_valid_127;
end
if((_dataflow__delay_ready_140 || !_dataflow__delay_valid_140) && _dataflow__delay_ready_139 && _dataflow__delay_valid_139) begin
_dataflow__delay_data_140 <= _dataflow__delay_data_139;
end
if(_dataflow__delay_valid_140 && _dataflow__delay_ready_140) begin
_dataflow__delay_valid_140 <= 0;
end
if((_dataflow__delay_ready_140 || !_dataflow__delay_valid_140) && _dataflow__delay_ready_139) begin
_dataflow__delay_valid_140 <= _dataflow__delay_valid_139;
end
if((_dataflow__delay_ready_152 || !_dataflow__delay_valid_152) && _dataflow_cond_ready_15 && _dataflow_cond_valid_15) begin
_dataflow__delay_data_152 <= _dataflow_cond_data_15;
end
if(_dataflow__delay_valid_152 && _dataflow__delay_ready_152) begin
_dataflow__delay_valid_152 <= 0;
end
if((_dataflow__delay_ready_152 || !_dataflow__delay_valid_152) && _dataflow_cond_ready_15) begin
_dataflow__delay_valid_152 <= _dataflow_cond_valid_15;
end
if((_dataflow__delay_ready_153 || !_dataflow__delay_valid_153) && _dataflow_cond_ready_31 && _dataflow_cond_valid_31) begin
_dataflow__delay_data_153 <= _dataflow_cond_data_31;
end
if(_dataflow__delay_valid_153 && _dataflow__delay_ready_153) begin
_dataflow__delay_valid_153 <= 0;
end
if((_dataflow__delay_ready_153 || !_dataflow__delay_valid_153) && _dataflow_cond_ready_31) begin
_dataflow__delay_valid_153 <= _dataflow_cond_valid_31;
end
if((_dataflow__delay_ready_162 || !_dataflow__delay_valid_162) && _dataflow_cond_ready_30 && _dataflow_cond_valid_30) begin
_dataflow__delay_data_162 <= _dataflow_cond_data_30;
end
if(_dataflow__delay_valid_162 && _dataflow__delay_ready_162) begin
_dataflow__delay_valid_162 <= 0;
end
if((_dataflow__delay_ready_162 || !_dataflow__delay_valid_162) && _dataflow_cond_ready_30) begin
_dataflow__delay_valid_162 <= _dataflow_cond_valid_30;
end
if((_dataflow_cond_ready_18 || !_dataflow_cond_valid_18) && (_dataflow_lessthan_ready_17 && _dataflow__delay_ready_111 && _dataflow__delay_ready_110) && (_dataflow_lessthan_valid_17 && _dataflow__delay_valid_111 && _dataflow__delay_valid_110)) begin
_dataflow_cond_data_18 <= (_dataflow_lessthan_data_17)? _dataflow__delay_data_111 : _dataflow__delay_data_110;
end
if(_dataflow_cond_valid_18 && _dataflow_cond_ready_18) begin
_dataflow_cond_valid_18 <= 0;
end
if((_dataflow_cond_ready_18 || !_dataflow_cond_valid_18) && (_dataflow_lessthan_ready_17 && _dataflow__delay_ready_111 && _dataflow__delay_ready_110)) begin
_dataflow_cond_valid_18 <= _dataflow_lessthan_valid_17 && _dataflow__delay_valid_111 && _dataflow__delay_valid_110;
end
if((_dataflow_cond_ready_19 || !_dataflow_cond_valid_19) && (_dataflow_lessthan_ready_17 && _dataflow__delay_ready_110 && _dataflow__delay_ready_111) && (_dataflow_lessthan_valid_17 && _dataflow__delay_valid_110 && _dataflow__delay_valid_111)) begin
_dataflow_cond_data_19 <= (_dataflow_lessthan_data_17)? _dataflow__delay_data_110 : _dataflow__delay_data_111;
end
if(_dataflow_cond_valid_19 && _dataflow_cond_ready_19) begin
_dataflow_cond_valid_19 <= 0;
end
if((_dataflow_cond_ready_19 || !_dataflow_cond_valid_19) && (_dataflow_lessthan_ready_17 && _dataflow__delay_ready_110 && _dataflow__delay_ready_111)) begin
_dataflow_cond_valid_19 <= _dataflow_lessthan_valid_17 && _dataflow__delay_valid_110 && _dataflow__delay_valid_111;
end
if((_dataflow_cond_ready_33 || !_dataflow_cond_valid_33) && (_dataflow_lessthan_ready_32 && _dataflow__delay_ready_153 && _dataflow__delay_ready_152) && (_dataflow_lessthan_valid_32 && _dataflow__delay_valid_153 && _dataflow__delay_valid_152)) begin
_dataflow_cond_data_33 <= (_dataflow_lessthan_data_32)? _dataflow__delay_data_153 : _dataflow__delay_data_152;
end
if(_dataflow_cond_valid_33 && _dataflow_cond_ready_33) begin
_dataflow_cond_valid_33 <= 0;
end
if((_dataflow_cond_ready_33 || !_dataflow_cond_valid_33) && (_dataflow_lessthan_ready_32 && _dataflow__delay_ready_153 && _dataflow__delay_ready_152)) begin
_dataflow_cond_valid_33 <= _dataflow_lessthan_valid_32 && _dataflow__delay_valid_153 && _dataflow__delay_valid_152;
end
if((_dataflow_cond_ready_34 || !_dataflow_cond_valid_34) && (_dataflow_lessthan_ready_32 && _dataflow__delay_ready_152 && _dataflow__delay_ready_153) && (_dataflow_lessthan_valid_32 && _dataflow__delay_valid_152 && _dataflow__delay_valid_153)) begin
_dataflow_cond_data_34 <= (_dataflow_lessthan_data_32)? _dataflow__delay_data_152 : _dataflow__delay_data_153;
end
if(_dataflow_cond_valid_34 && _dataflow_cond_ready_34) begin
_dataflow_cond_valid_34 <= 0;
end
if((_dataflow_cond_ready_34 || !_dataflow_cond_valid_34) && (_dataflow_lessthan_ready_32 && _dataflow__delay_ready_152 && _dataflow__delay_ready_153)) begin
_dataflow_cond_valid_34 <= _dataflow_lessthan_valid_32 && _dataflow__delay_valid_152 && _dataflow__delay_valid_153;
end
if((_dataflow__delay_ready_119 || !_dataflow__delay_valid_119) && _dataflow__delay_ready_118 && _dataflow__delay_valid_118) begin
_dataflow__delay_data_119 <= _dataflow__delay_data_118;
end
if(_dataflow__delay_valid_119 && _dataflow__delay_ready_119) begin
_dataflow__delay_valid_119 <= 0;
end
if((_dataflow__delay_ready_119 || !_dataflow__delay_valid_119) && _dataflow__delay_ready_118) begin
_dataflow__delay_valid_119 <= _dataflow__delay_valid_118;
end
if((_dataflow__delay_ready_129 || !_dataflow__delay_valid_129) && _dataflow__delay_ready_128 && _dataflow__delay_valid_128) begin
_dataflow__delay_data_129 <= _dataflow__delay_data_128;
end
if(_dataflow__delay_valid_129 && _dataflow__delay_ready_129) begin
_dataflow__delay_valid_129 <= 0;
end
if((_dataflow__delay_ready_129 || !_dataflow__delay_valid_129) && _dataflow__delay_ready_128) begin
_dataflow__delay_valid_129 <= _dataflow__delay_valid_128;
end
if((_dataflow__delay_ready_141 || !_dataflow__delay_valid_141) && _dataflow__delay_ready_140 && _dataflow__delay_valid_140) begin
_dataflow__delay_data_141 <= _dataflow__delay_data_140;
end
if(_dataflow__delay_valid_141 && _dataflow__delay_ready_141) begin
_dataflow__delay_valid_141 <= 0;
end
if((_dataflow__delay_ready_141 || !_dataflow__delay_valid_141) && _dataflow__delay_ready_140) begin
_dataflow__delay_valid_141 <= _dataflow__delay_valid_140;
end
if((_dataflow__delay_ready_163 || !_dataflow__delay_valid_163) && _dataflow__delay_ready_162 && _dataflow__delay_valid_162) begin
_dataflow__delay_data_163 <= _dataflow__delay_data_162;
end
if(_dataflow__delay_valid_163 && _dataflow__delay_ready_163) begin
_dataflow__delay_valid_163 <= 0;
end
if((_dataflow__delay_ready_163 || !_dataflow__delay_valid_163) && _dataflow__delay_ready_162) begin
_dataflow__delay_valid_163 <= _dataflow__delay_valid_162;
end
if((_dataflow_lessthan_ready_20 || !_dataflow_lessthan_valid_20) && (_dataflow_cond_ready_19 && _dataflow__delay_ready_119) && (_dataflow_cond_valid_19 && _dataflow__delay_valid_119)) begin
_dataflow_lessthan_data_20 <= _dataflow_cond_data_19 < _dataflow__delay_data_119;
end
if(_dataflow_lessthan_valid_20 && _dataflow_lessthan_ready_20) begin
_dataflow_lessthan_valid_20 <= 0;
end
if((_dataflow_lessthan_ready_20 || !_dataflow_lessthan_valid_20) && (_dataflow_cond_ready_19 && _dataflow__delay_ready_119)) begin
_dataflow_lessthan_valid_20 <= _dataflow_cond_valid_19 && _dataflow__delay_valid_119;
end
if((_dataflow_lessthan_ready_35 || !_dataflow_lessthan_valid_35) && (_dataflow_cond_ready_34 && _dataflow_cond_ready_18) && (_dataflow_cond_valid_34 && _dataflow_cond_valid_18)) begin
_dataflow_lessthan_data_35 <= _dataflow_cond_data_34 < _dataflow_cond_data_18;
end
if(_dataflow_lessthan_valid_35 && _dataflow_lessthan_ready_35) begin
_dataflow_lessthan_valid_35 <= 0;
end
if((_dataflow_lessthan_ready_35 || !_dataflow_lessthan_valid_35) && (_dataflow_cond_ready_34 && _dataflow_cond_ready_18)) begin
_dataflow_lessthan_valid_35 <= _dataflow_cond_valid_34 && _dataflow_cond_valid_18;
end
if((_dataflow_lessthan_ready_47 || !_dataflow_lessthan_valid_47) && (_dataflow__delay_ready_163 && _dataflow_cond_ready_33) && (_dataflow__delay_valid_163 && _dataflow_cond_valid_33)) begin
_dataflow_lessthan_data_47 <= _dataflow__delay_data_163 < _dataflow_cond_data_33;
end
if(_dataflow_lessthan_valid_47 && _dataflow_lessthan_ready_47) begin
_dataflow_lessthan_valid_47 <= 0;
end
if((_dataflow_lessthan_ready_47 || !_dataflow_lessthan_valid_47) && (_dataflow__delay_ready_163 && _dataflow_cond_ready_33)) begin
_dataflow_lessthan_valid_47 <= _dataflow__delay_valid_163 && _dataflow_cond_valid_33;
end
if((_dataflow__delay_ready_120 || !_dataflow__delay_valid_120) && _dataflow__delay_ready_119 && _dataflow__delay_valid_119) begin
_dataflow__delay_data_120 <= _dataflow__delay_data_119;
end
if(_dataflow__delay_valid_120 && _dataflow__delay_ready_120) begin
_dataflow__delay_valid_120 <= 0;
end
if((_dataflow__delay_ready_120 || !_dataflow__delay_valid_120) && _dataflow__delay_ready_119) begin
_dataflow__delay_valid_120 <= _dataflow__delay_valid_119;
end
if((_dataflow__delay_ready_121 || !_dataflow__delay_valid_121) && _dataflow_cond_ready_19 && _dataflow_cond_valid_19) begin
_dataflow__delay_data_121 <= _dataflow_cond_data_19;
end
if(_dataflow__delay_valid_121 && _dataflow__delay_ready_121) begin
_dataflow__delay_valid_121 <= 0;
end
if((_dataflow__delay_ready_121 || !_dataflow__delay_valid_121) && _dataflow_cond_ready_19) begin
_dataflow__delay_valid_121 <= _dataflow_cond_valid_19;
end
if((_dataflow__delay_ready_130 || !_dataflow__delay_valid_130) && _dataflow__delay_ready_129 && _dataflow__delay_valid_129) begin
_dataflow__delay_data_130 <= _dataflow__delay_data_129;
end
if(_dataflow__delay_valid_130 && _dataflow__delay_ready_130) begin
_dataflow__delay_valid_130 <= 0;
end
if((_dataflow__delay_ready_130 || !_dataflow__delay_valid_130) && _dataflow__delay_ready_129) begin
_dataflow__delay_valid_130 <= _dataflow__delay_valid_129;
end
if((_dataflow__delay_ready_142 || !_dataflow__delay_valid_142) && _dataflow__delay_ready_141 && _dataflow__delay_valid_141) begin
_dataflow__delay_data_142 <= _dataflow__delay_data_141;
end
if(_dataflow__delay_valid_142 && _dataflow__delay_ready_142) begin
_dataflow__delay_valid_142 <= 0;
end
if((_dataflow__delay_ready_142 || !_dataflow__delay_valid_142) && _dataflow__delay_ready_141) begin
_dataflow__delay_valid_142 <= _dataflow__delay_valid_141;
end
if((_dataflow__delay_ready_154 || !_dataflow__delay_valid_154) && _dataflow_cond_ready_18 && _dataflow_cond_valid_18) begin
_dataflow__delay_data_154 <= _dataflow_cond_data_18;
end
if(_dataflow__delay_valid_154 && _dataflow__delay_ready_154) begin
_dataflow__delay_valid_154 <= 0;
end
if((_dataflow__delay_ready_154 || !_dataflow__delay_valid_154) && _dataflow_cond_ready_18) begin
_dataflow__delay_valid_154 <= _dataflow_cond_valid_18;
end
if((_dataflow__delay_ready_155 || !_dataflow__delay_valid_155) && _dataflow_cond_ready_34 && _dataflow_cond_valid_34) begin
_dataflow__delay_data_155 <= _dataflow_cond_data_34;
end
if(_dataflow__delay_valid_155 && _dataflow__delay_ready_155) begin
_dataflow__delay_valid_155 <= 0;
end
if((_dataflow__delay_ready_155 || !_dataflow__delay_valid_155) && _dataflow_cond_ready_34) begin
_dataflow__delay_valid_155 <= _dataflow_cond_valid_34;
end
if((_dataflow__delay_ready_164 || !_dataflow__delay_valid_164) && _dataflow_cond_ready_33 && _dataflow_cond_valid_33) begin
_dataflow__delay_data_164 <= _dataflow_cond_data_33;
end
if(_dataflow__delay_valid_164 && _dataflow__delay_ready_164) begin
_dataflow__delay_valid_164 <= 0;
end
if((_dataflow__delay_ready_164 || !_dataflow__delay_valid_164) && _dataflow_cond_ready_33) begin
_dataflow__delay_valid_164 <= _dataflow_cond_valid_33;
end
if((_dataflow__delay_ready_165 || !_dataflow__delay_valid_165) && _dataflow__delay_ready_163 && _dataflow__delay_valid_163) begin
_dataflow__delay_data_165 <= _dataflow__delay_data_163;
end
if(_dataflow__delay_valid_165 && _dataflow__delay_ready_165) begin
_dataflow__delay_valid_165 <= 0;
end
if((_dataflow__delay_ready_165 || !_dataflow__delay_valid_165) && _dataflow__delay_ready_163) begin
_dataflow__delay_valid_165 <= _dataflow__delay_valid_163;
end
if((_dataflow_cond_ready_21 || !_dataflow_cond_valid_21) && (_dataflow_lessthan_ready_20 && _dataflow__delay_ready_121 && _dataflow__delay_ready_120) && (_dataflow_lessthan_valid_20 && _dataflow__delay_valid_121 && _dataflow__delay_valid_120)) begin
_dataflow_cond_data_21 <= (_dataflow_lessthan_data_20)? _dataflow__delay_data_121 : _dataflow__delay_data_120;
end
if(_dataflow_cond_valid_21 && _dataflow_cond_ready_21) begin
_dataflow_cond_valid_21 <= 0;
end
if((_dataflow_cond_ready_21 || !_dataflow_cond_valid_21) && (_dataflow_lessthan_ready_20 && _dataflow__delay_ready_121 && _dataflow__delay_ready_120)) begin
_dataflow_cond_valid_21 <= _dataflow_lessthan_valid_20 && _dataflow__delay_valid_121 && _dataflow__delay_valid_120;
end
if((_dataflow_cond_ready_22 || !_dataflow_cond_valid_22) && (_dataflow_lessthan_ready_20 && _dataflow__delay_ready_120 && _dataflow__delay_ready_121) && (_dataflow_lessthan_valid_20 && _dataflow__delay_valid_120 && _dataflow__delay_valid_121)) begin
_dataflow_cond_data_22 <= (_dataflow_lessthan_data_20)? _dataflow__delay_data_120 : _dataflow__delay_data_121;
end
if(_dataflow_cond_valid_22 && _dataflow_cond_ready_22) begin
_dataflow_cond_valid_22 <= 0;
end
if((_dataflow_cond_ready_22 || !_dataflow_cond_valid_22) && (_dataflow_lessthan_ready_20 && _dataflow__delay_ready_120 && _dataflow__delay_ready_121)) begin
_dataflow_cond_valid_22 <= _dataflow_lessthan_valid_20 && _dataflow__delay_valid_120 && _dataflow__delay_valid_121;
end
if((_dataflow_cond_ready_36 || !_dataflow_cond_valid_36) && (_dataflow_lessthan_ready_35 && _dataflow__delay_ready_155 && _dataflow__delay_ready_154) && (_dataflow_lessthan_valid_35 && _dataflow__delay_valid_155 && _dataflow__delay_valid_154)) begin
_dataflow_cond_data_36 <= (_dataflow_lessthan_data_35)? _dataflow__delay_data_155 : _dataflow__delay_data_154;
end
if(_dataflow_cond_valid_36 && _dataflow_cond_ready_36) begin
_dataflow_cond_valid_36 <= 0;
end
if((_dataflow_cond_ready_36 || !_dataflow_cond_valid_36) && (_dataflow_lessthan_ready_35 && _dataflow__delay_ready_155 && _dataflow__delay_ready_154)) begin
_dataflow_cond_valid_36 <= _dataflow_lessthan_valid_35 && _dataflow__delay_valid_155 && _dataflow__delay_valid_154;
end
if((_dataflow_cond_ready_37 || !_dataflow_cond_valid_37) && (_dataflow_lessthan_ready_35 && _dataflow__delay_ready_154 && _dataflow__delay_ready_155) && (_dataflow_lessthan_valid_35 && _dataflow__delay_valid_154 && _dataflow__delay_valid_155)) begin
_dataflow_cond_data_37 <= (_dataflow_lessthan_data_35)? _dataflow__delay_data_154 : _dataflow__delay_data_155;
end
if(_dataflow_cond_valid_37 && _dataflow_cond_ready_37) begin
_dataflow_cond_valid_37 <= 0;
end
if((_dataflow_cond_ready_37 || !_dataflow_cond_valid_37) && (_dataflow_lessthan_ready_35 && _dataflow__delay_ready_154 && _dataflow__delay_ready_155)) begin
_dataflow_cond_valid_37 <= _dataflow_lessthan_valid_35 && _dataflow__delay_valid_154 && _dataflow__delay_valid_155;
end
if((_dataflow_cond_ready_48 || !_dataflow_cond_valid_48) && (_dataflow_lessthan_ready_47 && _dataflow__delay_ready_165 && _dataflow__delay_ready_164) && (_dataflow_lessthan_valid_47 && _dataflow__delay_valid_165 && _dataflow__delay_valid_164)) begin
_dataflow_cond_data_48 <= (_dataflow_lessthan_data_47)? _dataflow__delay_data_165 : _dataflow__delay_data_164;
end
if(_dataflow_cond_valid_48 && _dataflow_cond_ready_48) begin
_dataflow_cond_valid_48 <= 0;
end
if((_dataflow_cond_ready_48 || !_dataflow_cond_valid_48) && (_dataflow_lessthan_ready_47 && _dataflow__delay_ready_165 && _dataflow__delay_ready_164)) begin
_dataflow_cond_valid_48 <= _dataflow_lessthan_valid_47 && _dataflow__delay_valid_165 && _dataflow__delay_valid_164;
end
if((_dataflow_cond_ready_49 || !_dataflow_cond_valid_49) && (_dataflow_lessthan_ready_47 && _dataflow__delay_ready_164 && _dataflow__delay_ready_165) && (_dataflow_lessthan_valid_47 && _dataflow__delay_valid_164 && _dataflow__delay_valid_165)) begin
_dataflow_cond_data_49 <= (_dataflow_lessthan_data_47)? _dataflow__delay_data_164 : _dataflow__delay_data_165;
end
if(_dataflow_cond_valid_49 && _dataflow_cond_ready_49) begin
_dataflow_cond_valid_49 <= 0;
end
if((_dataflow_cond_ready_49 || !_dataflow_cond_valid_49) && (_dataflow_lessthan_ready_47 && _dataflow__delay_ready_164 && _dataflow__delay_ready_165)) begin
_dataflow_cond_valid_49 <= _dataflow_lessthan_valid_47 && _dataflow__delay_valid_164 && _dataflow__delay_valid_165;
end
if((_dataflow__delay_ready_131 || !_dataflow__delay_valid_131) && _dataflow__delay_ready_130 && _dataflow__delay_valid_130) begin
_dataflow__delay_data_131 <= _dataflow__delay_data_130;
end
if(_dataflow__delay_valid_131 && _dataflow__delay_ready_131) begin
_dataflow__delay_valid_131 <= 0;
end
if((_dataflow__delay_ready_131 || !_dataflow__delay_valid_131) && _dataflow__delay_ready_130) begin
_dataflow__delay_valid_131 <= _dataflow__delay_valid_130;
end
if((_dataflow__delay_ready_143 || !_dataflow__delay_valid_143) && _dataflow__delay_ready_142 && _dataflow__delay_valid_142) begin
_dataflow__delay_data_143 <= _dataflow__delay_data_142;
end
if(_dataflow__delay_valid_143 && _dataflow__delay_ready_143) begin
_dataflow__delay_valid_143 <= 0;
end
if((_dataflow__delay_ready_143 || !_dataflow__delay_valid_143) && _dataflow__delay_ready_142) begin
_dataflow__delay_valid_143 <= _dataflow__delay_valid_142;
end
if((_dataflow_lessthan_ready_23 || !_dataflow_lessthan_valid_23) && (_dataflow_cond_ready_22 && _dataflow__delay_ready_131) && (_dataflow_cond_valid_22 && _dataflow__delay_valid_131)) begin
_dataflow_lessthan_data_23 <= _dataflow_cond_data_22 < _dataflow__delay_data_131;
end
if(_dataflow_lessthan_valid_23 && _dataflow_lessthan_ready_23) begin
_dataflow_lessthan_valid_23 <= 0;
end
if((_dataflow_lessthan_ready_23 || !_dataflow_lessthan_valid_23) && (_dataflow_cond_ready_22 && _dataflow__delay_ready_131)) begin
_dataflow_lessthan_valid_23 <= _dataflow_cond_valid_22 && _dataflow__delay_valid_131;
end
if((_dataflow_lessthan_ready_38 || !_dataflow_lessthan_valid_38) && (_dataflow_cond_ready_37 && _dataflow_cond_ready_21) && (_dataflow_cond_valid_37 && _dataflow_cond_valid_21)) begin
_dataflow_lessthan_data_38 <= _dataflow_cond_data_37 < _dataflow_cond_data_21;
end
if(_dataflow_lessthan_valid_38 && _dataflow_lessthan_ready_38) begin
_dataflow_lessthan_valid_38 <= 0;
end
if((_dataflow_lessthan_ready_38 || !_dataflow_lessthan_valid_38) && (_dataflow_cond_ready_37 && _dataflow_cond_ready_21)) begin
_dataflow_lessthan_valid_38 <= _dataflow_cond_valid_37 && _dataflow_cond_valid_21;
end
if((_dataflow_lessthan_ready_50 || !_dataflow_lessthan_valid_50) && (_dataflow_cond_ready_49 && _dataflow_cond_ready_36) && (_dataflow_cond_valid_49 && _dataflow_cond_valid_36)) begin
_dataflow_lessthan_data_50 <= _dataflow_cond_data_49 < _dataflow_cond_data_36;
end
if(_dataflow_lessthan_valid_50 && _dataflow_lessthan_ready_50) begin
_dataflow_lessthan_valid_50 <= 0;
end
if((_dataflow_lessthan_ready_50 || !_dataflow_lessthan_valid_50) && (_dataflow_cond_ready_49 && _dataflow_cond_ready_36)) begin
_dataflow_lessthan_valid_50 <= _dataflow_cond_valid_49 && _dataflow_cond_valid_36;
end
// Auto-generated (veriloggen-style — NOTE(review): confirm generator) delay registers.
// Each register holds one operand in step with its comparator so the matching cond_* mux
// later sees time-aligned data/select inputs. Handshake per register: capture when
// (ready || !valid) and source valid; clear valid on consume; reload valid from source
// (the later nonblocking assignment overrides the clear when both fire in one cycle).
// _delay_132: buffers _delay_131 (pairs with lessthan_23)
if((_dataflow__delay_ready_132 || !_dataflow__delay_valid_132) && _dataflow__delay_ready_131 && _dataflow__delay_valid_131) begin
_dataflow__delay_data_132 <= _dataflow__delay_data_131;
end
if(_dataflow__delay_valid_132 && _dataflow__delay_ready_132) begin
_dataflow__delay_valid_132 <= 0;
end
if((_dataflow__delay_ready_132 || !_dataflow__delay_valid_132) && _dataflow__delay_ready_131) begin
_dataflow__delay_valid_132 <= _dataflow__delay_valid_131;
end
// _delay_133: buffers cond_22 (pairs with lessthan_23)
if((_dataflow__delay_ready_133 || !_dataflow__delay_valid_133) && _dataflow_cond_ready_22 && _dataflow_cond_valid_22) begin
_dataflow__delay_data_133 <= _dataflow_cond_data_22;
end
if(_dataflow__delay_valid_133 && _dataflow__delay_ready_133) begin
_dataflow__delay_valid_133 <= 0;
end
if((_dataflow__delay_ready_133 || !_dataflow__delay_valid_133) && _dataflow_cond_ready_22) begin
_dataflow__delay_valid_133 <= _dataflow_cond_valid_22;
end
// _delay_144: buffers _delay_143 (delay chain continues)
if((_dataflow__delay_ready_144 || !_dataflow__delay_valid_144) && _dataflow__delay_ready_143 && _dataflow__delay_valid_143) begin
_dataflow__delay_data_144 <= _dataflow__delay_data_143;
end
if(_dataflow__delay_valid_144 && _dataflow__delay_ready_144) begin
_dataflow__delay_valid_144 <= 0;
end
if((_dataflow__delay_ready_144 || !_dataflow__delay_valid_144) && _dataflow__delay_ready_143) begin
_dataflow__delay_valid_144 <= _dataflow__delay_valid_143;
end
// _delay_156: buffers cond_21 (pairs with lessthan_38)
if((_dataflow__delay_ready_156 || !_dataflow__delay_valid_156) && _dataflow_cond_ready_21 && _dataflow_cond_valid_21) begin
_dataflow__delay_data_156 <= _dataflow_cond_data_21;
end
if(_dataflow__delay_valid_156 && _dataflow__delay_ready_156) begin
_dataflow__delay_valid_156 <= 0;
end
if((_dataflow__delay_ready_156 || !_dataflow__delay_valid_156) && _dataflow_cond_ready_21) begin
_dataflow__delay_valid_156 <= _dataflow_cond_valid_21;
end
// _delay_157: buffers cond_37 (pairs with lessthan_38)
if((_dataflow__delay_ready_157 || !_dataflow__delay_valid_157) && _dataflow_cond_ready_37 && _dataflow_cond_valid_37) begin
_dataflow__delay_data_157 <= _dataflow_cond_data_37;
end
if(_dataflow__delay_valid_157 && _dataflow__delay_ready_157) begin
_dataflow__delay_valid_157 <= 0;
end
if((_dataflow__delay_ready_157 || !_dataflow__delay_valid_157) && _dataflow_cond_ready_37) begin
_dataflow__delay_valid_157 <= _dataflow_cond_valid_37;
end
// _delay_166: buffers cond_36 (pairs with lessthan_50)
if((_dataflow__delay_ready_166 || !_dataflow__delay_valid_166) && _dataflow_cond_ready_36 && _dataflow_cond_valid_36) begin
_dataflow__delay_data_166 <= _dataflow_cond_data_36;
end
if(_dataflow__delay_valid_166 && _dataflow__delay_ready_166) begin
_dataflow__delay_valid_166 <= 0;
end
if((_dataflow__delay_ready_166 || !_dataflow__delay_valid_166) && _dataflow_cond_ready_36) begin
_dataflow__delay_valid_166 <= _dataflow_cond_valid_36;
end
// _delay_167: buffers cond_49 (pairs with lessthan_50)
if((_dataflow__delay_ready_167 || !_dataflow__delay_valid_167) && _dataflow_cond_ready_49 && _dataflow_cond_valid_49) begin
_dataflow__delay_data_167 <= _dataflow_cond_data_49;
end
if(_dataflow__delay_valid_167 && _dataflow__delay_ready_167) begin
_dataflow__delay_valid_167 <= 0;
end
if((_dataflow__delay_ready_167 || !_dataflow__delay_valid_167) && _dataflow_cond_ready_49) begin
_dataflow__delay_valid_167 <= _dataflow_cond_valid_49;
end
// _delay_174: buffers cond_48 (head of the _delay_174/175/177 chain used further down)
if((_dataflow__delay_ready_174 || !_dataflow__delay_valid_174) && _dataflow_cond_ready_48 && _dataflow_cond_valid_48) begin
_dataflow__delay_data_174 <= _dataflow_cond_data_48;
end
if(_dataflow__delay_valid_174 && _dataflow__delay_ready_174) begin
_dataflow__delay_valid_174 <= 0;
end
if((_dataflow__delay_ready_174 || !_dataflow__delay_valid_174) && _dataflow_cond_ready_48) begin
_dataflow__delay_valid_174 <= _dataflow_cond_valid_48;
end
// Auto-generated select stage. Each cond_N/cond_N+1 pair muxes the same two delayed
// operands with the same lessthan_* select, swapping the true/false legs — i.e. one
// register gets the smaller value and the other the larger. NOTE(review): this looks
// like a compare-and-swap element of a sorting network — confirm against the generator
// source. Same 3-step ready/valid handshake as elsewhere (capture / clear / reload;
// the reload is the later nonblocking assignment, so it wins on consume+produce cycles).
// cond_24: lessthan_23 ? _delay_133 : _delay_132
if((_dataflow_cond_ready_24 || !_dataflow_cond_valid_24) && (_dataflow_lessthan_ready_23 && _dataflow__delay_ready_133 && _dataflow__delay_ready_132) && (_dataflow_lessthan_valid_23 && _dataflow__delay_valid_133 && _dataflow__delay_valid_132)) begin
_dataflow_cond_data_24 <= (_dataflow_lessthan_data_23)? _dataflow__delay_data_133 : _dataflow__delay_data_132;
end
if(_dataflow_cond_valid_24 && _dataflow_cond_ready_24) begin
_dataflow_cond_valid_24 <= 0;
end
if((_dataflow_cond_ready_24 || !_dataflow_cond_valid_24) && (_dataflow_lessthan_ready_23 && _dataflow__delay_ready_133 && _dataflow__delay_ready_132)) begin
_dataflow_cond_valid_24 <= _dataflow_lessthan_valid_23 && _dataflow__delay_valid_133 && _dataflow__delay_valid_132;
end
// cond_25: lessthan_23 ? _delay_132 : _delay_133 (legs swapped vs. cond_24)
if((_dataflow_cond_ready_25 || !_dataflow_cond_valid_25) && (_dataflow_lessthan_ready_23 && _dataflow__delay_ready_132 && _dataflow__delay_ready_133) && (_dataflow_lessthan_valid_23 && _dataflow__delay_valid_132 && _dataflow__delay_valid_133)) begin
_dataflow_cond_data_25 <= (_dataflow_lessthan_data_23)? _dataflow__delay_data_132 : _dataflow__delay_data_133;
end
if(_dataflow_cond_valid_25 && _dataflow_cond_ready_25) begin
_dataflow_cond_valid_25 <= 0;
end
if((_dataflow_cond_ready_25 || !_dataflow_cond_valid_25) && (_dataflow_lessthan_ready_23 && _dataflow__delay_ready_132 && _dataflow__delay_ready_133)) begin
_dataflow_cond_valid_25 <= _dataflow_lessthan_valid_23 && _dataflow__delay_valid_132 && _dataflow__delay_valid_133;
end
// cond_39: lessthan_38 ? _delay_157 : _delay_156
if((_dataflow_cond_ready_39 || !_dataflow_cond_valid_39) && (_dataflow_lessthan_ready_38 && _dataflow__delay_ready_157 && _dataflow__delay_ready_156) && (_dataflow_lessthan_valid_38 && _dataflow__delay_valid_157 && _dataflow__delay_valid_156)) begin
_dataflow_cond_data_39 <= (_dataflow_lessthan_data_38)? _dataflow__delay_data_157 : _dataflow__delay_data_156;
end
if(_dataflow_cond_valid_39 && _dataflow_cond_ready_39) begin
_dataflow_cond_valid_39 <= 0;
end
if((_dataflow_cond_ready_39 || !_dataflow_cond_valid_39) && (_dataflow_lessthan_ready_38 && _dataflow__delay_ready_157 && _dataflow__delay_ready_156)) begin
_dataflow_cond_valid_39 <= _dataflow_lessthan_valid_38 && _dataflow__delay_valid_157 && _dataflow__delay_valid_156;
end
// cond_40: lessthan_38 ? _delay_156 : _delay_157 (legs swapped vs. cond_39)
if((_dataflow_cond_ready_40 || !_dataflow_cond_valid_40) && (_dataflow_lessthan_ready_38 && _dataflow__delay_ready_156 && _dataflow__delay_ready_157) && (_dataflow_lessthan_valid_38 && _dataflow__delay_valid_156 && _dataflow__delay_valid_157)) begin
_dataflow_cond_data_40 <= (_dataflow_lessthan_data_38)? _dataflow__delay_data_156 : _dataflow__delay_data_157;
end
if(_dataflow_cond_valid_40 && _dataflow_cond_ready_40) begin
_dataflow_cond_valid_40 <= 0;
end
if((_dataflow_cond_ready_40 || !_dataflow_cond_valid_40) && (_dataflow_lessthan_ready_38 && _dataflow__delay_ready_156 && _dataflow__delay_ready_157)) begin
_dataflow_cond_valid_40 <= _dataflow_lessthan_valid_38 && _dataflow__delay_valid_156 && _dataflow__delay_valid_157;
end
// cond_51: lessthan_50 ? _delay_167 : _delay_166
if((_dataflow_cond_ready_51 || !_dataflow_cond_valid_51) && (_dataflow_lessthan_ready_50 && _dataflow__delay_ready_167 && _dataflow__delay_ready_166) && (_dataflow_lessthan_valid_50 && _dataflow__delay_valid_167 && _dataflow__delay_valid_166)) begin
_dataflow_cond_data_51 <= (_dataflow_lessthan_data_50)? _dataflow__delay_data_167 : _dataflow__delay_data_166;
end
if(_dataflow_cond_valid_51 && _dataflow_cond_ready_51) begin
_dataflow_cond_valid_51 <= 0;
end
if((_dataflow_cond_ready_51 || !_dataflow_cond_valid_51) && (_dataflow_lessthan_ready_50 && _dataflow__delay_ready_167 && _dataflow__delay_ready_166)) begin
_dataflow_cond_valid_51 <= _dataflow_lessthan_valid_50 && _dataflow__delay_valid_167 && _dataflow__delay_valid_166;
end
// cond_52: lessthan_50 ? _delay_166 : _delay_167 (legs swapped vs. cond_51)
if((_dataflow_cond_ready_52 || !_dataflow_cond_valid_52) && (_dataflow_lessthan_ready_50 && _dataflow__delay_ready_166 && _dataflow__delay_ready_167) && (_dataflow_lessthan_valid_50 && _dataflow__delay_valid_166 && _dataflow__delay_valid_167)) begin
_dataflow_cond_data_52 <= (_dataflow_lessthan_data_50)? _dataflow__delay_data_166 : _dataflow__delay_data_167;
end
if(_dataflow_cond_valid_52 && _dataflow_cond_ready_52) begin
_dataflow_cond_valid_52 <= 0;
end
if((_dataflow_cond_ready_52 || !_dataflow_cond_valid_52) && (_dataflow_lessthan_ready_50 && _dataflow__delay_ready_166 && _dataflow__delay_ready_167)) begin
_dataflow_cond_valid_52 <= _dataflow_lessthan_valid_50 && _dataflow__delay_valid_166 && _dataflow__delay_valid_167;
end
// Auto-generated next compare stage: two delay-chain hops plus four comparators whose
// results drive the cond_* muxes further down. Handshake per register as everywhere in
// this block: capture when (ready || !valid) and sources valid; clear valid on consume;
// reload valid from sources (later nonblocking assignment overrides the clear).
// _delay_145: buffers _delay_144
if((_dataflow__delay_ready_145 || !_dataflow__delay_valid_145) && _dataflow__delay_ready_144 && _dataflow__delay_valid_144) begin
_dataflow__delay_data_145 <= _dataflow__delay_data_144;
end
if(_dataflow__delay_valid_145 && _dataflow__delay_ready_145) begin
_dataflow__delay_valid_145 <= 0;
end
if((_dataflow__delay_ready_145 || !_dataflow__delay_valid_145) && _dataflow__delay_ready_144) begin
_dataflow__delay_valid_145 <= _dataflow__delay_valid_144;
end
// _delay_175: buffers _delay_174
if((_dataflow__delay_ready_175 || !_dataflow__delay_valid_175) && _dataflow__delay_ready_174 && _dataflow__delay_valid_174) begin
_dataflow__delay_data_175 <= _dataflow__delay_data_174;
end
if(_dataflow__delay_valid_175 && _dataflow__delay_ready_175) begin
_dataflow__delay_valid_175 <= 0;
end
if((_dataflow__delay_ready_175 || !_dataflow__delay_valid_175) && _dataflow__delay_ready_174) begin
_dataflow__delay_valid_175 <= _dataflow__delay_valid_174;
end
// lessthan_26: comparator, cond_25 < _delay_145
if((_dataflow_lessthan_ready_26 || !_dataflow_lessthan_valid_26) && (_dataflow_cond_ready_25 && _dataflow__delay_ready_145) && (_dataflow_cond_valid_25 && _dataflow__delay_valid_145)) begin
_dataflow_lessthan_data_26 <= _dataflow_cond_data_25 < _dataflow__delay_data_145;
end
if(_dataflow_lessthan_valid_26 && _dataflow_lessthan_ready_26) begin
_dataflow_lessthan_valid_26 <= 0;
end
if((_dataflow_lessthan_ready_26 || !_dataflow_lessthan_valid_26) && (_dataflow_cond_ready_25 && _dataflow__delay_ready_145)) begin
_dataflow_lessthan_valid_26 <= _dataflow_cond_valid_25 && _dataflow__delay_valid_145;
end
// lessthan_41: comparator, cond_40 < cond_24
if((_dataflow_lessthan_ready_41 || !_dataflow_lessthan_valid_41) && (_dataflow_cond_ready_40 && _dataflow_cond_ready_24) && (_dataflow_cond_valid_40 && _dataflow_cond_valid_24)) begin
_dataflow_lessthan_data_41 <= _dataflow_cond_data_40 < _dataflow_cond_data_24;
end
if(_dataflow_lessthan_valid_41 && _dataflow_lessthan_ready_41) begin
_dataflow_lessthan_valid_41 <= 0;
end
if((_dataflow_lessthan_ready_41 || !_dataflow_lessthan_valid_41) && (_dataflow_cond_ready_40 && _dataflow_cond_ready_24)) begin
_dataflow_lessthan_valid_41 <= _dataflow_cond_valid_40 && _dataflow_cond_valid_24;
end
// lessthan_53: comparator, cond_52 < cond_39
if((_dataflow_lessthan_ready_53 || !_dataflow_lessthan_valid_53) && (_dataflow_cond_ready_52 && _dataflow_cond_ready_39) && (_dataflow_cond_valid_52 && _dataflow_cond_valid_39)) begin
_dataflow_lessthan_data_53 <= _dataflow_cond_data_52 < _dataflow_cond_data_39;
end
if(_dataflow_lessthan_valid_53 && _dataflow_lessthan_ready_53) begin
_dataflow_lessthan_valid_53 <= 0;
end
if((_dataflow_lessthan_ready_53 || !_dataflow_lessthan_valid_53) && (_dataflow_cond_ready_52 && _dataflow_cond_ready_39)) begin
_dataflow_lessthan_valid_53 <= _dataflow_cond_valid_52 && _dataflow_cond_valid_39;
end
// lessthan_62: comparator, _delay_175 < cond_51
if((_dataflow_lessthan_ready_62 || !_dataflow_lessthan_valid_62) && (_dataflow__delay_ready_175 && _dataflow_cond_ready_51) && (_dataflow__delay_valid_175 && _dataflow_cond_valid_51)) begin
_dataflow_lessthan_data_62 <= _dataflow__delay_data_175 < _dataflow_cond_data_51;
end
if(_dataflow_lessthan_valid_62 && _dataflow_lessthan_ready_62) begin
_dataflow_lessthan_valid_62 <= 0;
end
if((_dataflow_lessthan_ready_62 || !_dataflow_lessthan_valid_62) && (_dataflow__delay_ready_175 && _dataflow_cond_ready_51)) begin
_dataflow_lessthan_valid_62 <= _dataflow__delay_valid_175 && _dataflow_cond_valid_51;
end
// Auto-generated operand delay registers aligning values with the comparators of this
// stage, so the downstream cond_* muxes see time-aligned data/select inputs. Handshake
// per register: capture when (ready || !valid) and source valid; clear valid on consume;
// reload valid from the source (later nonblocking assignment overrides the clear).
// _delay_146: buffers _delay_145 (pairs with lessthan_26)
if((_dataflow__delay_ready_146 || !_dataflow__delay_valid_146) && _dataflow__delay_ready_145 && _dataflow__delay_valid_145) begin
_dataflow__delay_data_146 <= _dataflow__delay_data_145;
end
if(_dataflow__delay_valid_146 && _dataflow__delay_ready_146) begin
_dataflow__delay_valid_146 <= 0;
end
if((_dataflow__delay_ready_146 || !_dataflow__delay_valid_146) && _dataflow__delay_ready_145) begin
_dataflow__delay_valid_146 <= _dataflow__delay_valid_145;
end
// _delay_147: buffers cond_25 (pairs with lessthan_26)
if((_dataflow__delay_ready_147 || !_dataflow__delay_valid_147) && _dataflow_cond_ready_25 && _dataflow_cond_valid_25) begin
_dataflow__delay_data_147 <= _dataflow_cond_data_25;
end
if(_dataflow__delay_valid_147 && _dataflow__delay_ready_147) begin
_dataflow__delay_valid_147 <= 0;
end
if((_dataflow__delay_ready_147 || !_dataflow__delay_valid_147) && _dataflow_cond_ready_25) begin
_dataflow__delay_valid_147 <= _dataflow_cond_valid_25;
end
// _delay_158: buffers cond_24 (pairs with lessthan_41)
if((_dataflow__delay_ready_158 || !_dataflow__delay_valid_158) && _dataflow_cond_ready_24 && _dataflow_cond_valid_24) begin
_dataflow__delay_data_158 <= _dataflow_cond_data_24;
end
if(_dataflow__delay_valid_158 && _dataflow__delay_ready_158) begin
_dataflow__delay_valid_158 <= 0;
end
if((_dataflow__delay_ready_158 || !_dataflow__delay_valid_158) && _dataflow_cond_ready_24) begin
_dataflow__delay_valid_158 <= _dataflow_cond_valid_24;
end
// _delay_159: buffers cond_40 (pairs with lessthan_41)
if((_dataflow__delay_ready_159 || !_dataflow__delay_valid_159) && _dataflow_cond_ready_40 && _dataflow_cond_valid_40) begin
_dataflow__delay_data_159 <= _dataflow_cond_data_40;
end
if(_dataflow__delay_valid_159 && _dataflow__delay_ready_159) begin
_dataflow__delay_valid_159 <= 0;
end
if((_dataflow__delay_ready_159 || !_dataflow__delay_valid_159) && _dataflow_cond_ready_40) begin
_dataflow__delay_valid_159 <= _dataflow_cond_valid_40;
end
// _delay_168: buffers cond_39 (pairs with lessthan_53)
if((_dataflow__delay_ready_168 || !_dataflow__delay_valid_168) && _dataflow_cond_ready_39 && _dataflow_cond_valid_39) begin
_dataflow__delay_data_168 <= _dataflow_cond_data_39;
end
if(_dataflow__delay_valid_168 && _dataflow__delay_ready_168) begin
_dataflow__delay_valid_168 <= 0;
end
if((_dataflow__delay_ready_168 || !_dataflow__delay_valid_168) && _dataflow_cond_ready_39) begin
_dataflow__delay_valid_168 <= _dataflow_cond_valid_39;
end
// _delay_169: buffers cond_52 (pairs with lessthan_53)
if((_dataflow__delay_ready_169 || !_dataflow__delay_valid_169) && _dataflow_cond_ready_52 && _dataflow_cond_valid_52) begin
_dataflow__delay_data_169 <= _dataflow_cond_data_52;
end
if(_dataflow__delay_valid_169 && _dataflow__delay_ready_169) begin
_dataflow__delay_valid_169 <= 0;
end
if((_dataflow__delay_ready_169 || !_dataflow__delay_valid_169) && _dataflow_cond_ready_52) begin
_dataflow__delay_valid_169 <= _dataflow_cond_valid_52;
end
// _delay_176: buffers cond_51 (pairs with lessthan_62)
if((_dataflow__delay_ready_176 || !_dataflow__delay_valid_176) && _dataflow_cond_ready_51 && _dataflow_cond_valid_51) begin
_dataflow__delay_data_176 <= _dataflow_cond_data_51;
end
if(_dataflow__delay_valid_176 && _dataflow__delay_ready_176) begin
_dataflow__delay_valid_176 <= 0;
end
if((_dataflow__delay_ready_176 || !_dataflow__delay_valid_176) && _dataflow_cond_ready_51) begin
_dataflow__delay_valid_176 <= _dataflow_cond_valid_51;
end
// _delay_177: buffers _delay_175 (pairs with lessthan_62)
if((_dataflow__delay_ready_177 || !_dataflow__delay_valid_177) && _dataflow__delay_ready_175 && _dataflow__delay_valid_175) begin
_dataflow__delay_data_177 <= _dataflow__delay_data_175;
end
if(_dataflow__delay_valid_177 && _dataflow__delay_ready_177) begin
_dataflow__delay_valid_177 <= 0;
end
if((_dataflow__delay_ready_177 || !_dataflow__delay_valid_177) && _dataflow__delay_ready_175) begin
_dataflow__delay_valid_177 <= _dataflow__delay_valid_175;
end
// Auto-generated select stage. Each cond pair muxes the same two delayed operands with
// one lessthan_* select, legs swapped between the two registers (one takes the smaller,
// the other the larger value). NOTE(review): consistent with a compare-and-swap element
// of a sorting network — confirm with the generator source. Handshake: capture / clear
// valid on consume / reload valid (the later nonblocking assignment wins).
// cond_27: lessthan_26 ? _delay_147 : _delay_146
if((_dataflow_cond_ready_27 || !_dataflow_cond_valid_27) && (_dataflow_lessthan_ready_26 && _dataflow__delay_ready_147 && _dataflow__delay_ready_146) && (_dataflow_lessthan_valid_26 && _dataflow__delay_valid_147 && _dataflow__delay_valid_146)) begin
_dataflow_cond_data_27 <= (_dataflow_lessthan_data_26)? _dataflow__delay_data_147 : _dataflow__delay_data_146;
end
if(_dataflow_cond_valid_27 && _dataflow_cond_ready_27) begin
_dataflow_cond_valid_27 <= 0;
end
if((_dataflow_cond_ready_27 || !_dataflow_cond_valid_27) && (_dataflow_lessthan_ready_26 && _dataflow__delay_ready_147 && _dataflow__delay_ready_146)) begin
_dataflow_cond_valid_27 <= _dataflow_lessthan_valid_26 && _dataflow__delay_valid_147 && _dataflow__delay_valid_146;
end
// cond_28: lessthan_26 ? _delay_146 : _delay_147 (legs swapped vs. cond_27)
if((_dataflow_cond_ready_28 || !_dataflow_cond_valid_28) && (_dataflow_lessthan_ready_26 && _dataflow__delay_ready_146 && _dataflow__delay_ready_147) && (_dataflow_lessthan_valid_26 && _dataflow__delay_valid_146 && _dataflow__delay_valid_147)) begin
_dataflow_cond_data_28 <= (_dataflow_lessthan_data_26)? _dataflow__delay_data_146 : _dataflow__delay_data_147;
end
if(_dataflow_cond_valid_28 && _dataflow_cond_ready_28) begin
_dataflow_cond_valid_28 <= 0;
end
if((_dataflow_cond_ready_28 || !_dataflow_cond_valid_28) && (_dataflow_lessthan_ready_26 && _dataflow__delay_ready_146 && _dataflow__delay_ready_147)) begin
_dataflow_cond_valid_28 <= _dataflow_lessthan_valid_26 && _dataflow__delay_valid_146 && _dataflow__delay_valid_147;
end
// cond_42: lessthan_41 ? _delay_159 : _delay_158
if((_dataflow_cond_ready_42 || !_dataflow_cond_valid_42) && (_dataflow_lessthan_ready_41 && _dataflow__delay_ready_159 && _dataflow__delay_ready_158) && (_dataflow_lessthan_valid_41 && _dataflow__delay_valid_159 && _dataflow__delay_valid_158)) begin
_dataflow_cond_data_42 <= (_dataflow_lessthan_data_41)? _dataflow__delay_data_159 : _dataflow__delay_data_158;
end
if(_dataflow_cond_valid_42 && _dataflow_cond_ready_42) begin
_dataflow_cond_valid_42 <= 0;
end
if((_dataflow_cond_ready_42 || !_dataflow_cond_valid_42) && (_dataflow_lessthan_ready_41 && _dataflow__delay_ready_159 && _dataflow__delay_ready_158)) begin
_dataflow_cond_valid_42 <= _dataflow_lessthan_valid_41 && _dataflow__delay_valid_159 && _dataflow__delay_valid_158;
end
// cond_43: lessthan_41 ? _delay_158 : _delay_159 (legs swapped vs. cond_42)
if((_dataflow_cond_ready_43 || !_dataflow_cond_valid_43) && (_dataflow_lessthan_ready_41 && _dataflow__delay_ready_158 && _dataflow__delay_ready_159) && (_dataflow_lessthan_valid_41 && _dataflow__delay_valid_158 && _dataflow__delay_valid_159)) begin
_dataflow_cond_data_43 <= (_dataflow_lessthan_data_41)? _dataflow__delay_data_158 : _dataflow__delay_data_159;
end
if(_dataflow_cond_valid_43 && _dataflow_cond_ready_43) begin
_dataflow_cond_valid_43 <= 0;
end
if((_dataflow_cond_ready_43 || !_dataflow_cond_valid_43) && (_dataflow_lessthan_ready_41 && _dataflow__delay_ready_158 && _dataflow__delay_ready_159)) begin
_dataflow_cond_valid_43 <= _dataflow_lessthan_valid_41 && _dataflow__delay_valid_158 && _dataflow__delay_valid_159;
end
// cond_54: lessthan_53 ? _delay_169 : _delay_168
if((_dataflow_cond_ready_54 || !_dataflow_cond_valid_54) && (_dataflow_lessthan_ready_53 && _dataflow__delay_ready_169 && _dataflow__delay_ready_168) && (_dataflow_lessthan_valid_53 && _dataflow__delay_valid_169 && _dataflow__delay_valid_168)) begin
_dataflow_cond_data_54 <= (_dataflow_lessthan_data_53)? _dataflow__delay_data_169 : _dataflow__delay_data_168;
end
if(_dataflow_cond_valid_54 && _dataflow_cond_ready_54) begin
_dataflow_cond_valid_54 <= 0;
end
if((_dataflow_cond_ready_54 || !_dataflow_cond_valid_54) && (_dataflow_lessthan_ready_53 && _dataflow__delay_ready_169 && _dataflow__delay_ready_168)) begin
_dataflow_cond_valid_54 <= _dataflow_lessthan_valid_53 && _dataflow__delay_valid_169 && _dataflow__delay_valid_168;
end
// cond_55: lessthan_53 ? _delay_168 : _delay_169 (legs swapped vs. cond_54)
if((_dataflow_cond_ready_55 || !_dataflow_cond_valid_55) && (_dataflow_lessthan_ready_53 && _dataflow__delay_ready_168 && _dataflow__delay_ready_169) && (_dataflow_lessthan_valid_53 && _dataflow__delay_valid_168 && _dataflow__delay_valid_169)) begin
_dataflow_cond_data_55 <= (_dataflow_lessthan_data_53)? _dataflow__delay_data_168 : _dataflow__delay_data_169;
end
if(_dataflow_cond_valid_55 && _dataflow_cond_ready_55) begin
_dataflow_cond_valid_55 <= 0;
end
if((_dataflow_cond_ready_55 || !_dataflow_cond_valid_55) && (_dataflow_lessthan_ready_53 && _dataflow__delay_ready_168 && _dataflow__delay_ready_169)) begin
_dataflow_cond_valid_55 <= _dataflow_lessthan_valid_53 && _dataflow__delay_valid_168 && _dataflow__delay_valid_169;
end
// cond_63: lessthan_62 ? _delay_177 : _delay_176
if((_dataflow_cond_ready_63 || !_dataflow_cond_valid_63) && (_dataflow_lessthan_ready_62 && _dataflow__delay_ready_177 && _dataflow__delay_ready_176) && (_dataflow_lessthan_valid_62 && _dataflow__delay_valid_177 && _dataflow__delay_valid_176)) begin
_dataflow_cond_data_63 <= (_dataflow_lessthan_data_62)? _dataflow__delay_data_177 : _dataflow__delay_data_176;
end
if(_dataflow_cond_valid_63 && _dataflow_cond_ready_63) begin
_dataflow_cond_valid_63 <= 0;
end
if((_dataflow_cond_ready_63 || !_dataflow_cond_valid_63) && (_dataflow_lessthan_ready_62 && _dataflow__delay_ready_177 && _dataflow__delay_ready_176)) begin
_dataflow_cond_valid_63 <= _dataflow_lessthan_valid_62 && _dataflow__delay_valid_177 && _dataflow__delay_valid_176;
end
// cond_64: lessthan_62 ? _delay_176 : _delay_177 (legs swapped vs. cond_63)
if((_dataflow_cond_ready_64 || !_dataflow_cond_valid_64) && (_dataflow_lessthan_ready_62 && _dataflow__delay_ready_176 && _dataflow__delay_ready_177) && (_dataflow_lessthan_valid_62 && _dataflow__delay_valid_176 && _dataflow__delay_valid_177)) begin
_dataflow_cond_data_64 <= (_dataflow_lessthan_data_62)? _dataflow__delay_data_176 : _dataflow__delay_data_177;
end
if(_dataflow_cond_valid_64 && _dataflow_cond_ready_64) begin
_dataflow_cond_valid_64 <= 0;
end
if((_dataflow_cond_ready_64 || !_dataflow_cond_valid_64) && (_dataflow_lessthan_ready_62 && _dataflow__delay_ready_176 && _dataflow__delay_ready_177)) begin
_dataflow_cond_valid_64 <= _dataflow_lessthan_valid_62 && _dataflow__delay_valid_176 && _dataflow__delay_valid_177;
end
// Auto-generated compare stage plus operand delay registers for the following mux
// stage. Handshake per register, as throughout this generated block: capture when
// (ready || !valid) and sources valid; clear valid on consume; reload valid from the
// sources' valid (the reload is the later nonblocking assignment, so it wins when a
// word is consumed and replaced in the same cycle).
// lessthan_44: comparator, cond_43 < cond_27
if((_dataflow_lessthan_ready_44 || !_dataflow_lessthan_valid_44) && (_dataflow_cond_ready_43 && _dataflow_cond_ready_27) && (_dataflow_cond_valid_43 && _dataflow_cond_valid_27)) begin
_dataflow_lessthan_data_44 <= _dataflow_cond_data_43 < _dataflow_cond_data_27;
end
if(_dataflow_lessthan_valid_44 && _dataflow_lessthan_ready_44) begin
_dataflow_lessthan_valid_44 <= 0;
end
if((_dataflow_lessthan_ready_44 || !_dataflow_lessthan_valid_44) && (_dataflow_cond_ready_43 && _dataflow_cond_ready_27)) begin
_dataflow_lessthan_valid_44 <= _dataflow_cond_valid_43 && _dataflow_cond_valid_27;
end
// lessthan_56: comparator, cond_55 < cond_42
if((_dataflow_lessthan_ready_56 || !_dataflow_lessthan_valid_56) && (_dataflow_cond_ready_55 && _dataflow_cond_ready_42) && (_dataflow_cond_valid_55 && _dataflow_cond_valid_42)) begin
_dataflow_lessthan_data_56 <= _dataflow_cond_data_55 < _dataflow_cond_data_42;
end
if(_dataflow_lessthan_valid_56 && _dataflow_lessthan_ready_56) begin
_dataflow_lessthan_valid_56 <= 0;
end
if((_dataflow_lessthan_ready_56 || !_dataflow_lessthan_valid_56) && (_dataflow_cond_ready_55 && _dataflow_cond_ready_42)) begin
_dataflow_lessthan_valid_56 <= _dataflow_cond_valid_55 && _dataflow_cond_valid_42;
end
// lessthan_65: comparator, cond_64 < cond_54
if((_dataflow_lessthan_ready_65 || !_dataflow_lessthan_valid_65) && (_dataflow_cond_ready_64 && _dataflow_cond_ready_54) && (_dataflow_cond_valid_64 && _dataflow_cond_valid_54)) begin
_dataflow_lessthan_data_65 <= _dataflow_cond_data_64 < _dataflow_cond_data_54;
end
if(_dataflow_lessthan_valid_65 && _dataflow_lessthan_ready_65) begin
_dataflow_lessthan_valid_65 <= 0;
end
if((_dataflow_lessthan_ready_65 || !_dataflow_lessthan_valid_65) && (_dataflow_cond_ready_64 && _dataflow_cond_ready_54)) begin
_dataflow_lessthan_valid_65 <= _dataflow_cond_valid_64 && _dataflow_cond_valid_54;
end
// _delay_160: buffers cond_27 (pairs with lessthan_44)
if((_dataflow__delay_ready_160 || !_dataflow__delay_valid_160) && _dataflow_cond_ready_27 && _dataflow_cond_valid_27) begin
_dataflow__delay_data_160 <= _dataflow_cond_data_27;
end
if(_dataflow__delay_valid_160 && _dataflow__delay_ready_160) begin
_dataflow__delay_valid_160 <= 0;
end
if((_dataflow__delay_ready_160 || !_dataflow__delay_valid_160) && _dataflow_cond_ready_27) begin
_dataflow__delay_valid_160 <= _dataflow_cond_valid_27;
end
// _delay_161: buffers cond_43 (pairs with lessthan_44)
if((_dataflow__delay_ready_161 || !_dataflow__delay_valid_161) && _dataflow_cond_ready_43 && _dataflow_cond_valid_43) begin
_dataflow__delay_data_161 <= _dataflow_cond_data_43;
end
if(_dataflow__delay_valid_161 && _dataflow__delay_ready_161) begin
_dataflow__delay_valid_161 <= 0;
end
if((_dataflow__delay_ready_161 || !_dataflow__delay_valid_161) && _dataflow_cond_ready_43) begin
_dataflow__delay_valid_161 <= _dataflow_cond_valid_43;
end
// _delay_170: buffers cond_42 (pairs with lessthan_56)
if((_dataflow__delay_ready_170 || !_dataflow__delay_valid_170) && _dataflow_cond_ready_42 && _dataflow_cond_valid_42) begin
_dataflow__delay_data_170 <= _dataflow_cond_data_42;
end
if(_dataflow__delay_valid_170 && _dataflow__delay_ready_170) begin
_dataflow__delay_valid_170 <= 0;
end
if((_dataflow__delay_ready_170 || !_dataflow__delay_valid_170) && _dataflow_cond_ready_42) begin
_dataflow__delay_valid_170 <= _dataflow_cond_valid_42;
end
// _delay_171: buffers cond_55 (pairs with lessthan_56)
if((_dataflow__delay_ready_171 || !_dataflow__delay_valid_171) && _dataflow_cond_ready_55 && _dataflow_cond_valid_55) begin
_dataflow__delay_data_171 <= _dataflow_cond_data_55;
end
if(_dataflow__delay_valid_171 && _dataflow__delay_ready_171) begin
_dataflow__delay_valid_171 <= 0;
end
if((_dataflow__delay_ready_171 || !_dataflow__delay_valid_171) && _dataflow_cond_ready_55) begin
_dataflow__delay_valid_171 <= _dataflow_cond_valid_55;
end
// _delay_178: buffers cond_54 (pairs with lessthan_65)
if((_dataflow__delay_ready_178 || !_dataflow__delay_valid_178) && _dataflow_cond_ready_54 && _dataflow_cond_valid_54) begin
_dataflow__delay_data_178 <= _dataflow_cond_data_54;
end
if(_dataflow__delay_valid_178 && _dataflow__delay_ready_178) begin
_dataflow__delay_valid_178 <= 0;
end
if((_dataflow__delay_ready_178 || !_dataflow__delay_valid_178) && _dataflow_cond_ready_54) begin
_dataflow__delay_valid_178 <= _dataflow_cond_valid_54;
end
// _delay_179: buffers cond_64 (pairs with lessthan_65)
if((_dataflow__delay_ready_179 || !_dataflow__delay_valid_179) && _dataflow_cond_ready_64 && _dataflow_cond_valid_64) begin
_dataflow__delay_data_179 <= _dataflow_cond_data_64;
end
if(_dataflow__delay_valid_179 && _dataflow__delay_ready_179) begin
_dataflow__delay_valid_179 <= 0;
end
if((_dataflow__delay_ready_179 || !_dataflow__delay_valid_179) && _dataflow_cond_ready_64) begin
_dataflow__delay_valid_179 <= _dataflow_cond_valid_64;
end
// _delay_184: buffers cond_63 (head of the chain feeding lessthan_74 via _delay_185)
if((_dataflow__delay_ready_184 || !_dataflow__delay_valid_184) && _dataflow_cond_ready_63 && _dataflow_cond_valid_63) begin
_dataflow__delay_data_184 <= _dataflow_cond_data_63;
end
if(_dataflow__delay_valid_184 && _dataflow__delay_ready_184) begin
_dataflow__delay_valid_184 <= 0;
end
if((_dataflow__delay_ready_184 || !_dataflow__delay_valid_184) && _dataflow_cond_ready_63) begin
_dataflow__delay_valid_184 <= _dataflow_cond_valid_63;
end
// _delay_202: buffers cond_28 (head of the _delay_202/203 chain)
if((_dataflow__delay_ready_202 || !_dataflow__delay_valid_202) && _dataflow_cond_ready_28 && _dataflow_cond_valid_28) begin
_dataflow__delay_data_202 <= _dataflow_cond_data_28;
end
if(_dataflow__delay_valid_202 && _dataflow__delay_ready_202) begin
_dataflow__delay_valid_202 <= 0;
end
if((_dataflow__delay_ready_202 || !_dataflow__delay_valid_202) && _dataflow_cond_ready_28) begin
_dataflow__delay_valid_202 <= _dataflow_cond_valid_28;
end
// Auto-generated select stage plus two delay-chain hops. Each cond pair muxes the same
// two delayed operands with one lessthan_* select, legs swapped between the pair (one
// register takes the smaller, the other the larger value). Handshake per register:
// capture when (ready || !valid) and sources valid; clear valid on consume; reload
// valid from sources (later nonblocking assignment overrides the clear).
// cond_45: lessthan_44 ? _delay_161 : _delay_160
if((_dataflow_cond_ready_45 || !_dataflow_cond_valid_45) && (_dataflow_lessthan_ready_44 && _dataflow__delay_ready_161 && _dataflow__delay_ready_160) && (_dataflow_lessthan_valid_44 && _dataflow__delay_valid_161 && _dataflow__delay_valid_160)) begin
_dataflow_cond_data_45 <= (_dataflow_lessthan_data_44)? _dataflow__delay_data_161 : _dataflow__delay_data_160;
end
if(_dataflow_cond_valid_45 && _dataflow_cond_ready_45) begin
_dataflow_cond_valid_45 <= 0;
end
if((_dataflow_cond_ready_45 || !_dataflow_cond_valid_45) && (_dataflow_lessthan_ready_44 && _dataflow__delay_ready_161 && _dataflow__delay_ready_160)) begin
_dataflow_cond_valid_45 <= _dataflow_lessthan_valid_44 && _dataflow__delay_valid_161 && _dataflow__delay_valid_160;
end
// cond_46: lessthan_44 ? _delay_160 : _delay_161 (legs swapped vs. cond_45)
if((_dataflow_cond_ready_46 || !_dataflow_cond_valid_46) && (_dataflow_lessthan_ready_44 && _dataflow__delay_ready_160 && _dataflow__delay_ready_161) && (_dataflow_lessthan_valid_44 && _dataflow__delay_valid_160 && _dataflow__delay_valid_161)) begin
_dataflow_cond_data_46 <= (_dataflow_lessthan_data_44)? _dataflow__delay_data_160 : _dataflow__delay_data_161;
end
if(_dataflow_cond_valid_46 && _dataflow_cond_ready_46) begin
_dataflow_cond_valid_46 <= 0;
end
if((_dataflow_cond_ready_46 || !_dataflow_cond_valid_46) && (_dataflow_lessthan_ready_44 && _dataflow__delay_ready_160 && _dataflow__delay_ready_161)) begin
_dataflow_cond_valid_46 <= _dataflow_lessthan_valid_44 && _dataflow__delay_valid_160 && _dataflow__delay_valid_161;
end
// cond_57: lessthan_56 ? _delay_171 : _delay_170
if((_dataflow_cond_ready_57 || !_dataflow_cond_valid_57) && (_dataflow_lessthan_ready_56 && _dataflow__delay_ready_171 && _dataflow__delay_ready_170) && (_dataflow_lessthan_valid_56 && _dataflow__delay_valid_171 && _dataflow__delay_valid_170)) begin
_dataflow_cond_data_57 <= (_dataflow_lessthan_data_56)? _dataflow__delay_data_171 : _dataflow__delay_data_170;
end
if(_dataflow_cond_valid_57 && _dataflow_cond_ready_57) begin
_dataflow_cond_valid_57 <= 0;
end
if((_dataflow_cond_ready_57 || !_dataflow_cond_valid_57) && (_dataflow_lessthan_ready_56 && _dataflow__delay_ready_171 && _dataflow__delay_ready_170)) begin
_dataflow_cond_valid_57 <= _dataflow_lessthan_valid_56 && _dataflow__delay_valid_171 && _dataflow__delay_valid_170;
end
// cond_58: lessthan_56 ? _delay_170 : _delay_171 (legs swapped vs. cond_57)
if((_dataflow_cond_ready_58 || !_dataflow_cond_valid_58) && (_dataflow_lessthan_ready_56 && _dataflow__delay_ready_170 && _dataflow__delay_ready_171) && (_dataflow_lessthan_valid_56 && _dataflow__delay_valid_170 && _dataflow__delay_valid_171)) begin
_dataflow_cond_data_58 <= (_dataflow_lessthan_data_56)? _dataflow__delay_data_170 : _dataflow__delay_data_171;
end
if(_dataflow_cond_valid_58 && _dataflow_cond_ready_58) begin
_dataflow_cond_valid_58 <= 0;
end
if((_dataflow_cond_ready_58 || !_dataflow_cond_valid_58) && (_dataflow_lessthan_ready_56 && _dataflow__delay_ready_170 && _dataflow__delay_ready_171)) begin
_dataflow_cond_valid_58 <= _dataflow_lessthan_valid_56 && _dataflow__delay_valid_170 && _dataflow__delay_valid_171;
end
// cond_66: lessthan_65 ? _delay_179 : _delay_178
if((_dataflow_cond_ready_66 || !_dataflow_cond_valid_66) && (_dataflow_lessthan_ready_65 && _dataflow__delay_ready_179 && _dataflow__delay_ready_178) && (_dataflow_lessthan_valid_65 && _dataflow__delay_valid_179 && _dataflow__delay_valid_178)) begin
_dataflow_cond_data_66 <= (_dataflow_lessthan_data_65)? _dataflow__delay_data_179 : _dataflow__delay_data_178;
end
if(_dataflow_cond_valid_66 && _dataflow_cond_ready_66) begin
_dataflow_cond_valid_66 <= 0;
end
if((_dataflow_cond_ready_66 || !_dataflow_cond_valid_66) && (_dataflow_lessthan_ready_65 && _dataflow__delay_ready_179 && _dataflow__delay_ready_178)) begin
_dataflow_cond_valid_66 <= _dataflow_lessthan_valid_65 && _dataflow__delay_valid_179 && _dataflow__delay_valid_178;
end
// cond_67: lessthan_65 ? _delay_178 : _delay_179 (legs swapped vs. cond_66)
if((_dataflow_cond_ready_67 || !_dataflow_cond_valid_67) && (_dataflow_lessthan_ready_65 && _dataflow__delay_ready_178 && _dataflow__delay_ready_179) && (_dataflow_lessthan_valid_65 && _dataflow__delay_valid_178 && _dataflow__delay_valid_179)) begin
_dataflow_cond_data_67 <= (_dataflow_lessthan_data_65)? _dataflow__delay_data_178 : _dataflow__delay_data_179;
end
if(_dataflow_cond_valid_67 && _dataflow_cond_ready_67) begin
_dataflow_cond_valid_67 <= 0;
end
if((_dataflow_cond_ready_67 || !_dataflow_cond_valid_67) && (_dataflow_lessthan_ready_65 && _dataflow__delay_ready_178 && _dataflow__delay_ready_179)) begin
_dataflow_cond_valid_67 <= _dataflow_lessthan_valid_65 && _dataflow__delay_valid_178 && _dataflow__delay_valid_179;
end
// _delay_185: buffers _delay_184 (feeds lessthan_74 below)
if((_dataflow__delay_ready_185 || !_dataflow__delay_valid_185) && _dataflow__delay_ready_184 && _dataflow__delay_valid_184) begin
_dataflow__delay_data_185 <= _dataflow__delay_data_184;
end
if(_dataflow__delay_valid_185 && _dataflow__delay_ready_185) begin
_dataflow__delay_valid_185 <= 0;
end
if((_dataflow__delay_ready_185 || !_dataflow__delay_valid_185) && _dataflow__delay_ready_184) begin
_dataflow__delay_valid_185 <= _dataflow__delay_valid_184;
end
// _delay_203: buffers _delay_202 (continuing the cond_28 delay chain)
if((_dataflow__delay_ready_203 || !_dataflow__delay_valid_203) && _dataflow__delay_ready_202 && _dataflow__delay_valid_202) begin
_dataflow__delay_data_203 <= _dataflow__delay_data_202;
end
if(_dataflow__delay_valid_203 && _dataflow__delay_ready_203) begin
_dataflow__delay_valid_203 <= 0;
end
if((_dataflow__delay_ready_203 || !_dataflow__delay_valid_203) && _dataflow__delay_ready_202) begin
_dataflow__delay_valid_203 <= _dataflow__delay_valid_202;
end
// Auto-generated compare stage plus two operand delay registers (the matching cond_*
// muxes continue past this chunk). Handshake per register: capture when
// (ready || !valid) and sources valid; clear valid on consume; reload valid from the
// sources' valid (the reload is the later nonblocking assignment, so it wins when a
// word is consumed and replaced in the same cycle).
// lessthan_59: comparator, cond_58 < cond_45
if((_dataflow_lessthan_ready_59 || !_dataflow_lessthan_valid_59) && (_dataflow_cond_ready_58 && _dataflow_cond_ready_45) && (_dataflow_cond_valid_58 && _dataflow_cond_valid_45)) begin
_dataflow_lessthan_data_59 <= _dataflow_cond_data_58 < _dataflow_cond_data_45;
end
if(_dataflow_lessthan_valid_59 && _dataflow_lessthan_ready_59) begin
_dataflow_lessthan_valid_59 <= 0;
end
if((_dataflow_lessthan_ready_59 || !_dataflow_lessthan_valid_59) && (_dataflow_cond_ready_58 && _dataflow_cond_ready_45)) begin
_dataflow_lessthan_valid_59 <= _dataflow_cond_valid_58 && _dataflow_cond_valid_45;
end
// lessthan_68: comparator, cond_67 < cond_57
if((_dataflow_lessthan_ready_68 || !_dataflow_lessthan_valid_68) && (_dataflow_cond_ready_67 && _dataflow_cond_ready_57) && (_dataflow_cond_valid_67 && _dataflow_cond_valid_57)) begin
_dataflow_lessthan_data_68 <= _dataflow_cond_data_67 < _dataflow_cond_data_57;
end
if(_dataflow_lessthan_valid_68 && _dataflow_lessthan_ready_68) begin
_dataflow_lessthan_valid_68 <= 0;
end
if((_dataflow_lessthan_ready_68 || !_dataflow_lessthan_valid_68) && (_dataflow_cond_ready_67 && _dataflow_cond_ready_57)) begin
_dataflow_lessthan_valid_68 <= _dataflow_cond_valid_67 && _dataflow_cond_valid_57;
end
// lessthan_74: comparator, _delay_185 < cond_66
if((_dataflow_lessthan_ready_74 || !_dataflow_lessthan_valid_74) && (_dataflow__delay_ready_185 && _dataflow_cond_ready_66) && (_dataflow__delay_valid_185 && _dataflow_cond_valid_66)) begin
_dataflow_lessthan_data_74 <= _dataflow__delay_data_185 < _dataflow_cond_data_66;
end
if(_dataflow_lessthan_valid_74 && _dataflow_lessthan_ready_74) begin
_dataflow_lessthan_valid_74 <= 0;
end
if((_dataflow_lessthan_ready_74 || !_dataflow_lessthan_valid_74) && (_dataflow__delay_ready_185 && _dataflow_cond_ready_66)) begin
_dataflow_lessthan_valid_74 <= _dataflow__delay_valid_185 && _dataflow_cond_valid_66;
end
// _delay_172: buffers cond_45 (pairs with lessthan_59)
if((_dataflow__delay_ready_172 || !_dataflow__delay_valid_172) && _dataflow_cond_ready_45 && _dataflow_cond_valid_45) begin
_dataflow__delay_data_172 <= _dataflow_cond_data_45;
end
if(_dataflow__delay_valid_172 && _dataflow__delay_ready_172) begin
_dataflow__delay_valid_172 <= 0;
end
if((_dataflow__delay_ready_172 || !_dataflow__delay_valid_172) && _dataflow_cond_ready_45) begin
_dataflow__delay_valid_172 <= _dataflow_cond_valid_45;
end
// _delay_173: buffers cond_58 (pairs with lessthan_59)
if((_dataflow__delay_ready_173 || !_dataflow__delay_valid_173) && _dataflow_cond_ready_58 && _dataflow_cond_valid_58) begin
_dataflow__delay_data_173 <= _dataflow_cond_data_58;
end
if(_dataflow__delay_valid_173 && _dataflow__delay_ready_173) begin
_dataflow__delay_valid_173 <= 0;
end
if((_dataflow__delay_ready_173 || !_dataflow__delay_valid_173) && _dataflow_cond_ready_58) begin
_dataflow__delay_valid_173 <= _dataflow_cond_valid_58;
end
if((_dataflow__delay_ready_180 || !_dataflow__delay_valid_180) && _dataflow_cond_ready_57 && _dataflow_cond_valid_57) begin
_dataflow__delay_data_180 <= _dataflow_cond_data_57;
end
if(_dataflow__delay_valid_180 && _dataflow__delay_ready_180) begin
_dataflow__delay_valid_180 <= 0;
end
if((_dataflow__delay_ready_180 || !_dataflow__delay_valid_180) && _dataflow_cond_ready_57) begin
_dataflow__delay_valid_180 <= _dataflow_cond_valid_57;
end
if((_dataflow__delay_ready_181 || !_dataflow__delay_valid_181) && _dataflow_cond_ready_67 && _dataflow_cond_valid_67) begin
_dataflow__delay_data_181 <= _dataflow_cond_data_67;
end
if(_dataflow__delay_valid_181 && _dataflow__delay_ready_181) begin
_dataflow__delay_valid_181 <= 0;
end
if((_dataflow__delay_ready_181 || !_dataflow__delay_valid_181) && _dataflow_cond_ready_67) begin
_dataflow__delay_valid_181 <= _dataflow_cond_valid_67;
end
if((_dataflow__delay_ready_186 || !_dataflow__delay_valid_186) && _dataflow_cond_ready_66 && _dataflow_cond_valid_66) begin
_dataflow__delay_data_186 <= _dataflow_cond_data_66;
end
if(_dataflow__delay_valid_186 && _dataflow__delay_ready_186) begin
_dataflow__delay_valid_186 <= 0;
end
if((_dataflow__delay_ready_186 || !_dataflow__delay_valid_186) && _dataflow_cond_ready_66) begin
_dataflow__delay_valid_186 <= _dataflow_cond_valid_66;
end
if((_dataflow__delay_ready_187 || !_dataflow__delay_valid_187) && _dataflow__delay_ready_185 && _dataflow__delay_valid_185) begin
_dataflow__delay_data_187 <= _dataflow__delay_data_185;
end
if(_dataflow__delay_valid_187 && _dataflow__delay_ready_187) begin
_dataflow__delay_valid_187 <= 0;
end
if((_dataflow__delay_ready_187 || !_dataflow__delay_valid_187) && _dataflow__delay_ready_185) begin
_dataflow__delay_valid_187 <= _dataflow__delay_valid_185;
end
if((_dataflow__delay_ready_204 || !_dataflow__delay_valid_204) && _dataflow__delay_ready_203 && _dataflow__delay_valid_203) begin
_dataflow__delay_data_204 <= _dataflow__delay_data_203;
end
if(_dataflow__delay_valid_204 && _dataflow__delay_ready_204) begin
_dataflow__delay_valid_204 <= 0;
end
if((_dataflow__delay_ready_204 || !_dataflow__delay_valid_204) && _dataflow__delay_ready_203) begin
_dataflow__delay_valid_204 <= _dataflow__delay_valid_203;
end
if((_dataflow__delay_ready_214 || !_dataflow__delay_valid_214) && _dataflow_cond_ready_46 && _dataflow_cond_valid_46) begin
_dataflow__delay_data_214 <= _dataflow_cond_data_46;
end
if(_dataflow__delay_valid_214 && _dataflow__delay_ready_214) begin
_dataflow__delay_valid_214 <= 0;
end
if((_dataflow__delay_ready_214 || !_dataflow__delay_valid_214) && _dataflow_cond_ready_46) begin
_dataflow__delay_valid_214 <= _dataflow_cond_valid_46;
end
if((_dataflow_cond_ready_60 || !_dataflow_cond_valid_60) && (_dataflow_lessthan_ready_59 && _dataflow__delay_ready_173 && _dataflow__delay_ready_172) && (_dataflow_lessthan_valid_59 && _dataflow__delay_valid_173 && _dataflow__delay_valid_172)) begin
_dataflow_cond_data_60 <= (_dataflow_lessthan_data_59)? _dataflow__delay_data_173 : _dataflow__delay_data_172;
end
if(_dataflow_cond_valid_60 && _dataflow_cond_ready_60) begin
_dataflow_cond_valid_60 <= 0;
end
if((_dataflow_cond_ready_60 || !_dataflow_cond_valid_60) && (_dataflow_lessthan_ready_59 && _dataflow__delay_ready_173 && _dataflow__delay_ready_172)) begin
_dataflow_cond_valid_60 <= _dataflow_lessthan_valid_59 && _dataflow__delay_valid_173 && _dataflow__delay_valid_172;
end
if((_dataflow_cond_ready_61 || !_dataflow_cond_valid_61) && (_dataflow_lessthan_ready_59 && _dataflow__delay_ready_172 && _dataflow__delay_ready_173) && (_dataflow_lessthan_valid_59 && _dataflow__delay_valid_172 && _dataflow__delay_valid_173)) begin
_dataflow_cond_data_61 <= (_dataflow_lessthan_data_59)? _dataflow__delay_data_172 : _dataflow__delay_data_173;
end
if(_dataflow_cond_valid_61 && _dataflow_cond_ready_61) begin
_dataflow_cond_valid_61 <= 0;
end
if((_dataflow_cond_ready_61 || !_dataflow_cond_valid_61) && (_dataflow_lessthan_ready_59 && _dataflow__delay_ready_172 && _dataflow__delay_ready_173)) begin
_dataflow_cond_valid_61 <= _dataflow_lessthan_valid_59 && _dataflow__delay_valid_172 && _dataflow__delay_valid_173;
end
if((_dataflow_cond_ready_69 || !_dataflow_cond_valid_69) && (_dataflow_lessthan_ready_68 && _dataflow__delay_ready_181 && _dataflow__delay_ready_180) && (_dataflow_lessthan_valid_68 && _dataflow__delay_valid_181 && _dataflow__delay_valid_180)) begin
_dataflow_cond_data_69 <= (_dataflow_lessthan_data_68)? _dataflow__delay_data_181 : _dataflow__delay_data_180;
end
if(_dataflow_cond_valid_69 && _dataflow_cond_ready_69) begin
_dataflow_cond_valid_69 <= 0;
end
if((_dataflow_cond_ready_69 || !_dataflow_cond_valid_69) && (_dataflow_lessthan_ready_68 && _dataflow__delay_ready_181 && _dataflow__delay_ready_180)) begin
_dataflow_cond_valid_69 <= _dataflow_lessthan_valid_68 && _dataflow__delay_valid_181 && _dataflow__delay_valid_180;
end
if((_dataflow_cond_ready_70 || !_dataflow_cond_valid_70) && (_dataflow_lessthan_ready_68 && _dataflow__delay_ready_180 && _dataflow__delay_ready_181) && (_dataflow_lessthan_valid_68 && _dataflow__delay_valid_180 && _dataflow__delay_valid_181)) begin
_dataflow_cond_data_70 <= (_dataflow_lessthan_data_68)? _dataflow__delay_data_180 : _dataflow__delay_data_181;
end
if(_dataflow_cond_valid_70 && _dataflow_cond_ready_70) begin
_dataflow_cond_valid_70 <= 0;
end
if((_dataflow_cond_ready_70 || !_dataflow_cond_valid_70) && (_dataflow_lessthan_ready_68 && _dataflow__delay_ready_180 && _dataflow__delay_ready_181)) begin
_dataflow_cond_valid_70 <= _dataflow_lessthan_valid_68 && _dataflow__delay_valid_180 && _dataflow__delay_valid_181;
end
if((_dataflow_cond_ready_75 || !_dataflow_cond_valid_75) && (_dataflow_lessthan_ready_74 && _dataflow__delay_ready_187 && _dataflow__delay_ready_186) && (_dataflow_lessthan_valid_74 && _dataflow__delay_valid_187 && _dataflow__delay_valid_186)) begin
_dataflow_cond_data_75 <= (_dataflow_lessthan_data_74)? _dataflow__delay_data_187 : _dataflow__delay_data_186;
end
if(_dataflow_cond_valid_75 && _dataflow_cond_ready_75) begin
_dataflow_cond_valid_75 <= 0;
end
if((_dataflow_cond_ready_75 || !_dataflow_cond_valid_75) && (_dataflow_lessthan_ready_74 && _dataflow__delay_ready_187 && _dataflow__delay_ready_186)) begin
_dataflow_cond_valid_75 <= _dataflow_lessthan_valid_74 && _dataflow__delay_valid_187 && _dataflow__delay_valid_186;
end
if((_dataflow_cond_ready_76 || !_dataflow_cond_valid_76) && (_dataflow_lessthan_ready_74 && _dataflow__delay_ready_186 && _dataflow__delay_ready_187) && (_dataflow_lessthan_valid_74 && _dataflow__delay_valid_186 && _dataflow__delay_valid_187)) begin
_dataflow_cond_data_76 <= (_dataflow_lessthan_data_74)? _dataflow__delay_data_186 : _dataflow__delay_data_187;
end
if(_dataflow_cond_valid_76 && _dataflow_cond_ready_76) begin
_dataflow_cond_valid_76 <= 0;
end
if((_dataflow_cond_ready_76 || !_dataflow_cond_valid_76) && (_dataflow_lessthan_ready_74 && _dataflow__delay_ready_186 && _dataflow__delay_ready_187)) begin
_dataflow_cond_valid_76 <= _dataflow_lessthan_valid_74 && _dataflow__delay_valid_186 && _dataflow__delay_valid_187;
end
if((_dataflow__delay_ready_205 || !_dataflow__delay_valid_205) && _dataflow__delay_ready_204 && _dataflow__delay_valid_204) begin
_dataflow__delay_data_205 <= _dataflow__delay_data_204;
end
if(_dataflow__delay_valid_205 && _dataflow__delay_ready_205) begin
_dataflow__delay_valid_205 <= 0;
end
if((_dataflow__delay_ready_205 || !_dataflow__delay_valid_205) && _dataflow__delay_ready_204) begin
_dataflow__delay_valid_205 <= _dataflow__delay_valid_204;
end
if((_dataflow__delay_ready_215 || !_dataflow__delay_valid_215) && _dataflow__delay_ready_214 && _dataflow__delay_valid_214) begin
_dataflow__delay_data_215 <= _dataflow__delay_data_214;
end
if(_dataflow__delay_valid_215 && _dataflow__delay_ready_215) begin
_dataflow__delay_valid_215 <= 0;
end
if((_dataflow__delay_ready_215 || !_dataflow__delay_valid_215) && _dataflow__delay_ready_214) begin
_dataflow__delay_valid_215 <= _dataflow__delay_valid_214;
end
if((_dataflow_lessthan_ready_71 || !_dataflow_lessthan_valid_71) && (_dataflow_cond_ready_70 && _dataflow_cond_ready_60) && (_dataflow_cond_valid_70 && _dataflow_cond_valid_60)) begin
_dataflow_lessthan_data_71 <= _dataflow_cond_data_70 < _dataflow_cond_data_60;
end
if(_dataflow_lessthan_valid_71 && _dataflow_lessthan_ready_71) begin
_dataflow_lessthan_valid_71 <= 0;
end
if((_dataflow_lessthan_ready_71 || !_dataflow_lessthan_valid_71) && (_dataflow_cond_ready_70 && _dataflow_cond_ready_60)) begin
_dataflow_lessthan_valid_71 <= _dataflow_cond_valid_70 && _dataflow_cond_valid_60;
end
if((_dataflow_lessthan_ready_77 || !_dataflow_lessthan_valid_77) && (_dataflow_cond_ready_76 && _dataflow_cond_ready_69) && (_dataflow_cond_valid_76 && _dataflow_cond_valid_69)) begin
_dataflow_lessthan_data_77 <= _dataflow_cond_data_76 < _dataflow_cond_data_69;
end
if(_dataflow_lessthan_valid_77 && _dataflow_lessthan_ready_77) begin
_dataflow_lessthan_valid_77 <= 0;
end
if((_dataflow_lessthan_ready_77 || !_dataflow_lessthan_valid_77) && (_dataflow_cond_ready_76 && _dataflow_cond_ready_69)) begin
_dataflow_lessthan_valid_77 <= _dataflow_cond_valid_76 && _dataflow_cond_valid_69;
end
if((_dataflow__delay_ready_182 || !_dataflow__delay_valid_182) && _dataflow_cond_ready_60 && _dataflow_cond_valid_60) begin
_dataflow__delay_data_182 <= _dataflow_cond_data_60;
end
if(_dataflow__delay_valid_182 && _dataflow__delay_ready_182) begin
_dataflow__delay_valid_182 <= 0;
end
if((_dataflow__delay_ready_182 || !_dataflow__delay_valid_182) && _dataflow_cond_ready_60) begin
_dataflow__delay_valid_182 <= _dataflow_cond_valid_60;
end
if((_dataflow__delay_ready_183 || !_dataflow__delay_valid_183) && _dataflow_cond_ready_70 && _dataflow_cond_valid_70) begin
_dataflow__delay_data_183 <= _dataflow_cond_data_70;
end
if(_dataflow__delay_valid_183 && _dataflow__delay_ready_183) begin
_dataflow__delay_valid_183 <= 0;
end
if((_dataflow__delay_ready_183 || !_dataflow__delay_valid_183) && _dataflow_cond_ready_70) begin
_dataflow__delay_valid_183 <= _dataflow_cond_valid_70;
end
if((_dataflow__delay_ready_188 || !_dataflow__delay_valid_188) && _dataflow_cond_ready_69 && _dataflow_cond_valid_69) begin
_dataflow__delay_data_188 <= _dataflow_cond_data_69;
end
if(_dataflow__delay_valid_188 && _dataflow__delay_ready_188) begin
_dataflow__delay_valid_188 <= 0;
end
if((_dataflow__delay_ready_188 || !_dataflow__delay_valid_188) && _dataflow_cond_ready_69) begin
_dataflow__delay_valid_188 <= _dataflow_cond_valid_69;
end
if((_dataflow__delay_ready_189 || !_dataflow__delay_valid_189) && _dataflow_cond_ready_76 && _dataflow_cond_valid_76) begin
_dataflow__delay_data_189 <= _dataflow_cond_data_76;
end
if(_dataflow__delay_valid_189 && _dataflow__delay_ready_189) begin
_dataflow__delay_valid_189 <= 0;
end
if((_dataflow__delay_ready_189 || !_dataflow__delay_valid_189) && _dataflow_cond_ready_76) begin
_dataflow__delay_valid_189 <= _dataflow_cond_valid_76;
end
if((_dataflow__delay_ready_192 || !_dataflow__delay_valid_192) && _dataflow_cond_ready_75 && _dataflow_cond_valid_75) begin
_dataflow__delay_data_192 <= _dataflow_cond_data_75;
end
if(_dataflow__delay_valid_192 && _dataflow__delay_ready_192) begin
_dataflow__delay_valid_192 <= 0;
end
if((_dataflow__delay_ready_192 || !_dataflow__delay_valid_192) && _dataflow_cond_ready_75) begin
_dataflow__delay_valid_192 <= _dataflow_cond_valid_75;
end
if((_dataflow__delay_ready_206 || !_dataflow__delay_valid_206) && _dataflow__delay_ready_205 && _dataflow__delay_valid_205) begin
_dataflow__delay_data_206 <= _dataflow__delay_data_205;
end
if(_dataflow__delay_valid_206 && _dataflow__delay_ready_206) begin
_dataflow__delay_valid_206 <= 0;
end
if((_dataflow__delay_ready_206 || !_dataflow__delay_valid_206) && _dataflow__delay_ready_205) begin
_dataflow__delay_valid_206 <= _dataflow__delay_valid_205;
end
if((_dataflow__delay_ready_216 || !_dataflow__delay_valid_216) && _dataflow__delay_ready_215 && _dataflow__delay_valid_215) begin
_dataflow__delay_data_216 <= _dataflow__delay_data_215;
end
if(_dataflow__delay_valid_216 && _dataflow__delay_ready_216) begin
_dataflow__delay_valid_216 <= 0;
end
if((_dataflow__delay_ready_216 || !_dataflow__delay_valid_216) && _dataflow__delay_ready_215) begin
_dataflow__delay_valid_216 <= _dataflow__delay_valid_215;
end
if((_dataflow__delay_ready_224 || !_dataflow__delay_valid_224) && _dataflow_cond_ready_61 && _dataflow_cond_valid_61) begin
_dataflow__delay_data_224 <= _dataflow_cond_data_61;
end
if(_dataflow__delay_valid_224 && _dataflow__delay_ready_224) begin
_dataflow__delay_valid_224 <= 0;
end
if((_dataflow__delay_ready_224 || !_dataflow__delay_valid_224) && _dataflow_cond_ready_61) begin
_dataflow__delay_valid_224 <= _dataflow_cond_valid_61;
end
if((_dataflow_cond_ready_72 || !_dataflow_cond_valid_72) && (_dataflow_lessthan_ready_71 && _dataflow__delay_ready_183 && _dataflow__delay_ready_182) && (_dataflow_lessthan_valid_71 && _dataflow__delay_valid_183 && _dataflow__delay_valid_182)) begin
_dataflow_cond_data_72 <= (_dataflow_lessthan_data_71)? _dataflow__delay_data_183 : _dataflow__delay_data_182;
end
if(_dataflow_cond_valid_72 && _dataflow_cond_ready_72) begin
_dataflow_cond_valid_72 <= 0;
end
if((_dataflow_cond_ready_72 || !_dataflow_cond_valid_72) && (_dataflow_lessthan_ready_71 && _dataflow__delay_ready_183 && _dataflow__delay_ready_182)) begin
_dataflow_cond_valid_72 <= _dataflow_lessthan_valid_71 && _dataflow__delay_valid_183 && _dataflow__delay_valid_182;
end
if((_dataflow_cond_ready_73 || !_dataflow_cond_valid_73) && (_dataflow_lessthan_ready_71 && _dataflow__delay_ready_182 && _dataflow__delay_ready_183) && (_dataflow_lessthan_valid_71 && _dataflow__delay_valid_182 && _dataflow__delay_valid_183)) begin
_dataflow_cond_data_73 <= (_dataflow_lessthan_data_71)? _dataflow__delay_data_182 : _dataflow__delay_data_183;
end
if(_dataflow_cond_valid_73 && _dataflow_cond_ready_73) begin
_dataflow_cond_valid_73 <= 0;
end
if((_dataflow_cond_ready_73 || !_dataflow_cond_valid_73) && (_dataflow_lessthan_ready_71 && _dataflow__delay_ready_182 && _dataflow__delay_ready_183)) begin
_dataflow_cond_valid_73 <= _dataflow_lessthan_valid_71 && _dataflow__delay_valid_182 && _dataflow__delay_valid_183;
end
if((_dataflow_cond_ready_78 || !_dataflow_cond_valid_78) && (_dataflow_lessthan_ready_77 && _dataflow__delay_ready_189 && _dataflow__delay_ready_188) && (_dataflow_lessthan_valid_77 && _dataflow__delay_valid_189 && _dataflow__delay_valid_188)) begin
_dataflow_cond_data_78 <= (_dataflow_lessthan_data_77)? _dataflow__delay_data_189 : _dataflow__delay_data_188;
end
if(_dataflow_cond_valid_78 && _dataflow_cond_ready_78) begin
_dataflow_cond_valid_78 <= 0;
end
if((_dataflow_cond_ready_78 || !_dataflow_cond_valid_78) && (_dataflow_lessthan_ready_77 && _dataflow__delay_ready_189 && _dataflow__delay_ready_188)) begin
_dataflow_cond_valid_78 <= _dataflow_lessthan_valid_77 && _dataflow__delay_valid_189 && _dataflow__delay_valid_188;
end
if((_dataflow_cond_ready_79 || !_dataflow_cond_valid_79) && (_dataflow_lessthan_ready_77 && _dataflow__delay_ready_188 && _dataflow__delay_ready_189) && (_dataflow_lessthan_valid_77 && _dataflow__delay_valid_188 && _dataflow__delay_valid_189)) begin
_dataflow_cond_data_79 <= (_dataflow_lessthan_data_77)? _dataflow__delay_data_188 : _dataflow__delay_data_189;
end
if(_dataflow_cond_valid_79 && _dataflow_cond_ready_79) begin
_dataflow_cond_valid_79 <= 0;
end
if((_dataflow_cond_ready_79 || !_dataflow_cond_valid_79) && (_dataflow_lessthan_ready_77 && _dataflow__delay_ready_188 && _dataflow__delay_ready_189)) begin
_dataflow_cond_valid_79 <= _dataflow_lessthan_valid_77 && _dataflow__delay_valid_188 && _dataflow__delay_valid_189;
end
if((_dataflow__delay_ready_193 || !_dataflow__delay_valid_193) && _dataflow__delay_ready_192 && _dataflow__delay_valid_192) begin
_dataflow__delay_data_193 <= _dataflow__delay_data_192;
end
if(_dataflow__delay_valid_193 && _dataflow__delay_ready_193) begin
_dataflow__delay_valid_193 <= 0;
end
if((_dataflow__delay_ready_193 || !_dataflow__delay_valid_193) && _dataflow__delay_ready_192) begin
_dataflow__delay_valid_193 <= _dataflow__delay_valid_192;
end
if((_dataflow__delay_ready_207 || !_dataflow__delay_valid_207) && _dataflow__delay_ready_206 && _dataflow__delay_valid_206) begin
_dataflow__delay_data_207 <= _dataflow__delay_data_206;
end
if(_dataflow__delay_valid_207 && _dataflow__delay_ready_207) begin
_dataflow__delay_valid_207 <= 0;
end
if((_dataflow__delay_ready_207 || !_dataflow__delay_valid_207) && _dataflow__delay_ready_206) begin
_dataflow__delay_valid_207 <= _dataflow__delay_valid_206;
end
if((_dataflow__delay_ready_217 || !_dataflow__delay_valid_217) && _dataflow__delay_ready_216 && _dataflow__delay_valid_216) begin
_dataflow__delay_data_217 <= _dataflow__delay_data_216;
end
if(_dataflow__delay_valid_217 && _dataflow__delay_ready_217) begin
_dataflow__delay_valid_217 <= 0;
end
if((_dataflow__delay_ready_217 || !_dataflow__delay_valid_217) && _dataflow__delay_ready_216) begin
_dataflow__delay_valid_217 <= _dataflow__delay_valid_216;
end
if((_dataflow__delay_ready_225 || !_dataflow__delay_valid_225) && _dataflow__delay_ready_224 && _dataflow__delay_valid_224) begin
_dataflow__delay_data_225 <= _dataflow__delay_data_224;
end
if(_dataflow__delay_valid_225 && _dataflow__delay_ready_225) begin
_dataflow__delay_valid_225 <= 0;
end
if((_dataflow__delay_ready_225 || !_dataflow__delay_valid_225) && _dataflow__delay_ready_224) begin
_dataflow__delay_valid_225 <= _dataflow__delay_valid_224;
end
if((_dataflow_lessthan_ready_80 || !_dataflow_lessthan_valid_80) && (_dataflow_cond_ready_79 && _dataflow_cond_ready_72) && (_dataflow_cond_valid_79 && _dataflow_cond_valid_72)) begin
_dataflow_lessthan_data_80 <= _dataflow_cond_data_79 < _dataflow_cond_data_72;
end
if(_dataflow_lessthan_valid_80 && _dataflow_lessthan_ready_80) begin
_dataflow_lessthan_valid_80 <= 0;
end
if((_dataflow_lessthan_ready_80 || !_dataflow_lessthan_valid_80) && (_dataflow_cond_ready_79 && _dataflow_cond_ready_72)) begin
_dataflow_lessthan_valid_80 <= _dataflow_cond_valid_79 && _dataflow_cond_valid_72;
end
if((_dataflow_lessthan_ready_83 || !_dataflow_lessthan_valid_83) && (_dataflow__delay_ready_193 && _dataflow_cond_ready_78) && (_dataflow__delay_valid_193 && _dataflow_cond_valid_78)) begin
_dataflow_lessthan_data_83 <= _dataflow__delay_data_193 < _dataflow_cond_data_78;
end
if(_dataflow_lessthan_valid_83 && _dataflow_lessthan_ready_83) begin
_dataflow_lessthan_valid_83 <= 0;
end
if((_dataflow_lessthan_ready_83 || !_dataflow_lessthan_valid_83) && (_dataflow__delay_ready_193 && _dataflow_cond_ready_78)) begin
_dataflow_lessthan_valid_83 <= _dataflow__delay_valid_193 && _dataflow_cond_valid_78;
end
if((_dataflow__delay_ready_190 || !_dataflow__delay_valid_190) && _dataflow_cond_ready_72 && _dataflow_cond_valid_72) begin
_dataflow__delay_data_190 <= _dataflow_cond_data_72;
end
if(_dataflow__delay_valid_190 && _dataflow__delay_ready_190) begin
_dataflow__delay_valid_190 <= 0;
end
if((_dataflow__delay_ready_190 || !_dataflow__delay_valid_190) && _dataflow_cond_ready_72) begin
_dataflow__delay_valid_190 <= _dataflow_cond_valid_72;
end
if((_dataflow__delay_ready_191 || !_dataflow__delay_valid_191) && _dataflow_cond_ready_79 && _dataflow_cond_valid_79) begin
_dataflow__delay_data_191 <= _dataflow_cond_data_79;
end
if(_dataflow__delay_valid_191 && _dataflow__delay_ready_191) begin
_dataflow__delay_valid_191 <= 0;
end
if((_dataflow__delay_ready_191 || !_dataflow__delay_valid_191) && _dataflow_cond_ready_79) begin
_dataflow__delay_valid_191 <= _dataflow_cond_valid_79;
end
if((_dataflow__delay_ready_194 || !_dataflow__delay_valid_194) && _dataflow_cond_ready_78 && _dataflow_cond_valid_78) begin
_dataflow__delay_data_194 <= _dataflow_cond_data_78;
end
if(_dataflow__delay_valid_194 && _dataflow__delay_ready_194) begin
_dataflow__delay_valid_194 <= 0;
end
if((_dataflow__delay_ready_194 || !_dataflow__delay_valid_194) && _dataflow_cond_ready_78) begin
_dataflow__delay_valid_194 <= _dataflow_cond_valid_78;
end
if((_dataflow__delay_ready_195 || !_dataflow__delay_valid_195) && _dataflow__delay_ready_193 && _dataflow__delay_valid_193) begin
_dataflow__delay_data_195 <= _dataflow__delay_data_193;
end
if(_dataflow__delay_valid_195 && _dataflow__delay_ready_195) begin
_dataflow__delay_valid_195 <= 0;
end
if((_dataflow__delay_ready_195 || !_dataflow__delay_valid_195) && _dataflow__delay_ready_193) begin
_dataflow__delay_valid_195 <= _dataflow__delay_valid_193;
end
if((_dataflow__delay_ready_208 || !_dataflow__delay_valid_208) && _dataflow__delay_ready_207 && _dataflow__delay_valid_207) begin
_dataflow__delay_data_208 <= _dataflow__delay_data_207;
end
if(_dataflow__delay_valid_208 && _dataflow__delay_ready_208) begin
_dataflow__delay_valid_208 <= 0;
end
if((_dataflow__delay_ready_208 || !_dataflow__delay_valid_208) && _dataflow__delay_ready_207) begin
_dataflow__delay_valid_208 <= _dataflow__delay_valid_207;
end
if((_dataflow__delay_ready_218 || !_dataflow__delay_valid_218) && _dataflow__delay_ready_217 && _dataflow__delay_valid_217) begin
_dataflow__delay_data_218 <= _dataflow__delay_data_217;
end
if(_dataflow__delay_valid_218 && _dataflow__delay_ready_218) begin
_dataflow__delay_valid_218 <= 0;
end
if((_dataflow__delay_ready_218 || !_dataflow__delay_valid_218) && _dataflow__delay_ready_217) begin
_dataflow__delay_valid_218 <= _dataflow__delay_valid_217;
end
if((_dataflow__delay_ready_226 || !_dataflow__delay_valid_226) && _dataflow__delay_ready_225 && _dataflow__delay_valid_225) begin
_dataflow__delay_data_226 <= _dataflow__delay_data_225;
end
if(_dataflow__delay_valid_226 && _dataflow__delay_ready_226) begin
_dataflow__delay_valid_226 <= 0;
end
if((_dataflow__delay_ready_226 || !_dataflow__delay_valid_226) && _dataflow__delay_ready_225) begin
_dataflow__delay_valid_226 <= _dataflow__delay_valid_225;
end
if((_dataflow__delay_ready_232 || !_dataflow__delay_valid_232) && _dataflow_cond_ready_73 && _dataflow_cond_valid_73) begin
_dataflow__delay_data_232 <= _dataflow_cond_data_73;
end
if(_dataflow__delay_valid_232 && _dataflow__delay_ready_232) begin
_dataflow__delay_valid_232 <= 0;
end
if((_dataflow__delay_ready_232 || !_dataflow__delay_valid_232) && _dataflow_cond_ready_73) begin
_dataflow__delay_valid_232 <= _dataflow_cond_valid_73;
end
if((_dataflow_cond_ready_81 || !_dataflow_cond_valid_81) && (_dataflow_lessthan_ready_80 && _dataflow__delay_ready_191 && _dataflow__delay_ready_190) && (_dataflow_lessthan_valid_80 && _dataflow__delay_valid_191 && _dataflow__delay_valid_190)) begin
_dataflow_cond_data_81 <= (_dataflow_lessthan_data_80)? _dataflow__delay_data_191 : _dataflow__delay_data_190;
end
if(_dataflow_cond_valid_81 && _dataflow_cond_ready_81) begin
_dataflow_cond_valid_81 <= 0;
end
if((_dataflow_cond_ready_81 || !_dataflow_cond_valid_81) && (_dataflow_lessthan_ready_80 && _dataflow__delay_ready_191 && _dataflow__delay_ready_190)) begin
_dataflow_cond_valid_81 <= _dataflow_lessthan_valid_80 && _dataflow__delay_valid_191 && _dataflow__delay_valid_190;
end
if((_dataflow_cond_ready_82 || !_dataflow_cond_valid_82) && (_dataflow_lessthan_ready_80 && _dataflow__delay_ready_190 && _dataflow__delay_ready_191) && (_dataflow_lessthan_valid_80 && _dataflow__delay_valid_190 && _dataflow__delay_valid_191)) begin
_dataflow_cond_data_82 <= (_dataflow_lessthan_data_80)? _dataflow__delay_data_190 : _dataflow__delay_data_191;
end
if(_dataflow_cond_valid_82 && _dataflow_cond_ready_82) begin
_dataflow_cond_valid_82 <= 0;
end
if((_dataflow_cond_ready_82 || !_dataflow_cond_valid_82) && (_dataflow_lessthan_ready_80 && _dataflow__delay_ready_190 && _dataflow__delay_ready_191)) begin
_dataflow_cond_valid_82 <= _dataflow_lessthan_valid_80 && _dataflow__delay_valid_190 && _dataflow__delay_valid_191;
end
if((_dataflow_cond_ready_84 || !_dataflow_cond_valid_84) && (_dataflow_lessthan_ready_83 && _dataflow__delay_ready_195 && _dataflow__delay_ready_194) && (_dataflow_lessthan_valid_83 && _dataflow__delay_valid_195 && _dataflow__delay_valid_194)) begin
_dataflow_cond_data_84 <= (_dataflow_lessthan_data_83)? _dataflow__delay_data_195 : _dataflow__delay_data_194;
end
if(_dataflow_cond_valid_84 && _dataflow_cond_ready_84) begin
_dataflow_cond_valid_84 <= 0;
end
if((_dataflow_cond_ready_84 || !_dataflow_cond_valid_84) && (_dataflow_lessthan_ready_83 && _dataflow__delay_ready_195 && _dataflow__delay_ready_194)) begin
_dataflow_cond_valid_84 <= _dataflow_lessthan_valid_83 && _dataflow__delay_valid_195 && _dataflow__delay_valid_194;
end
if((_dataflow_cond_ready_85 || !_dataflow_cond_valid_85) && (_dataflow_lessthan_ready_83 && _dataflow__delay_ready_194 && _dataflow__delay_ready_195) && (_dataflow_lessthan_valid_83 && _dataflow__delay_valid_194 && _dataflow__delay_valid_195)) begin
_dataflow_cond_data_85 <= (_dataflow_lessthan_data_83)? _dataflow__delay_data_194 : _dataflow__delay_data_195;
end
if(_dataflow_cond_valid_85 && _dataflow_cond_ready_85) begin
_dataflow_cond_valid_85 <= 0;
end
if((_dataflow_cond_ready_85 || !_dataflow_cond_valid_85) && (_dataflow_lessthan_ready_83 && _dataflow__delay_ready_194 && _dataflow__delay_ready_195)) begin
_dataflow_cond_valid_85 <= _dataflow_lessthan_valid_83 && _dataflow__delay_valid_194 && _dataflow__delay_valid_195;
end
if((_dataflow__delay_ready_209 || !_dataflow__delay_valid_209) && _dataflow__delay_ready_208 && _dataflow__delay_valid_208) begin
_dataflow__delay_data_209 <= _dataflow__delay_data_208;
end
if(_dataflow__delay_valid_209 && _dataflow__delay_ready_209) begin
_dataflow__delay_valid_209 <= 0;
end
if((_dataflow__delay_ready_209 || !_dataflow__delay_valid_209) && _dataflow__delay_ready_208) begin
_dataflow__delay_valid_209 <= _dataflow__delay_valid_208;
end
if((_dataflow__delay_ready_219 || !_dataflow__delay_valid_219) && _dataflow__delay_ready_218 && _dataflow__delay_valid_218) begin
_dataflow__delay_data_219 <= _dataflow__delay_data_218;
end
if(_dataflow__delay_valid_219 && _dataflow__delay_ready_219) begin
_dataflow__delay_valid_219 <= 0;
end
if((_dataflow__delay_ready_219 || !_dataflow__delay_valid_219) && _dataflow__delay_ready_218) begin
_dataflow__delay_valid_219 <= _dataflow__delay_valid_218;
end
if((_dataflow__delay_ready_227 || !_dataflow__delay_valid_227) && _dataflow__delay_ready_226 && _dataflow__delay_valid_226) begin
_dataflow__delay_data_227 <= _dataflow__delay_data_226;
end
if(_dataflow__delay_valid_227 && _dataflow__delay_ready_227) begin
_dataflow__delay_valid_227 <= 0;
end
if((_dataflow__delay_ready_227 || !_dataflow__delay_valid_227) && _dataflow__delay_ready_226) begin
_dataflow__delay_valid_227 <= _dataflow__delay_valid_226;
end
if((_dataflow__delay_ready_233 || !_dataflow__delay_valid_233) && _dataflow__delay_ready_232 && _dataflow__delay_valid_232) begin
_dataflow__delay_data_233 <= _dataflow__delay_data_232;
end
if(_dataflow__delay_valid_233 && _dataflow__delay_ready_233) begin
_dataflow__delay_valid_233 <= 0;
end
if((_dataflow__delay_ready_233 || !_dataflow__delay_valid_233) && _dataflow__delay_ready_232) begin
_dataflow__delay_valid_233 <= _dataflow__delay_valid_232;
end
if((_dataflow_lessthan_ready_86 || !_dataflow_lessthan_valid_86) && (_dataflow_cond_ready_85 && _dataflow_cond_ready_81) && (_dataflow_cond_valid_85 && _dataflow_cond_valid_81)) begin
_dataflow_lessthan_data_86 <= _dataflow_cond_data_85 < _dataflow_cond_data_81;
end
if(_dataflow_lessthan_valid_86 && _dataflow_lessthan_ready_86) begin
_dataflow_lessthan_valid_86 <= 0;
end
if((_dataflow_lessthan_ready_86 || !_dataflow_lessthan_valid_86) && (_dataflow_cond_ready_85 && _dataflow_cond_ready_81)) begin
_dataflow_lessthan_valid_86 <= _dataflow_cond_valid_85 && _dataflow_cond_valid_81;
end
if((_dataflow__delay_ready_196 || !_dataflow__delay_valid_196) && _dataflow_cond_ready_81 && _dataflow_cond_valid_81) begin
_dataflow__delay_data_196 <= _dataflow_cond_data_81;
end
if(_dataflow__delay_valid_196 && _dataflow__delay_ready_196) begin
_dataflow__delay_valid_196 <= 0;
end
if((_dataflow__delay_ready_196 || !_dataflow__delay_valid_196) && _dataflow_cond_ready_81) begin
_dataflow__delay_valid_196 <= _dataflow_cond_valid_81;
end
if((_dataflow__delay_ready_197 || !_dataflow__delay_valid_197) && _dataflow_cond_ready_85 && _dataflow_cond_valid_85) begin
_dataflow__delay_data_197 <= _dataflow_cond_data_85;
end
if(_dataflow__delay_valid_197 && _dataflow__delay_ready_197) begin
_dataflow__delay_valid_197 <= 0;
end
if((_dataflow__delay_ready_197 || !_dataflow__delay_valid_197) && _dataflow_cond_ready_85) begin
_dataflow__delay_valid_197 <= _dataflow_cond_valid_85;
end
if((_dataflow__delay_ready_198 || !_dataflow__delay_valid_198) && _dataflow_cond_ready_84 && _dataflow_cond_valid_84) begin
_dataflow__delay_data_198 <= _dataflow_cond_data_84;
end
if(_dataflow__delay_valid_198 && _dataflow__delay_ready_198) begin
_dataflow__delay_valid_198 <= 0;
end
if((_dataflow__delay_ready_198 || !_dataflow__delay_valid_198) && _dataflow_cond_ready_84) begin
_dataflow__delay_valid_198 <= _dataflow_cond_valid_84;
end
if((_dataflow__delay_ready_210 || !_dataflow__delay_valid_210) && _dataflow__delay_ready_209 && _dataflow__delay_valid_209) begin
_dataflow__delay_data_210 <= _dataflow__delay_data_209;
end
if(_dataflow__delay_valid_210 && _dataflow__delay_ready_210) begin
_dataflow__delay_valid_210 <= 0;
end
if((_dataflow__delay_ready_210 || !_dataflow__delay_valid_210) && _dataflow__delay_ready_209) begin
_dataflow__delay_valid_210 <= _dataflow__delay_valid_209;
end
if((_dataflow__delay_ready_220 || !_dataflow__delay_valid_220) && _dataflow__delay_ready_219 && _dataflow__delay_valid_219) begin
_dataflow__delay_data_220 <= _dataflow__delay_data_219;
end
if(_dataflow__delay_valid_220 && _dataflow__delay_ready_220) begin
_dataflow__delay_valid_220 <= 0;
end
if((_dataflow__delay_ready_220 || !_dataflow__delay_valid_220) && _dataflow__delay_ready_219) begin
_dataflow__delay_valid_220 <= _dataflow__delay_valid_219;
end
if((_dataflow__delay_ready_228 || !_dataflow__delay_valid_228) && _dataflow__delay_ready_227 && _dataflow__delay_valid_227) begin
_dataflow__delay_data_228 <= _dataflow__delay_data_227;
end
if(_dataflow__delay_valid_228 && _dataflow__delay_ready_228) begin
_dataflow__delay_valid_228 <= 0;
end
if((_dataflow__delay_ready_228 || !_dataflow__delay_valid_228) && _dataflow__delay_ready_227) begin
_dataflow__delay_valid_228 <= _dataflow__delay_valid_227;
end
if((_dataflow__delay_ready_234 || !_dataflow__delay_valid_234) && _dataflow__delay_ready_233 && _dataflow__delay_valid_233) begin
_dataflow__delay_data_234 <= _dataflow__delay_data_233;
end
if(_dataflow__delay_valid_234 && _dataflow__delay_ready_234) begin
_dataflow__delay_valid_234 <= 0;
end
if((_dataflow__delay_ready_234 || !_dataflow__delay_valid_234) && _dataflow__delay_ready_233) begin
_dataflow__delay_valid_234 <= _dataflow__delay_valid_233;
end
if((_dataflow__delay_ready_238 || !_dataflow__delay_valid_238) && _dataflow_cond_ready_82 && _dataflow_cond_valid_82) begin
_dataflow__delay_data_238 <= _dataflow_cond_data_82;
end
if(_dataflow__delay_valid_238 && _dataflow__delay_ready_238) begin
_dataflow__delay_valid_238 <= 0;
end
if((_dataflow__delay_ready_238 || !_dataflow__delay_valid_238) && _dataflow_cond_ready_82) begin
_dataflow__delay_valid_238 <= _dataflow_cond_valid_82;
end
if((_dataflow_cond_ready_87 || !_dataflow_cond_valid_87) && (_dataflow_lessthan_ready_86 && _dataflow__delay_ready_197 && _dataflow__delay_ready_196) && (_dataflow_lessthan_valid_86 && _dataflow__delay_valid_197 && _dataflow__delay_valid_196)) begin
_dataflow_cond_data_87 <= (_dataflow_lessthan_data_86)? _dataflow__delay_data_197 : _dataflow__delay_data_196;
end
if(_dataflow_cond_valid_87 && _dataflow_cond_ready_87) begin
_dataflow_cond_valid_87 <= 0;
end
if((_dataflow_cond_ready_87 || !_dataflow_cond_valid_87) && (_dataflow_lessthan_ready_86 && _dataflow__delay_ready_197 && _dataflow__delay_ready_196)) begin
_dataflow_cond_valid_87 <= _dataflow_lessthan_valid_86 && _dataflow__delay_valid_197 && _dataflow__delay_valid_196;
end
if((_dataflow_cond_ready_88 || !_dataflow_cond_valid_88) && (_dataflow_lessthan_ready_86 && _dataflow__delay_ready_196 && _dataflow__delay_ready_197) && (_dataflow_lessthan_valid_86 && _dataflow__delay_valid_196 && _dataflow__delay_valid_197)) begin
_dataflow_cond_data_88 <= (_dataflow_lessthan_data_86)? _dataflow__delay_data_196 : _dataflow__delay_data_197;
end
if(_dataflow_cond_valid_88 && _dataflow_cond_ready_88) begin
_dataflow_cond_valid_88 <= 0;
end
if((_dataflow_cond_ready_88 || !_dataflow_cond_valid_88) && (_dataflow_lessthan_ready_86 && _dataflow__delay_ready_196 && _dataflow__delay_ready_197)) begin
_dataflow_cond_valid_88 <= _dataflow_lessthan_valid_86 && _dataflow__delay_valid_196 && _dataflow__delay_valid_197;
end
if((_dataflow__delay_ready_199 || !_dataflow__delay_valid_199) && _dataflow__delay_ready_198 && _dataflow__delay_valid_198) begin
_dataflow__delay_data_199 <= _dataflow__delay_data_198;
end
if(_dataflow__delay_valid_199 && _dataflow__delay_ready_199) begin
_dataflow__delay_valid_199 <= 0;
end
if((_dataflow__delay_ready_199 || !_dataflow__delay_valid_199) && _dataflow__delay_ready_198) begin
_dataflow__delay_valid_199 <= _dataflow__delay_valid_198;
end
if((_dataflow__delay_ready_211 || !_dataflow__delay_valid_211) && _dataflow__delay_ready_210 && _dataflow__delay_valid_210) begin
_dataflow__delay_data_211 <= _dataflow__delay_data_210;
end
if(_dataflow__delay_valid_211 && _dataflow__delay_ready_211) begin
_dataflow__delay_valid_211 <= 0;
end
if((_dataflow__delay_ready_211 || !_dataflow__delay_valid_211) && _dataflow__delay_ready_210) begin
_dataflow__delay_valid_211 <= _dataflow__delay_valid_210;
end
if((_dataflow__delay_ready_221 || !_dataflow__delay_valid_221) && _dataflow__delay_ready_220 && _dataflow__delay_valid_220) begin
_dataflow__delay_data_221 <= _dataflow__delay_data_220;
end
if(_dataflow__delay_valid_221 && _dataflow__delay_ready_221) begin
_dataflow__delay_valid_221 <= 0;
end
if((_dataflow__delay_ready_221 || !_dataflow__delay_valid_221) && _dataflow__delay_ready_220) begin
_dataflow__delay_valid_221 <= _dataflow__delay_valid_220;
end
if((_dataflow__delay_ready_229 || !_dataflow__delay_valid_229) && _dataflow__delay_ready_228 && _dataflow__delay_valid_228) begin
_dataflow__delay_data_229 <= _dataflow__delay_data_228;
end
if(_dataflow__delay_valid_229 && _dataflow__delay_ready_229) begin
_dataflow__delay_valid_229 <= 0;
end
if((_dataflow__delay_ready_229 || !_dataflow__delay_valid_229) && _dataflow__delay_ready_228) begin
_dataflow__delay_valid_229 <= _dataflow__delay_valid_228;
end
if((_dataflow__delay_ready_235 || !_dataflow__delay_valid_235) && _dataflow__delay_ready_234 && _dataflow__delay_valid_234) begin
_dataflow__delay_data_235 <= _dataflow__delay_data_234;
end
if(_dataflow__delay_valid_235 && _dataflow__delay_ready_235) begin
_dataflow__delay_valid_235 <= 0;
end
if((_dataflow__delay_ready_235 || !_dataflow__delay_valid_235) && _dataflow__delay_ready_234) begin
_dataflow__delay_valid_235 <= _dataflow__delay_valid_234;
end
if((_dataflow__delay_ready_239 || !_dataflow__delay_valid_239) && _dataflow__delay_ready_238 && _dataflow__delay_valid_238) begin
_dataflow__delay_data_239 <= _dataflow__delay_data_238;
end
if(_dataflow__delay_valid_239 && _dataflow__delay_ready_239) begin
_dataflow__delay_valid_239 <= 0;
end
if((_dataflow__delay_ready_239 || !_dataflow__delay_valid_239) && _dataflow__delay_ready_238) begin
_dataflow__delay_valid_239 <= _dataflow__delay_valid_238;
end
if((_dataflow_lessthan_ready_89 || !_dataflow_lessthan_valid_89) && (_dataflow__delay_ready_199 && _dataflow_cond_ready_87) && (_dataflow__delay_valid_199 && _dataflow_cond_valid_87)) begin
_dataflow_lessthan_data_89 <= _dataflow__delay_data_199 < _dataflow_cond_data_87;
end
if(_dataflow_lessthan_valid_89 && _dataflow_lessthan_ready_89) begin
_dataflow_lessthan_valid_89 <= 0;
end
if((_dataflow_lessthan_ready_89 || !_dataflow_lessthan_valid_89) && (_dataflow__delay_ready_199 && _dataflow_cond_ready_87)) begin
_dataflow_lessthan_valid_89 <= _dataflow__delay_valid_199 && _dataflow_cond_valid_87;
end
if((_dataflow__delay_ready_200 || !_dataflow__delay_valid_200) && _dataflow__delay_ready_199 && _dataflow__delay_valid_199) begin
_dataflow__delay_data_200 <= _dataflow__delay_data_199;
end
if(_dataflow__delay_valid_200 && _dataflow__delay_ready_200) begin
_dataflow__delay_valid_200 <= 0;
end
if((_dataflow__delay_ready_200 || !_dataflow__delay_valid_200) && _dataflow__delay_ready_199) begin
_dataflow__delay_valid_200 <= _dataflow__delay_valid_199;
end
if((_dataflow__delay_ready_201 || !_dataflow__delay_valid_201) && _dataflow_cond_ready_87 && _dataflow_cond_valid_87) begin
_dataflow__delay_data_201 <= _dataflow_cond_data_87;
end
if(_dataflow__delay_valid_201 && _dataflow__delay_ready_201) begin
_dataflow__delay_valid_201 <= 0;
end
if((_dataflow__delay_ready_201 || !_dataflow__delay_valid_201) && _dataflow_cond_ready_87) begin
_dataflow__delay_valid_201 <= _dataflow_cond_valid_87;
end
if((_dataflow__delay_ready_212 || !_dataflow__delay_valid_212) && _dataflow__delay_ready_211 && _dataflow__delay_valid_211) begin
_dataflow__delay_data_212 <= _dataflow__delay_data_211;
end
if(_dataflow__delay_valid_212 && _dataflow__delay_ready_212) begin
_dataflow__delay_valid_212 <= 0;
end
if((_dataflow__delay_ready_212 || !_dataflow__delay_valid_212) && _dataflow__delay_ready_211) begin
_dataflow__delay_valid_212 <= _dataflow__delay_valid_211;
end
if((_dataflow__delay_ready_222 || !_dataflow__delay_valid_222) && _dataflow__delay_ready_221 && _dataflow__delay_valid_221) begin
_dataflow__delay_data_222 <= _dataflow__delay_data_221;
end
if(_dataflow__delay_valid_222 && _dataflow__delay_ready_222) begin
_dataflow__delay_valid_222 <= 0;
end
if((_dataflow__delay_ready_222 || !_dataflow__delay_valid_222) && _dataflow__delay_ready_221) begin
_dataflow__delay_valid_222 <= _dataflow__delay_valid_221;
end
if((_dataflow__delay_ready_230 || !_dataflow__delay_valid_230) && _dataflow__delay_ready_229 && _dataflow__delay_valid_229) begin
_dataflow__delay_data_230 <= _dataflow__delay_data_229;
end
if(_dataflow__delay_valid_230 && _dataflow__delay_ready_230) begin
_dataflow__delay_valid_230 <= 0;
end
if((_dataflow__delay_ready_230 || !_dataflow__delay_valid_230) && _dataflow__delay_ready_229) begin
_dataflow__delay_valid_230 <= _dataflow__delay_valid_229;
end
if((_dataflow__delay_ready_236 || !_dataflow__delay_valid_236) && _dataflow__delay_ready_235 && _dataflow__delay_valid_235) begin
_dataflow__delay_data_236 <= _dataflow__delay_data_235;
end
if(_dataflow__delay_valid_236 && _dataflow__delay_ready_236) begin
_dataflow__delay_valid_236 <= 0;
end
if((_dataflow__delay_ready_236 || !_dataflow__delay_valid_236) && _dataflow__delay_ready_235) begin
_dataflow__delay_valid_236 <= _dataflow__delay_valid_235;
end
if((_dataflow__delay_ready_240 || !_dataflow__delay_valid_240) && _dataflow__delay_ready_239 && _dataflow__delay_valid_239) begin
_dataflow__delay_data_240 <= _dataflow__delay_data_239;
end
if(_dataflow__delay_valid_240 && _dataflow__delay_ready_240) begin
_dataflow__delay_valid_240 <= 0;
end
if((_dataflow__delay_ready_240 || !_dataflow__delay_valid_240) && _dataflow__delay_ready_239) begin
_dataflow__delay_valid_240 <= _dataflow__delay_valid_239;
end
if((_dataflow__delay_ready_242 || !_dataflow__delay_valid_242) && _dataflow_cond_ready_88 && _dataflow_cond_valid_88) begin
_dataflow__delay_data_242 <= _dataflow_cond_data_88;
end
if(_dataflow__delay_valid_242 && _dataflow__delay_ready_242) begin
_dataflow__delay_valid_242 <= 0;
end
if((_dataflow__delay_ready_242 || !_dataflow__delay_valid_242) && _dataflow_cond_ready_88) begin
_dataflow__delay_valid_242 <= _dataflow_cond_valid_88;
end
if((_dataflow_cond_ready_90 || !_dataflow_cond_valid_90) && (_dataflow_lessthan_ready_89 && _dataflow__delay_ready_200 && _dataflow__delay_ready_201) && (_dataflow_lessthan_valid_89 && _dataflow__delay_valid_200 && _dataflow__delay_valid_201)) begin
_dataflow_cond_data_90 <= (_dataflow_lessthan_data_89)? _dataflow__delay_data_200 : _dataflow__delay_data_201;
end
if(_dataflow_cond_valid_90 && _dataflow_cond_ready_90) begin
_dataflow_cond_valid_90 <= 0;
end
if((_dataflow_cond_ready_90 || !_dataflow_cond_valid_90) && (_dataflow_lessthan_ready_89 && _dataflow__delay_ready_200 && _dataflow__delay_ready_201)) begin
_dataflow_cond_valid_90 <= _dataflow_lessthan_valid_89 && _dataflow__delay_valid_200 && _dataflow__delay_valid_201;
end
if((_dataflow_cond_ready_91 || !_dataflow_cond_valid_91) && (_dataflow_lessthan_ready_89 && _dataflow__delay_ready_201 && _dataflow__delay_ready_200) && (_dataflow_lessthan_valid_89 && _dataflow__delay_valid_201 && _dataflow__delay_valid_200)) begin
_dataflow_cond_data_91 <= (_dataflow_lessthan_data_89)? _dataflow__delay_data_201 : _dataflow__delay_data_200;
end
if(_dataflow_cond_valid_91 && _dataflow_cond_ready_91) begin
_dataflow_cond_valid_91 <= 0;
end
if((_dataflow_cond_ready_91 || !_dataflow_cond_valid_91) && (_dataflow_lessthan_ready_89 && _dataflow__delay_ready_201 && _dataflow__delay_ready_200)) begin
_dataflow_cond_valid_91 <= _dataflow_lessthan_valid_89 && _dataflow__delay_valid_201 && _dataflow__delay_valid_200;
end
if((_dataflow__delay_ready_213 || !_dataflow__delay_valid_213) && _dataflow__delay_ready_212 && _dataflow__delay_valid_212) begin
_dataflow__delay_data_213 <= _dataflow__delay_data_212;
end
if(_dataflow__delay_valid_213 && _dataflow__delay_ready_213) begin
_dataflow__delay_valid_213 <= 0;
end
if((_dataflow__delay_ready_213 || !_dataflow__delay_valid_213) && _dataflow__delay_ready_212) begin
_dataflow__delay_valid_213 <= _dataflow__delay_valid_212;
end
if((_dataflow__delay_ready_223 || !_dataflow__delay_valid_223) && _dataflow__delay_ready_222 && _dataflow__delay_valid_222) begin
_dataflow__delay_data_223 <= _dataflow__delay_data_222;
end
if(_dataflow__delay_valid_223 && _dataflow__delay_ready_223) begin
_dataflow__delay_valid_223 <= 0;
end
if((_dataflow__delay_ready_223 || !_dataflow__delay_valid_223) && _dataflow__delay_ready_222) begin
_dataflow__delay_valid_223 <= _dataflow__delay_valid_222;
end
if((_dataflow__delay_ready_231 || !_dataflow__delay_valid_231) && _dataflow__delay_ready_230 && _dataflow__delay_valid_230) begin
_dataflow__delay_data_231 <= _dataflow__delay_data_230;
end
if(_dataflow__delay_valid_231 && _dataflow__delay_ready_231) begin
_dataflow__delay_valid_231 <= 0;
end
if((_dataflow__delay_ready_231 || !_dataflow__delay_valid_231) && _dataflow__delay_ready_230) begin
_dataflow__delay_valid_231 <= _dataflow__delay_valid_230;
end
if((_dataflow__delay_ready_237 || !_dataflow__delay_valid_237) && _dataflow__delay_ready_236 && _dataflow__delay_valid_236) begin
_dataflow__delay_data_237 <= _dataflow__delay_data_236;
end
if(_dataflow__delay_valid_237 && _dataflow__delay_ready_237) begin
_dataflow__delay_valid_237 <= 0;
end
if((_dataflow__delay_ready_237 || !_dataflow__delay_valid_237) && _dataflow__delay_ready_236) begin
_dataflow__delay_valid_237 <= _dataflow__delay_valid_236;
end
if((_dataflow__delay_ready_241 || !_dataflow__delay_valid_241) && _dataflow__delay_ready_240 && _dataflow__delay_valid_240) begin
_dataflow__delay_data_241 <= _dataflow__delay_data_240;
end
if(_dataflow__delay_valid_241 && _dataflow__delay_ready_241) begin
_dataflow__delay_valid_241 <= 0;
end
if((_dataflow__delay_ready_241 || !_dataflow__delay_valid_241) && _dataflow__delay_ready_240) begin
_dataflow__delay_valid_241 <= _dataflow__delay_valid_240;
end
if((_dataflow__delay_ready_243 || !_dataflow__delay_valid_243) && _dataflow__delay_ready_242 && _dataflow__delay_valid_242) begin
_dataflow__delay_data_243 <= _dataflow__delay_data_242;
end
if(_dataflow__delay_valid_243 && _dataflow__delay_ready_243) begin
_dataflow__delay_valid_243 <= 0;
end
if((_dataflow__delay_ready_243 || !_dataflow__delay_valid_243) && _dataflow__delay_ready_242) begin
_dataflow__delay_valid_243 <= _dataflow__delay_valid_242;
end
end
end
endmodule
"""
def test():
    """Generated Verilog for dataflow_sort must match the golden reference."""
    veriloggen.reset()
    module = dataflow_sort.mkTest()
    generated = module.to_verilog()
    from pyverilog.vparser.parser import VerilogParser
    from pyverilog.ast_code_generator.codegen import ASTCodeGenerator
    # Normalise the golden source by parsing and re-emitting it so the
    # comparison is insensitive to the literal formatting of the string above.
    reference = ASTCodeGenerator().visit(VerilogParser().parse(expected_verilog))
    assert reference == generated
|
11504778
|
from .algorithm import AStarAlgorithm
from .manager import GridManager
from .memory import AppDatabase
|
11504847
|
import time
import numpy as np
def jacobian_singular_value(args, model, sess, dataset, flag):
    """Evaluate Jacobian singular values of `model` over the full training set
    and print mean/std/max/min statistics.

    `args` is unused here but kept for call-site compatibility.
    """
    print('-- Check Jacobian singular values [{}]'.format(flag))
    t_start = time.time()
    batch_size = 10
    num_train = dataset.num_example['train']
    num_batches = -(-num_train // batch_size)  # ceil(num_train / batch_size)
    generator = dataset.get_generator('epoch', 'train', True)
    per_batch = []
    for _ in range(num_batches):
        batch = dataset.get_next_batch(batch_size, generator)
        feed_dict = {model.inputs[key]: batch[key] for key in ('image', 'label')}
        result = sess.run([model.jsv], feed_dict)
        per_batch.append(result[-1])
    jsv_all = np.concatenate(per_batch)
    jsv_stat = [func(jsv_all) for func in (np.mean, np.std, np.max, np.min)]
    print('Jacobian Singular Value (mean/std/max/min) (t:{:.1f}):'.format(time.time() - t_start))
    print(jsv_stat)
|
11504853
|
from .users import UserViewSet, UserSuccessViewSet
from .token import TokenViewSet
from .article import ArticleViewSet
from .equipment import EquipmentViewSet
from .parity import ParityViewSet, ParityLatestViewSet
from .comment import ArticleCommentViewSet, EquipmentCommentViewSet
from .mobile_app import latest_mobile_app
from .likes import LikeViewSet
from .following import FollowingViewSet
from .event import EventViewSet
from .prediction import PredictionViewSet
from .investment import ManualInvestmentViewSet, AssetViewSet, OnlineInvestmentViewSet, ProfitLossViewSet
from .order import BuyOrderViewSet, StopLossOrderViewSet
from .verify_email import verify_email
from .portfolio import PortfolioViewSet, PortfolioItemViewSet
from .search import SearchViewSet
from .notification import NotificationViewSet
from .recommendation import RecommendationViewSet
from .alert import AlertViewSet
|
11504866
|
class Solution:
    # @param strs, a list of strings
    # @return a list of strings
    def anagrams(self, strs):
        """Return all strings that have at least one anagram partner in strs.

        Output preserves first-appearance order of anagram groups and of
        strings within a group (matches the original implementation).
        """
        groups = {}
        for s in strs:
            groups.setdefault(self.make_key(s), []).append(s)
        res = []
        for group in groups.values():
            # Only groups with 2+ members are anagrams of each other.
            if len(group) > 1:
                res.extend(group)
        return res

    def make_key(self, s):
        """Generate a character-frequency key for s, e.g. 'a2b1' for 'aba'.

        Sorting the counted characters makes the key canonical: two strings
        get the same key iff they are anagrams. Unlike the original a..z scan,
        this also counts non-lowercase characters, so e.g. 'AB' and 'CD' no
        longer collide on an empty key; for lowercase input the output is
        byte-identical to the original.
        """
        counts = {}
        for c in s:
            counts[c] = counts.get(c, 0) + 1
        return ''.join(c + str(counts[c]) for c in sorted(counts))
|
11504922
|
from acqdp.tensor_network import TensorNetwork
import numpy as np
from acqdp.circuit import CNOTGate, CZGate, Circuit, HGate, Measurement, PlusState, State, Trace, ZeroMeas, ZeroState
from demo.QEC.noise_model import add_idle_noise, add_noisy_surface_code
# Noise-model parameters handed to add_noisy_surface_code / add_idle_noise.
# NOTE(review): units are not stated here — T_1_inv/T_phi_inv look like inverse
# times and T_g_*/tau_* like gate/measurement durations (presumably ns);
# confirm against demo.QEC.noise_model before relying on them.
params = {
    'T_1_inv': 1 / 30000.0,
    'T_phi_inv': 1 / 60000.0,
    'p_axis': 1e-4,
    'p_plane': 5e-4,
    'delta_phi': 0.01,
    'T_g_1Q': 20.0,
    'T_g_2Q': 40.0,
    'tau_m': 300.0,
    'tau_d': 300.0,
    'gamma': 0,
    'alpha0': 4,
    'kappa': 1 / 250,
    'chi': 1.3 * 1e-3}

# One-qubit network whose single node is the all-ones vector: measuring with it
# sums over both outcomes, i.e. a "placeholder" measurement whose outcome is
# left open (name 'PH'/'PH_nz' is matched later when fixing/removing nodes).
Z = TensorNetwork([0, 0])
Z.add_node('PH', [0], np.ones(2))
PlaceHolder = Measurement(1, Z, name='PH_nz')
# State with a free open bond of dimension 2 (no tensor attached).
TraceState = State(1, TensorNetwork([0, 0], bond_dim=2))

# Lookup from (row mod 4, col mod 4) to qubit group: D* are data qubits,
# X*/Z* are X- and Z-type ancillas. NOTE(review): the (-1, -1) 'dummy' entry
# is unreachable via the `mod 4` lookup below (Python mod is non-negative).
qubit_group_name = {
    (0, 0): 'D1',
    (0, 2): 'D2',
    (2, 0): 'D3',
    (2, 2): 'D4',
    (1, 1): 'X1',
    (1, 3): 'Z1',
    (3, 1): 'Z2',
    (3, 3): 'X2',
    (-1, -1): 'dummy'
}
qubit_groups = {group: [] for group in qubit_group_name.values()}
# All qubit coordinates of the patch: a 3x3 grid of data qubits plus the
# interleaved ancilla sites and two boundary ancillas (-1, 3) and (5, 1).
qubit_coords = ([(x * 2, y * 2) for x in range(3) for y in range(3)]
                + [(-1, 3)]
                + [(x * 2 + 1, y * 2 + 1) for x in range(2) for y in (range(3) if x % 2 else range(-1, 2))]
                + [(5, 1)])
# Bucket every coordinate into its group by position modulo the unit cell.
for x, y in qubit_coords:
    qubit_groups[qubit_group_name[x % 4, y % 4]].append((x, y))
def add_CZ_gates(circuit, high_freq_group, low_freq_group):
    """Append a CZ between each high-frequency qubit and its first
    diagonally-adjacent low-frequency qubit (at most one CZ per hq)."""
    for hq in high_freq_group:
        partner = next(
            (lq for lq in low_freq_group
             if abs(hq[0] - lq[0]) == 1 and abs(hq[1] - lq[1]) == 1),
            None)
        if partner is not None:
            circuit.append(CZGate, [hq, partner])
def x_stab_meas(circuit, measure_outcome=None, use_ndcompmeas=False):
    """Append one noiseless round of X-stabilizer measurement to `circuit`.

    :param circuit: circuit extended in place.
    :param measure_outcome: optional dict from ancilla coordinate to a
        measurement component (e.g. ZeroMeas/OneMeas) used to post-select
        that outcome; when ``None`` every X ancilla is projected onto |0>.
    :param use_ndcompmeas: if ``True``, use the open-outcome ``PlaceHolder``
        measurement instead, leaving the outcome edge free in the network.
    """
    # Basis change: Hadamards on all data and X-ancilla qubits.
    for group in ['D1', 'D2', 'D3', 'D4', 'X1', 'X2']:
        for qubit in qubit_groups[group]:
            circuit.append(HGate, [qubit])
    # Time slot 1 ~ 4: four CZ "flux dance" steps; each tuple names the
    # (high, low) group pairs coupled in that step.
    for flux_dance in [('D2', 'X1', 'X2', 'D3', 'D4'),
                       ('D1', 'X1', 'X2', 'D4', 'D3'),
                       ('D1', 'X2', 'X1', 'D4', 'D3'),
                       ('D2', 'X2', 'X1', 'D3', 'D4')]:
        for g1, g2 in [flux_dance[0:2], flux_dance[2:4]]:
            add_CZ_gates(circuit, qubit_groups[g1], qubit_groups[g2])
    # Time slot B: undo the basis change.
    for group in ['D1', 'D2', 'D3', 'D4', 'X1', 'X2']:
        for qubit in qubit_groups[group]:
            circuit.append(HGate, [qubit])
    # Time slot C: measure the X ancillas, then re-initialise them.
    if use_ndcompmeas:
        for group in ['X1', 'X2']:
            for qubit in qubit_groups[group]:
                circuit.append(PlaceHolder, [qubit])
                circuit.append(TraceState, [qubit])
    else:
        if measure_outcome is not None:
            for group in ['X1', 'X2']:
                for qubit in qubit_groups[group]:
                    circuit.append(measure_outcome[qubit], [qubit])
                    circuit.append(ZeroState, [qubit])
        else:
            for group in ['X1', 'X2']:
                for qubit in qubit_groups[group]:
                    circuit.append(ZeroMeas, [qubit])
                    circuit.append(ZeroState, [qubit])
def z_stab_meas(circuit, measure_outcome=None, use_ndcompmeas=False):
    """Append one noiseless round of Z-stabilizer measurement to `circuit`.

    :param circuit: circuit extended in place.
    :param measure_outcome: optional dict from ancilla coordinate to a
        measurement component (e.g. ZeroMeas/OneMeas) used to post-select
        that outcome; when ``None`` every Z ancilla is projected onto |0>.
    :param use_ndcompmeas: if ``True``, use the open-outcome ``PlaceHolder``
        measurement instead, leaving the outcome edge free in the network.
    """
    # Basis change on the Z ancillas only (data qubits stay in Z basis).
    for group in ['Z1', 'Z2']:
        for qubit in qubit_groups[group]:
            circuit.append(HGate, [qubit])
    # Time slot 5 ~ 8: CZ "flux dance" steps coupling Z ancillas to data.
    for flux_dance in [('D1', 'Z1', 'Z2', 'D4', 'D3'),
                       ('D2', 'Z2', 'Z1', 'D3', 'D4'),
                       ('D2', 'Z1', 'Z2', 'D3', 'D4'),
                       ('D1', 'Z2', 'Z1', 'D4', 'D3')]:
        for g1, g2 in [flux_dance[0:2], flux_dance[2:4]]:
            add_CZ_gates(circuit, qubit_groups[g1], qubit_groups[g2])
    # Undo the basis change on the Z ancillas.
    for group in ['Z1', 'Z2']:
        for qubit in qubit_groups[group]:
            circuit.append(HGate, [qubit])
    # Measure the Z ancillas, then re-initialise them.
    if use_ndcompmeas:
        for group in ['Z1', 'Z2']:
            for qubit in qubit_groups[group]:
                circuit.append(PlaceHolder, [qubit])
                circuit.append(TraceState, [qubit])
    else:
        if measure_outcome is not None:
            for group in ['Z1', 'Z2']:
                for qubit in qubit_groups[group]:
                    circuit.append(measure_outcome[qubit], [qubit])
                    circuit.append(ZeroState, [qubit])
        else:
            for group in ['Z1', 'Z2']:
                for qubit in qubit_groups[group]:
                    circuit.append(ZeroMeas, [qubit])
                    circuit.append(ZeroState, [qubit])
def initial_state(coord=(-100, -100)):
    """Prepare a maximally entangled state between the surface code patch and
    an ancilla qubit at `coord`.

    First prepares |0>_surf |+>_anc (with one X-stabilizer round projecting
    the data qubits), then entangles the ancilla into the logical qubit via
    three CNOTs along the bottom row.
    """
    circ = Circuit()
    for qubit in qubit_coords:
        circ.append(ZeroState, [qubit])
    x_stab_meas(circ)
    circ.append(PlusState, [coord])
    for target in ((0, 0), (2, 0), (4, 0)):
        circ.append(CNOTGate, [coord, target])
    return circ
def final_measurement(circuit: Circuit):
    """Append the final, noiseless readout round to `circuit`.

    NOTE(review): the construction appears to trace out the ancillas from the
    last noisy round, perform one open-outcome X and Z stabilizer round, and
    fold the final-round ancillas onto data qubits via CZ/CNOT before tracing
    them — confirm against the accompanying QEC demo write-up.
    """
    # Discard the ancilla qubits left over from the noisy rounds.
    for q in [(1, 1), (1, -1), (3, 3), (3, 1), (-1, 3), (5, 1), (1, 3), (3, 5)]:
        circuit.append(Trace, [q])
        circuit.append(ZeroState, [q])
    x_stab_meas(circuit, use_ndcompmeas=True)  # Add final noiseless X-stabilizer measurements
    # Fold X-round ancillas onto neighbouring data qubits, then trace them.
    for qubits in [[(1, 1), (0, 0)], [(3, 3), (4, 4)], [(-1, 3), (0, 4)], [(5, 1), (4, 0)]]:
        circuit.append(CZGate, qubits)
        circuit.append(Trace, [qubits[0]])
        circuit.append(TraceState, [qubits[0]])
    z_stab_meas(circuit, use_ndcompmeas=True)  # Add final noiseless Z-stabilizer measurements
    # Fold Z-round ancillas onto neighbouring data qubits, then trace them.
    for qubits in [[(1, -1), (0, 0)], [(3, 1), (4, 0)], [(1, 3), (0, 4)], [(3, 5), (4, 4)]]:
        circuit.append(CNOTGate, qubits)
        circuit.append(Trace, [qubits[0]])
        circuit.append(TraceState, [qubits[0]])
    return circuit
def surface_code_tensor_network(num_layers=2, params=params):
    """Build the density tensor network for `num_layers` noisy measurement
    rounds followed by the final readout, with every placeholder ('PH')
    measurement node removed (i.e. its outcome summed over).
    """
    noisy_meas_circ = Circuit()
    end_time = 0
    for _ in range(num_layers):
        end_time = add_noisy_surface_code(noisy_meas_circ, qubit_coords, time=end_time, params=params)
    add_idle_noise(noisy_meas_circ, params=params)
    d = final_measurement(noisy_meas_circ)
    init_state = initial_state()
    # Sandwich the measured circuit between the initial state and the adjoint
    # of a second copy (ancilla at a distinct coordinate) to form the density.
    c_prob = init_state | d | initial_state(coord=(-101, -101)).adjoint()
    tn = c_prob.tensor_density.expand(recursive=True)
    for node_name in tn.nodes_by_name:
        # NOTE(review): this inspects node_name[-1] while the sibling
        # surface_code_tensor_network_with_syndrome inspects node_name[0];
        # confirm which tuple element carries the 'PH' label.
        if node_name[-1] == 'PH':
            tn.remove_node(node_name)
    return tn
def surface_code_tensor_network_with_syndrome(syndrome=None, num_layers=2, params=params):
    """Build the tensor network conditioned on a given measurement syndrome.

    :param syndrome: flat list of 0/1 ancilla outcomes — 8 per round for
        ``num_layers`` noisy rounds plus the final noiseless round; defaults
        to the all-zero syndrome.
    :param num_layers: number of noisy stabilizer-measurement rounds.
    :param params: noise-model parameter dict (module-level default).
    """
    if syndrome is None:
        syndrome = [0] * (8 * (num_layers + 1))
    noisy_meas_circ = Circuit()
    # Symmetric readout bit-flip channel with error probability e_ro.
    e_ro = params.get('e_ro', 0.01)
    butterfly = np.array([[1 - e_ro, e_ro], [e_ro, 1 - e_ro]])
    end_time = 0
    for _ in range(num_layers):
        end_time = add_noisy_surface_code(noisy_meas_circ,
                                          qubit_coords,
                                          time=end_time,
                                          params=params)
    add_idle_noise(noisy_meas_circ, params=params)
    d = final_measurement(noisy_meas_circ)
    init_state = initial_state()
    c_prob = init_state | d | initial_state(coord=(-101, -101)).adjoint()
    tn = c_prob.tensor_density.expand(recursive=True)
    cnt = 0
    # `syndrome` can no longer be None here (defaulted above), so the
    # original inner `if syndrome is not None` guard was dead and is removed.
    for node_name in tn.nodes_by_name:
        if node_name[0] in ('PH', 'PH_nz'):
            if node_name[0] == 'PH':
                # Noisy readout node: weight outcomes by the butterfly row.
                tn.update_node(node_name, butterfly[syndrome[cnt]])
            else:
                # Open-outcome placeholder: pin its free edge to the outcome.
                tn.fix_edge(tn.network.nodes[(0, node_name)]['edges'][0][0],
                            syndrome[cnt])
            cnt += 1
    return tn
|
11504941
|
import copy
import brian2
import numpy as np
import matplotlib.pyplot as plt
# Model and simulation parameters. NOTE(review): the values (g_na=35, g_k=9,
# phi=5, ...) match the Wang-Buzsaki interneuron network model — confirm
# before citing as such.
P = {'c_m': 1 * brian2.uF / brian2.cm ** 2,      # membrane capacitance
     'g_L': 0.1 * brian2.mS / brian2.cm ** 2,    # leak conductance
     'e_L': -65 * brian2.mV,                     # leak reversal potential
     'g_na': 35 * brian2.mS / brian2.cm ** 2,    # Na+ conductance
     'e_na': 55 * brian2.mV,                     # Na+ reversal potential
     'phi': 5,                                   # gating-rate temperature factor
     'g_k': 9 * brian2.mS / brian2.cm ** 2,      # K+ conductance
     'e_k': -90 * brian2.mV,                     # K+ reversal potential
     'g_syn': 0.1 * brian2.mS / brian2.cm ** 2,  # synaptic conductance (rescaled by in-degree in run())
     'e_syn': -75 * brian2.mV,                   # synaptic reversal (inhibitory)
     'theta_syn': 0 * brian2.mV,                 # presynaptic activation threshold
     'alpha': 12 / brian2.ms,                    # synaptic gate opening rate
     'beta': 0.1 / brian2.ms,                    # synaptic gate closing rate
     'n': 100,                                   # number of neurons
     'dt': 0.05 * brian2.ms,                     # integration timestep
     'run_t': 0.5 * brian2.second,               # total simulated time
     'p': 1.0,                                   # connection probability
     'i_app': 1.2 * brian2.uA / brian2.cm ** 2,  # constant applied current
     'thresh': -52 * brian2.mV}                  # spike-detection threshold (used in plot_raster)
def alpha_h(v):
    """Opening rate of the h (Na+ inactivation) gate; v is a voltage."""
    v_mv = v / brian2.mV
    return 0.07 * np.exp(-(v_mv + 58) / 20)
def beta_h(v):
    """Closing rate of the h (Na+ inactivation) gate; v is a voltage."""
    v_mv = v / brian2.mV
    return 1 / (np.exp(-0.1 * (v_mv + 28)) + 1)
def alpha_n(v):
    """Opening rate of the n (K+ activation) gate; v is a voltage."""
    x = v / brian2.mV + 34
    return -0.01 * x / (np.exp(-0.1 * x) - 1)
def beta_n(v):
    """Closing rate of the n (K+ activation) gate; v is a voltage."""
    v_mv = v / brian2.mV
    return 0.125 * np.exp(-(v_mv + 44) / 80)
def f(v_pre):
    """Sigmoidal presynaptic activation as a function of the presynaptic
    membrane potential (threshold P['theta_syn'], slope 2 mV)."""
    x = (v_pre - P['theta_syn']) / brian2.mV
    return 1 / (1 + np.exp(-x / 2))
def generate_connectivity(n, p):
    """Build a random n-by-n 0/1 connectivity matrix containing exactly
    n*n*p ones; p must make n*n*p (nearly) an integer."""
    total = n * n
    num_connections = total * p
    np.testing.assert_almost_equal(num_connections, int(num_connections))
    flat = np.zeros(total)
    flat[:int(num_connections)] = 1
    np.random.shuffle(flat)
    return flat.reshape((n, n))
# Fixed module-level connectivity matrix; with s.dot(CONN) in system_dot,
# CONN[i, j] == 1 means presynaptic neuron i projects to postsynaptic j.
CONN = generate_connectivity(P['n'], P['p'])
def alpha_m(v):
    """Opening rate of the m (Na+ activation) gate; v is a voltage."""
    x = v / brian2.mV + 35
    return -0.1 * x / (np.exp(-0.1 * x) - 1)
def beta_m(v):
    """Closing rate of the m (Na+ activation) gate; v is a voltage."""
    v_mv = v / brian2.mV
    return 4 * np.exp(-(v_mv + 60) / 18)
def m_inf(v):
    """Steady-state value of the m gate (instantaneous activation)."""
    a = alpha_m(v)
    return a / (a + beta_m(v))
def multiply_by_dt(x):
    """Return a new list with every component of `x` scaled by P['dt'].

    The original deep-copied `x` and then rebound each element to the
    product, so the copy was pure wasted work; a comprehension produces the
    identical result (input untouched, fresh list of products).
    """
    return [component * P['dt'] for component in x]
def add_k(state, k, divider):
    """Return state + k/divider, component-wise; `state` is not modified.

    The original deep-copied `state` and then rebound each element to the
    sum, so the copy was pure wasted work; zip gives the identical result.
    """
    return [component + delta / divider for component, delta in zip(state, k)]
def rk4(state):
    """Advance `state` by one classical fourth-order Runge-Kutta step of
    size P['dt'] using the derivatives from system_dot."""
    k1 = multiply_by_dt(system_dot(state))
    k2 = multiply_by_dt(system_dot(add_k(state, k1, 2)))
    k3 = multiply_by_dt(system_dot(add_k(state, k2, 2)))
    k4 = multiply_by_dt(system_dot(add_k(state, k3, 1)))
    # Weighted sum k1 + 2*k2 + 2*k3 + k4, accumulated via add_k.
    increment = add_k(add_k(add_k(k1, k2, 0.5), k3, 0.5), k4, 1)
    return add_k(state, increment, 6)
def system_dot(state):
    """Right-hand side of the network ODE system.

    `state` is [v, h, n, s] with one entry per neuron; returns the time
    derivatives in the same order.
    """
    v, h, n, s = state
    # Ionic currents; m is taken at its steady state m_inf(v).
    i_na = P['g_na'] * m_inf(v) ** 3 * h * (v - P['e_na'])
    i_k = P['g_k'] * n ** 4 * (v - P['e_k'])
    i_L = P['g_L'] * (v - P['e_L'])
    # s.dot(CONN) sums presynaptic gating variables into each postsynaptic cell.
    i_syn = P['g_syn'] * s.dot(CONN) * (v - P['e_syn'])
    v_dot = (-i_na - i_k - i_L - i_syn + P['i_app']) / P['c_m']
    # kHz factor attaches 1/time units to the dimensionless gating rates.
    h_dot = P['phi'] * (alpha_h(v) * (1 - h) - beta_h(v) * h) * brian2.kHz
    n_dot = P['phi'] * (alpha_n(v) * (1 - n) - beta_n(v) * n) * brian2.kHz
    s_dot = P['alpha'] * f(v) * (1 - s) - P['beta'] * s
    return [v_dot, h_dot, n_dot, s_dot]
def run():
    """Simulate the network with RK4 and return {'t_plot', 'v'}.

    NOTE(review): mutates the module-level ``P['g_syn']`` (scaled by the
    in-degree), so calling ``run`` twice shrinks the conductance again.
    """
    # Define initial state: membrane potentials uniform in [-70, -50] mV,
    # gates at their steady-state values for that potential.
    v_init = -70 * brian2.mV + np.random.rand(P['n']) * (-50 * brian2.mV -
                                                         -70 * brian2.mV)
    h_init = alpha_h(v_init) / (beta_h(v_init) + alpha_h(v_init))
    n_init = alpha_n(v_init) / (beta_n(v_init) + alpha_n(v_init))
    s_init = P['alpha'] * f(v_init) / (P['beta'] + P['alpha'] * f(v_init))
    # Define time axis.
    t_plot = np.linspace(0, P['run_t'], int(P['run_t'] / P['dt']) + 1)
    n_t = len(t_plot)
    # Define containers (neurons x timesteps).
    v = np.zeros((P['n'], n_t)) * brian2.mV
    h = np.zeros((P['n'], n_t))
    n = np.zeros((P['n'], n_t))
    s = np.zeros((P['n'], n_t))
    # Enter initial state.
    v[:, 0] = v_init
    h[:, 0] = h_init
    n[:, 0] = n_init
    s[:, 0] = s_init
    # Scale weights based on in-degree.
    m_syn = P['n'] * P['p']
    P['g_syn'] /= m_syn
    # Run simulation.
    # BUG FIX: `xrange` is Python 2 only and raises NameError on Python 3;
    # `range` is the correct (lazy) equivalent.
    for i_t in range(1, n_t):
        state = [v[:, i_t - 1], h[:, i_t - 1], n[:, i_t - 1], s[:, i_t - 1]]
        new_v, new_h, new_n, new_s = rk4(state)
        v[:, i_t] = new_v
        h[:, i_t] = new_h
        n[:, i_t] = new_n
        s[:, i_t] = new_s
    return {'t_plot': t_plot, 'v': v}
def plot_raster(results):
    """Raster plot: one dot per spike, where a spike is an upward crossing
    of P['thresh'] between consecutive samples."""
    for cell_idx, trace in enumerate(results['v']):
        crossings = np.where(np.logical_and(trace[:-1] < P['thresh'],
                                            trace[1:] >= P['thresh']))[0]
        plt.plot(results['t_plot'][crossings], [cell_idx] * len(crossings), 'k.')
    plt.show()
if __name__ == '__main__':
    # Script entry point: simulate the network, then show the spike raster.
    results = run()
    plot_raster(results)
|
11504949
|
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.popup import Popup
class P(FloatLayout):
    """Popup content layout; its widgets are presumably defined in a kv file."""
    pass
def show_popup():
    """Create and open a fixed-size (400x400) 'load' popup."""
    content = P()
    popup = Popup(title="load", content=content,
                  size_hint=(None, None), size=(400, 400))
    popup.open()
|
11504964
|
import uuid
import pytest
from server import create_app
DATASOURCES_ENDPOINT = "/v1/api/datasources"
NUM_DATASOURCES_IN_DB = 1
@pytest.fixture
def client(tmpdir):
    """Flask test client for the API; TESTING propagates errors to responses."""
    # temp_db_file = f"sqlite:///{tmpdir.dirpath()}/"
    app = create_app()
    app.config["TESTING"] = True
    # Context-managed so request/app contexts are torn down after each test.
    with app.test_client() as client:
        yield client
def test_datasources_get_all(client):
    """Listing datasources returns HTTP 200 and one entry per DB row."""
    resp = client.get(DATASOURCES_ENDPOINT)
    assert resp.status_code == 200
    assert len(resp.json) == NUM_DATASOURCES_IN_DB
def test_datasources_post(client):
    """Creating a datasource echoes its (unique) name back in the response."""
    unique_name = "datasource " + uuid.uuid4().hex
    payload = {"name": unique_name, "type": "snowflake", "config": {}}
    resp = client.post(DATASOURCES_ENDPOINT, json=payload)
    assert resp.status_code == 200
    assert resp.json["datasources"]["name"] == unique_name
def test_datasources_post_error(client):
    """Posting a payload missing type/config yields a server error."""
    resp = client.post(DATASOURCES_ENDPOINT, json={"name": "<NAME>"})
    assert resp.status_code == 500
def test_datasources_single(client):
    """Fetching an existing datasource by id succeeds with a body."""
    resp = client.get(DATASOURCES_ENDPOINT + "/1")
    assert resp.status_code == 200
    assert resp.json["datasource"] is not None
def test_datasource_not_found(client):
    """An unknown datasource id yields HTTP 404."""
    resp = client.get(DATASOURCES_ENDPOINT + "/7")
    assert resp.status_code == 404
# def test_datasource_test_connection_success(client):
# response = client.get(f"{DATASOURCES_ENDPOINT}/1/test")
# assert response.json['connection'] == "true"
# def test_datasource_test_connection_failure(client):
# response = client.get(f"{DATASOURCES_ENDPOINT}/4/test")
# assert response.json['connection'] == "false"
|
11504968
|
from dataclasses import dataclass, field
from typing import Dict, Optional
from pyhanko.pdf_utils import generic
__all__ = [
'ShapeResult', 'FontEngine', 'FontSubsetCollection', 'FontEngineFactory'
]
from pyhanko.pdf_utils.writer import BasePdfFileWriter
ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'


def generate_subset_prefix():
    """Return a random six-uppercase-letter font subset prefix.

    Uses ``random.choices`` (sampling with replacement) instead of the
    original per-character ``randint`` loop — same distribution, idiomatic.
    Prefixes are tags, not secrets, so the non-cryptographic ``random``
    module is appropriate here.
    """
    import random
    return ''.join(random.choices(ALPHABET, k=6))
@dataclass(frozen=True)
class FontSubsetCollection:
    """All subsets of a single font in use by a writer."""

    base_postscript_name: str
    """
    Base postscript name of the font.
    """

    subsets: Dict[Optional[str], 'FontEngine'] = field(default_factory=dict)
    """
    Dictionary mapping prefixes to subsets. ``None`` represents the full font.
    """

    def add_subset(self) -> str:
        """Draw random prefixes until one is not yet registered, and return it."""
        prefix = generate_subset_prefix()
        while prefix in self.subsets:
            prefix = generate_subset_prefix()
        return prefix
@dataclass(frozen=True)
class ShapeResult:
    """Result of shaping a Unicode string.

    Produced by :meth:`FontEngine.shape`.
    """
    graphics_ops: bytes
    """
    PDF graphics operators to render the glyphs.
    """
    x_advance: float
    """Total horizontal advance in em units."""
    y_advance: float
    """Total vertical advance in em units."""
class FontEngine:
    """General interface for text shaping and font metrics."""
    def __init__(self, writer: BasePdfFileWriter, base_postscript_name: str,
                 embedded_subset: bool, obj_stream=None):
        """Register this engine in the writer's subset collection.

        :param writer: the PDF writer this font belongs to.
        :param base_postscript_name: base PostScript name of the font.
        :param embedded_subset: if ``True``, allocate a fresh random subset
            prefix; otherwise the engine represents the full font and its
            prefix is ``None``.
        :param obj_stream: optional object stream for the font's objects.
        """
        fsc = writer.get_subset_collection(base_postscript_name)
        if embedded_subset:
            self.subset_prefix = prefix = fsc.add_subset()
        else:
            self.subset_prefix = prefix = None
        # Register self under the chosen prefix (None == full font).
        fsc.subsets[prefix] = self
        self.writer = writer
        self.obj_stream = obj_stream
    @property
    def uses_complex_positioning(self):
        """
        If ``True``, this font engine expects the line matrix to always be equal
        to the text matrix when exiting and entering :meth:`shape`.
        In other words, the current text position is where ``0 0 Td`` would
        move to.
        If ``False``, this method does not use any text positioning operators,
        and therefore uses the PDF standard's 'natural' positioning rules
        for text showing operators.
        The default is ``True`` unless overridden.
        """
        return True
    def shape(self, txt: str) -> ShapeResult:
        """Render a string to a format suitable for inclusion in a content
        stream and measure its total cursor advancement vector in em units.
        :param txt:
            String to shape.
        :return:
            A shaping result.
        """
        raise NotImplementedError
    def as_resource(self) -> generic.PdfObject:
        """Convert a :class:`.FontEngine` to a PDF object suitable for embedding
        inside a resource dictionary.
        .. note::
            If the PDF object is an indirect reference, the caller must not
            attempt to dereference it. In other words, implementations can
            use preallocated references to delay subsetting until the last
            possible moment (this is even encouraged, see
            :meth:`prepare_write`).
        :return:
            A PDF dictionary.
        """
        raise NotImplementedError
    def prepare_write(self):
        """
        Called by the writer that manages this font resource before the PDF
        content is written to a stream.
        Subsetting operations and the like should be carried out as part of
        this method.
        """
        pass
class FontEngineFactory:
    """Factory interface that produces :class:`FontEngine` instances bound
    to a specific writer."""
    def create_font_engine(self, writer: 'BasePdfFileWriter',
                           obj_stream=None) -> FontEngine:
        """Instantiate a font engine for *writer*; must be overridden."""
        raise NotImplementedError
|
11504983
|
from ...models.timer import Timer
from unicorn.arm_const import *
def systick_config(uc, rate=2000):
    """Model a firmware call to SysTick_Config in the emulator.

    Starts the 'SysTick' timer model wired to IRQ 15 (the Cortex-M SysTick
    exception number) and reports success back to the guest by clearing r0
    (SysTick_Config returns 0 on success).

    :param uc: the Unicorn engine instance executing the firmware
    :param rate: tick rate for the timer model. Defaults to the previously
        hard-coded value of 2000. TODO(review): the real rate should
        probably be derived from the guest's r0 argument (``qemu.regs.r0``
        in the original comment) — confirm against the firmware ABI.
    """
    systick_irq = 15
    print("Setting SysTick rate to %#08x" % rate)
    Timer.start_timer('SysTick', rate, systick_irq)
    uc.reg_write(UC_ARM_REG_R0, 0)
|
11504984
|
from ..param import *
from .component import *
from . import generic
# Open the "multiplayer" namespace; every component declared below belongs to
# it until the matching name_space() closer at the end of this module.
name_space(
    "multiplayer",
    name="Multiplayer",
    description=(
        "Multiplayer options pertaining to how the game is run, such as"
        " friendly fire, buy-anywhere, team managment, etc."
    )
)
# On/off toggle backed by the mp_autokick convar.
@Component("autokick", "auto_kick")
class AutoKick(generic.ChoiceVarBinary):
    params = ParamObj(
        Name("Auto Kick"),
        Desc("Toggle kicking idle, team-killing, or team-hurting players."),
        Override("convar", "mp_autokick")
    )
# On/off toggle backed by the mp_buy_anywhere convar.
@Component("buyanywhere", "buy_anywhere")
class BuyAnywhere(generic.ChoiceVarBinary):
    params = ParamObj(
        Name("Buy Anywhere"),
        Desc("Toggle allowing players to buy weapons anywhere on the map."),
        Override("convar", "mp_buy_anywhere"),
    )
# On/off toggle backed by the mp_drop_knife_enable convar.
@Component("dropknives", "drop_knives")
class DropKnives(generic.ChoiceVarBinary):
    params = ParamObj(
        Name("Drop Knives"),
        Desc("Toggle making players to drop knives on death."),
        Override("convar", "mp_drop_knife_enable")
    )
# On/off toggle backed by the mp_friendlyfire convar.
@Component("friendlyfire", "friendly_fire")
class FriendlyFire(generic.ChoiceVarBinary):
    params = ParamObj(
        Name("Friendly Fire"),
        Desc("Toggle allowing friendly fire."),
        Override("convar", "mp_friendlyfire")
    )
# On/off toggle backed by mp_give_player_c4; defaults to enabled.
@Component("c4enabled", "c4_enabled", "c4enable", "c4_enable")
class C4Enabled(generic.ChoiceVarBinary):
    params = ParamObj(
        Name("C4 Enabled"),
        Desc("Toggle giving Terrorist players C4."),
        Override("convar", "mp_give_player_c4"),
        Override("default", 1)
    )
# On/off toggle backed by the mp_randomspawn convar.
@Component("randomspawns", "random_spawns", "randomspawn", "random_spawn")
class RandomSpawns(generic.ChoiceVarBinary):
    params = ParamObj(
        Name("Random Spawns"),
        Desc("Toggle making players spawn in random locations."),
        Override("convar", "mp_randomspawn")
    )
# One-shot trigger for the mp_restartgame console command.
@Component("restartgame", "restart_game")
class RestartGame(generic.FireableCmd):
    params = ParamObj(
        Name("Restart Game"),
        Desc("Trigger a game restart."),
        Override("concmd", "mp_restartgame")
    )
# One-shot trigger for mp_forcewin; flagged as a cheat command.
@Component("forcewin", "force_win")
class ForceWin(generic.FireableCmd):
    params = ParamObj(
        Name("Force Win"),
        Desc("Force your team to win."),
        Override("concmd", "mp_forcewin"),
        flags=["cheat"]
    )
# One-shot trigger for mp_forcerespawnplayers; flagged as a cheat command.
@Component("forcerespawn", "force_respawn")
class ForceRespawn(generic.FireableCmd):
    params = ParamObj(
        Name("Force Respawn"),
        Desc("Force players to respawn."),
        Override("concmd", "mp_forcerespawnplayers"),
        flags=["cheat"]
    )
# Open the nested "teams" namespace (inside "multiplayer").
name_space(
    "teams",
    name="Teams",
    description=(
        "Teams setup and team options, including display options like names,"
        " odds, and flags."
    )
)
# One-shot trigger for mp_scrambleteams (also restarts the game).
@Component("scramble", "scrambeleggs_without_moms_help")
class ScrambleTeams(generic.FireableCmd):
    params = ParamObj(
        Name("Scramble Teams"),
        Desc("Scramble the teams and trigger a game restart."),
        Override("concmd", "mp_scrambleteams")
    )
# One-shot trigger for mp_swapteams (also restarts the game).
@Component("swap")
class SwapTeams(generic.FireableCmd):
    params = ParamObj(
        Name("Swap"),
        Desc("Swap the teams and trigger a game restart."),
        Override("concmd", "mp_swapteams")
    )
# One-shot trigger for mp_switchteams (also restarts the game).
@Component("switch")
class SwitchTeams(generic.FireableCmd):
    params = ParamObj(
        Name("Switch"),
        Desc("Switch the teams and trigger a game restart."),
        Override("concmd", "mp_switchteams")
    )
# Preset chooser backed by the mp_teamname_1 convar.
# Consistency fix: every sibling component uses the "<thing>_<n>_preset"
# alias form (e.g. "name_2_preset", "flag_1_preset"); "tname_1_preset"
# looks like a typo for "name_1_preset".  The conventional alias is added
# and the old one is kept so existing references keep working.
@Component("name1preset", "name_1_preset", "tname_1_preset")
class TeamName1Preset(generic.ChoiceVar):
    params = ParamObj(
        Name("Name 1"),
        Desc("Choose team 1 name presets."),
        Override("convar", "mp_teamname_1")
    )
# Preset chooser backed by the mp_teamname_2 convar.
@Component("name2preset", "name_2_preset")
class TeamName2Preset(generic.ChoiceVar):
    params = ParamObj(
        Name("Name 2"),
        Desc("Choose team 2 name presets."),
        Override("convar", "mp_teamname_2")
    )
# Preset chooser backed by the mp_teamlogo_1 convar.
@Component("logo1preset", "logo_1_preset")
class Logo1Preset(generic.ChoiceVar):
    params = ParamObj(
        Name("Logo 1"),
        Desc("Choose team 1 logo presets."),
        Override("convar", "mp_teamlogo_1")
    )
# Preset chooser backed by the mp_teamlogo_2 convar.
@Component("logo2preset", "logo_2_preset")
class TeamLogo2Preset(generic.ChoiceVar):
    params = ParamObj(
        Name("Logo 2"),
        Desc("Choose team 2 logo presets."),
        Override("convar", "mp_teamlogo_2")
    )
# Preset chooser backed by the mp_teamflag_1 convar.
@Component("flag1preset", "flag_1_preset")
class TeamFlag1Preset(generic.ChoiceVar):
    params = ParamObj(
        Name("Flag 1"),
        Desc("Choose team 1 flag presets."),
        Override("convar", "mp_teamflag_1")
    )
# Preset chooser backed by the mp_teamflag_2 convar.
@Component("flag2preset", "flag_2_preset")
class TeamFlag2Preset(generic.ChoiceVar):
    params = ParamObj(
        Name("Flag 2"),
        Desc("Choose team 2 flag presets."),
        Override("convar", "mp_teamflag_2")
    )
# Preset chooser backed by the mp_teammatchstat_1 convar.
@Component("matchstat1preset", "matchstat_1_preset")
class MatchStat1Preset(generic.ChoiceVar):
    params = ParamObj(
        Name("MatchStat 1"),
        Desc("Choose team 1 matchstat presets."),
        Override("convar", "mp_teammatchstat_1")
    )
# Preset chooser backed by the mp_teammatchstat_2 convar.
@Component("matchstat2preset", "matchstat_2_preset")
class TeamMatchStat2Preset(generic.ChoiceVar):
    params = ParamObj(
        Name("MatchStat 2"),
        Desc("Choose team 2 matchstat presets."),
        Override("convar", "mp_teammatchstat_2")
    )
# 0-100 slider (50 steps, default 50) for sv_teamprediction_pct, rendered
# as a string bar with "CT" and "T" at the two ends.
@Component("odds", "matchodds", "match_odds")
class TeamMatchOdds(generic.Bar):
    params = ParamObj(
        Name("Match Odds"),
        Desc("Change the match odds."),
        Override("min", 0),
        Override("max", 100),
        Override("steps", 50),
        Override("convar", "sv_teamprediction_pct"),
        Override("style", "str"),
        Override("strleft", "CT"),
        Override("strright", "T"),
        Override("default", 50)
    )
# Close the "teams" namespace (back to "multiplayer").
name_space()
# On/off toggle backed by the mp_tkpunish convar.
@Component("tkpunish", "tk_punish")
class TKPunish(generic.ChoiceVarBinary):
    params = ParamObj(
        Name("TK Punish"),
        Desc("Toggle punishing teamkillers/hurters the next round."),
        Override("convar", "mp_tkpunish")
    )
# Open the nested "warmup" namespace (inside "multiplayer").
name_space(
    "warmup",
    name="Warmup",
    description="Warmup controls."
)
# One-shot trigger for the mp_warmup_start console command.
@Component("start")
class StartWarmup(generic.FireableCmd):
    params = ParamObj(
        Name("Start"),
        Desc("Start the warmup session."),
        Override("concmd", "mp_warmup_start")
    )
# One-shot trigger for the mp_warmup_pause console command.
@Component("pause")
class PauseWarmup(generic.FireableCmd):
    params = ParamObj(
        Name("Pause"),
        Desc("Pause the warmup session."),
        Override("concmd", "mp_warmup_pause")
    )
# One-shot trigger for the mp_warmup_end console command.
@Component("end")
class EndWarmup(generic.FireableCmd):
    params = ParamObj(
        Name("End"),
        Desc("End the warmup session"),
        Override("concmd", "mp_warmup_end")
    )
# 0-20 integer slider (default 5) for the mp_freezetime convar.
@Component("freezetime")
class FreezeTime(generic.Bar):
    params = ParamObj(
        Name("Freeze Time"),
        Desc("Change the amount of time players are frozen at the start."),
        Override("min", 0),
        Override("max", 20),
        Override("steps", 20),
        Override("convar", "mp_freezetime"),
        Override("style", "int"),
        Override("default", 5)
    )
# Close the "warmup" namespace...
name_space()
# ...and the enclosing "multiplayer" namespace.
name_space()
|
11505057
|
from mypy_extensions import TypedDict
class WidgetInterface(TypedDict, total=False):
    """Typed mapping describing a widget record; all keys are optional (total=False)."""
    # Numeric identifier of the widget — presumably unique; confirm with callers.
    widget_id: int
    # Human-readable widget name.
    name: str
    # Free-text description of what the widget is for.
    purpose: str
|
11505065
|
import torch
import torch.nn as nn
import math
def weights_init(m):
    """Initialize a module in-place with DCGAN-style Gaussian weights.

    Conv / transposed-conv filters get zero-mean Gaussians with
    std = sqrt(2 / fan) (He-style scaling); batch-norm layers get unit gain
    and zero shift.  Intended to be passed to ``nn.Module.apply``.

    :param m: the submodule currently being visited
    """
    if isinstance(m, nn.Conv2d):
        # Forward convolutions: fan computed from the output side.
        fan = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
        m.weight.data.normal_(0, math.sqrt(2. / fan))
        if m.bias is not None:
            m.bias.data.zero_()
    elif isinstance(m, nn.ConvTranspose2d):
        # Transposed convolutions swap channel roles: use the input side.
        fan = m.kernel_size[0] * m.kernel_size[1] * m.in_channels
        m.weight.data.normal_(0, math.sqrt(2. / fan))
        if m.bias is not None:
            m.bias.data.zero_()
    elif isinstance(m, nn.BatchNorm2d):
        # Bug fix: guard against affine=False, which leaves weight/bias as
        # None and would previously have crashed with AttributeError.
        if m.weight is not None:
            m.weight.data.fill_(1)
        if m.bias is not None:
            m.bias.data.zero_()
class Generator(nn.Module):
    """DCGAN-style generator: a latent vector (latent_dim x 1 x 1) is upsampled
    through a stack of transposed convolutions to a (channels_out) x 128 x 128
    image in [-1, 1] (tanh output)."""

    def __init__(self, latent_dim=100, num_features=64, channels_out=3):
        super(Generator, self).__init__()

        def up_block(c_in, c_out, kernel, stride, pad):
            # ConvTranspose -> BatchNorm -> ReLU upsampling unit.
            return [
                nn.ConvTranspose2d(c_in, c_out, kernel, stride, pad, bias=False),
                nn.BatchNorm2d(c_out),
                nn.ReLU(True),
            ]

        layers = []
        layers += up_block(latent_dim, num_features * 8, 4, 1, 0)        # -> (nf*8) x 4 x 4
        layers += up_block(num_features * 8, num_features * 4, 4, 2, 1)  # -> (nf*4) x 8 x 8
        layers += up_block(num_features * 4, num_features * 2, 4, 2, 1)  # -> (nf*2) x 16 x 16
        layers += up_block(num_features * 2, num_features, 4, 2, 1)      # -> (nf) x 32 x 32
        layers += up_block(num_features, num_features, 4, 2, 1)          # -> (nf) x 64 x 64
        layers.append(nn.ConvTranspose2d(num_features, channels_out, 4, 2, 1, bias=False))
        layers.append(nn.Tanh())                                         # -> (channels_out) x 128 x 128
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        return self.model(input)
class Discriminator(nn.Module):
    """DCGAN-style discriminator: a (channels_out) x 128 x 128 image is reduced
    through strided convolutions to a single real/fake probability per sample
    (sigmoid output, flattened to a 1-D batch vector)."""

    def __init__(self, channels_out=3, num_features=64):
        super(Discriminator, self).__init__()
        layers = [
            # (channels_out) x 128 x 128 -> (nf) x 64 x 64
            nn.Conv2d(channels_out, num_features, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            # (nf) x 64 x 64 -> (nf) x 32 x 32
            nn.Conv2d(num_features, num_features, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
        ]
        # Three channel-doubling, resolution-halving stages: 32 -> 16 -> 8 -> 4.
        width = num_features
        for _ in range(3):
            layers += [
                nn.Conv2d(width, width * 2, 4, 2, 1, bias=False),
                nn.BatchNorm2d(width * 2),
                nn.LeakyReLU(0.2, inplace=True),
            ]
            width *= 2
        # (nf*8) x 4 x 4 -> 1 x 1 x 1 probability map.
        layers.append(nn.Conv2d(width, 1, 4, 1, 0, bias=False))
        layers.append(nn.Sigmoid())
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        return self.model(input).view(-1, 1).squeeze(1)
|
11505066
|
import json
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from helium.auth.models import UserSettings, UserProfile
from helium.auth.tests.helpers import userhelper
__author__ = "<NAME>"
__copyright__ = "Copyright 2021, Helium Edu"
__version__ = "1.4.46"
class TestCaseUserViews(APITestCase):
    """End-to-end tests for the authenticated user detail, verify, and delete
    endpoints: serialization contents, username/email/password changes, and
    account deletion with cascading cleanup."""

    def test_user_login_required(self):
        """Unauthenticated requests to the user detail endpoint are rejected."""
        # GIVEN
        userhelper.given_a_user_exists()

        # WHEN
        responses = [
            self.client.get(reverse('auth_user_detail')),
            self.client.put(reverse('auth_user_detail'))
        ]

        # THEN
        for response in responses:
            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_get_user(self):
        """The serialized user exposes profile/settings but never verification codes."""
        # GIVEN
        user = userhelper.given_a_user_exists_and_is_authenticated(self.client)

        # WHEN
        response = self.client.get(reverse('auth_user_detail'))

        # THEN
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertNotIn('verification_code', response.data)
        self.assertEqual(user.username, response.data['username'])
        self.assertEqual(user.email, response.data['email'])
        self.assertNotIn('phone_verification_code', response.data['profile'])
        self.assertEqual(user.profile.phone, response.data['profile']['phone'])
        self.assertEqual(user.profile.user.pk, response.data['profile']['user'])
        self.assertEqual(user.settings.time_zone, response.data['settings']['time_zone'])
        self.assertEqual(user.settings.default_view, response.data['settings']['default_view'])
        self.assertEqual(user.settings.week_starts_on, response.data['settings']['week_starts_on'])
        self.assertEqual(user.settings.all_day_offset, response.data['settings']['all_day_offset'])
        self.assertEqual(user.settings.show_getting_started, response.data['settings']['show_getting_started'])
        self.assertEqual(user.settings.events_color, response.data['settings']['events_color'])
        self.assertEqual(user.settings.default_reminder_offset, response.data['settings']['default_reminder_offset'])
        self.assertEqual(user.settings.default_reminder_offset_type,
                         response.data['settings']['default_reminder_offset_type'])
        self.assertEqual(user.settings.default_reminder_type, response.data['settings']['default_reminder_type'])
        self.assertEqual(user.settings.receive_emails_from_admin,
                         response.data['settings']['receive_emails_from_admin'])
        self.assertEqual(user.settings.private_slug, response.data['settings']['private_slug'])
        self.assertEqual(user.settings.user.pk, response.data['settings']['user'])

    def test_username_changes(self):
        """PUT with a new username updates it immediately, leaving email untouched."""
        # GIVEN
        user = userhelper.given_a_user_exists_and_is_authenticated(self.client)
        self.assertEqual(user.email, '<EMAIL>')
        self.assertIsNone(user.email_changing)

        # WHEN
        data = {
            'username': 'new_username',
            # Intentionally NOT changing this value
            'email': user.email
        }
        response = self.client.put(reverse('auth_user_detail'), json.dumps(data),
                                   content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['username'], data['username'])
        self.assertEqual(response.data['email'], user.email)

        user = get_user_model().objects.get(pk=user.id)
        self.assertEqual(user.username, response.data['username'])
        self.assertEqual(user.email, response.data['email'])
        self.assertIsNone(user.email_changing)

    def test_email_changing(self):
        """A new email is staged in ``email_changing`` pending verification;
        the active ``email`` stays unchanged until then."""
        # GIVEN
        user = userhelper.given_a_user_exists_and_is_authenticated(self.client)
        self.assertEqual(user.email, '<EMAIL>')
        self.assertIsNone(user.email_changing)
        self.assertEqual(user.username, 'test_user')

        # WHEN
        data = {
            'email': '<EMAIL>',
            # Intentionally NOT changing this value
            'username': user.username
        }
        response = self.client.put(reverse('auth_user_detail'), json.dumps(data),
                                   content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['username'], user.username)
        self.assertEqual(response.data['email'], user.email)
        self.assertEqual(response.data['email_changing'], '<EMAIL>')

        user = get_user_model().objects.get(pk=user.id)
        self.assertEqual(user.email, response.data['email'])
        self.assertEqual(user.email_changing, response.data['email_changing'])
        self.assertEqual(user.username, response.data['username'])

    def test_email_changes_after_verification(self):
        """Hitting the verify endpoint promotes ``email_changing`` to ``email``."""
        # GIVEN
        user = userhelper.given_a_user_exists_and_is_authenticated(self.client)
        user.email_changing = '<EMAIL>'
        user.verification_code = 'moo-moo-moo'
        user.save()

        # WHEN
        response = self.client.get(
            reverse('auth_user_resource_verify') + f'?username={user.username}&code={user.verification_code}')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        user = get_user_model().objects.get(pk=user.id)
        self.assertEqual(user.email, '<EMAIL>')
        self.assertIsNone(user.email_changing)

    def test_password_change(self):
        """Supplying the correct old password changes the password."""
        # GIVEN
        user = userhelper.given_a_user_exists_and_is_authenticated(self.client)

        # WHEN
        data = {
            'old_password': '<PASSWORD>!',
            'password': '<PASSWORD>!'
        }
        response = self.client.put(reverse('auth_user_detail'),
                                   json.dumps(data),
                                   content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        user = get_user_model().objects.get(pk=user.id)
        self.assertTrue(user.check_password('<PASSWORD>!'))

    def test_password_change_fails_old_password(self):
        """A wrong old password is rejected with an ``old_password`` field error."""
        # GIVEN
        userhelper.given_a_user_exists_and_is_authenticated(self.client)

        # WHEN
        data = {
            'old_password': '<PASSWORD>',
            'password': '<PASSWORD>!',
        }
        response = self.client.put(reverse('auth_user_detail'),
                                   json.dumps(data),
                                   content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('old_password', response.data)

    def test_password_change_fails_blank_new_pass(self):
        """A blank new password is rejected with a ``password`` field error."""
        # GIVEN
        userhelper.given_a_user_exists_and_is_authenticated(self.client)

        # WHEN
        data = {
            'old_password': '<PASSWORD>!',
            'password': '',
        }
        response = self.client.put(reverse('auth_user_detail'),
                                   json.dumps(data),
                                   content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('password', response.data)

    def test_password_change_fails_to_meet_requirements(self):
        """A new password failing the strength rules is rejected."""
        # GIVEN
        userhelper.given_a_user_exists_and_is_authenticated(self.client)

        # WHEN
        data = {
            'old_password': '<PASSWORD>!',
            'password': '<PASSWORD>',
        }
        response = self.client.put(reverse('auth_user_detail'),
                                   json.dumps(data),
                                   content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('password', response.data)

    def test_username_already_exists(self):
        """Changing the username to one already taken is rejected."""
        # GIVEN
        user1 = userhelper.given_a_user_exists()
        user2 = userhelper.given_a_user_exists_and_is_authenticated(self.client, username='user2',
                                                                    email='<EMAIL>')

        # WHEN
        data = {
            # Trying to change username to match user1's username
            'username': user1.username,
            'email': user2.email
        }
        response = self.client.put(reverse('auth_user_detail'), json.dumps(data),
                                   content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('username', response.data)

    def test_email_already_exists(self):
        """Changing the email to one already taken is rejected."""
        # GIVEN
        user1 = userhelper.given_a_user_exists()
        user2 = userhelper.given_a_user_exists_and_is_authenticated(self.client, username='user2',
                                                                    email='<EMAIL>')

        # WHEN
        data = {
            # Trying to change email to match user1's email
            'email': user1.email,
            'username': user2.username
        }
        response = self.client.put(reverse('auth_user_detail'), json.dumps(data),
                                   content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('email', response.data)

    def test_delete_user(self):
        """Deleting the account removes the user and cascades to settings/profile."""
        # GIVEN
        user = userhelper.given_a_user_exists_and_is_authenticated(self.client)

        # WHEN
        data = {
            # Deletion requires re-confirming the account's credentials
            'email': user.email,
            'username': user.username,
            'password': '<PASSWORD>!'
        }
        response = self.client.delete(reverse('auth_user_resource_delete'), json.dumps(data),
                                      content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertFalse(get_user_model().objects.filter(pk=user.pk).exists())
        self.assertFalse(UserSettings.objects.filter(user_id=user.pk).exists())
        self.assertFalse(UserProfile.objects.filter(user_id=user.pk).exists())

    def test_delete_fails_bad_request(self):
        """Deletion with a wrong confirmation password is rejected."""
        # GIVEN
        user = userhelper.given_a_user_exists_and_is_authenticated(self.client)

        # WHEN
        data = {
            # Wrong password: deletion should be rejected
            'email': user.email,
            'username': user.username,
            'password': '<PASSWORD>'
        }
        response = self.client.delete(reverse('auth_user_resource_delete'), json.dumps(data),
                                      content_type='application/json')

        # THEN
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn('password', response.data)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.