index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
32,985,621
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/op2/writer/case_writer.py
|
from __future__ import annotations
from copy import deepcopy
from struct import pack, Struct
from collections import defaultdict
from typing import TYPE_CHECKING
from pyNastran.op2.errors import SixtyFourBitError
from .geom1_writer import write_geom_header, close_geom_table, init_table
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
from pyNastran.bdf.subcase import Subcase
def write_casecc_header(table_name: bytes, op2_file, op2_ascii, endian: bytes=b'<'):
    """
    Writes the opening records of a CASECC-style table.

    Parameters
    ----------
    table_name : bytes
        the 8-character table name (e.g., b'CASECC')
    op2_file : file
        the binary op2 file object
    op2_ascii : file
        the ascii debug file object
    endian : bytes; default=b'<'
        the struct endian prefix used for all packed records

    """
    op2_ascii.write('----------\n')
    data = init_table(table_name)
    # BUG FIX: `endian` was previously ignored, so every record was packed
    # with native byte order/alignment; the requested endianness is now honored
    op2_file.write(pack(endian + b'4i 8s i 3i', *data))
    op2_ascii.write(str(data) + '\n')

    # header record [101, 1, 0, 1030, 0, 0, 0] wrapped in its 4/28 markers
    data = [
        4, 7, 4,
        28,
        101, 1, 0, 1030, 0, 0, 0,
        28,
    ]
    op2_file.write(pack(endian + b'3i 9i', *data))
    op2_ascii.write(str(data) + '\n')

    #-------------------------------------
    # itable = -2 record
    data = [
        4, -2, 4,
        4, 1, 4,
        4, 0, 4]
    op2_file.write(pack(endian + b'9i', *data))
    op2_ascii.write(str(data) + '\n')

    # table subname record (XCASECC)
    data = [
        4, 2, 4,
        8, b'XCASECC ', 8,
    ]
    op2_file.write(pack(endian + b'3i i8si', *data))
    op2_ascii.write(str(data) + '\n')

    #-------------------------------------
    # itable = -3 record; the caller continues writing from itable=-3
    data = [
        4, -3, 4,
        4, 1, 4,
        4, 0, 4]
    op2_file.write(pack(endian + b'9i', *data))
    op2_ascii.write(str(data) + '\n')
def write_casecc(op2_file, op2_ascii, obj, endian: bytes=b'<',
                 nastran_format: str='nx'):
    """writes the CASECC table

    Parameters
    ----------
    op2_file : file
        the binary op2 file object
    op2_ascii : file
        the ascii debug file object
    obj : BDF
        the model whose ``subcases`` are written
    endian : bytes; default=b'<'
        the struct endian prefix
    nastran_format : str; default='nx'
        only 'msc' is currently supported

    Raises
    ------
    NotImplementedError
        if ``nastran_format`` is not 'msc'

    """
    write_casecc_header(b'CASECC', op2_file, op2_ascii, endian=endian)
    itable = -3
    subcases = obj.subcases
    if nastran_format == 'msc':
        # BUG FIX: removed leftover debug print(subcase)
        for subcase_id, subcase in sorted(subcases.items()):
            write_msc_casecc(subcase_id, subcase, obj)
    else:
        # BUG FIX: was a bare `asdf` placeholder, which raised an opaque NameError
        raise NotImplementedError(
            f'CASECC writing is not supported for nastran_format={nastran_format!r}')
    close_geom_table(op2_file, op2_ascii, itable)
def _get_int(key: str, subcase: Subcase) -> int:
value = 0
if key in subcase:
value = subcase[key][0]
assert isinstance(value, int), type(value)
return value
def _get_str(key: str, subcase: Subcase, nbytes: int) -> bytes:
assert nbytes > 0, nbytes
if key in subcase:
value_str = subcase[key][0]
assert isinstance(value_str, str), value_str
fmt = '%-%%ss' % nbytes
value_str2 = fmt % value_str
value_bytes = value_str2.encode('ascii')
assert len(v)
else:
value_bytes = b' ' * nbytes
return value_bytes
def _get_stress(key: str, subcase: Subcase) -> int:
value = 0
media = 0
fmt = 0
von_mises = 1
is_fmt = False
if key in subcase:
value, options_ = subcase[key]
options = deepcopy(options_)
if 'SORT1' in options:
options.remove('SORT1')
assert is_fmt is False
fmt = 1
is_fmt = True
if 'SORT2' in options:
options.remove('SORT2')
assert is_fmt is False
fmt = 2
if 'PLOT' in options:
options.remove('PLOT')
media += 1
if 'PRINT' in options:
options.remove('PRINT')
media += 2
if 'PUNCH' in options:
options.remove('PUNCH')
media += 4
assert len(options) == 0, options
return value, media, fmt, von_mises
def _get_set_media_load(key: str, subcase: Subcase) -> int:
#if media in (1, 3, 5, 7):
#options.append('PLOT')
#if media in ( 2, 3, 6, 7):
#options.append('PRINT')
#if media in ( 4, 5, 6, 7):
#options.append('PUNCH')
value = 0
media = 0
fmt = 0
is_fmt = False
if key in subcase:
value, options_ = subcase[key]
options = deepcopy(options_)
if 'SORT1' in options:
options.remove('SORT1')
assert is_fmt is False
fmt = 1
is_fmt = True
if 'SORT2' in options:
options.remove('SORT2')
assert is_fmt is False
fmt = 2
if 'PLOT' in options:
options.remove('PLOT')
media += 1
if 'PRINT' in options:
options.remove('PRINT')
media += 2
if 'PUNCH' in options:
options.remove('PUNCH')
media += 4
assert len(options) == 0, options
return value, media, fmt
def write_msc_casecc(subcase_id: int, subcase: Subcase, model: BDF):
nsub = 0
"""
Word Name Type Description
subcase, mpc, spc, load, method_structure, deform, temp_load, temp_mat_init, tic, nlload_set, nlload_media, nlload_format, dload, freq, tfl = ints[:15]
1 SID I Subcase identification number
2 MPCSET I Multipoint constraint set (MPC)
3 SPCSET I Single point constraint set (SPC)
4 ESLSET I External static load set (LOAD)
5 REESET I Real eigenvalue extraction set (METHOD(STRUCTURE))
6 ELDSET I Element deformation set (DEFORM)
7 THLDSET I Thermal load set (TEMP(LOAD))
8 THMATSET I Thermal material set TEMP(MAT or INIT)
9 TIC I Transient initial conditions (IC)
10 NONPTSET I Nonlinear load output set (NLLOAD)
11 NONMEDIA I Nonlinear load output media (NLLOAD)
12 NONFMT I Nonlinear load output format (NLLOAD)
13 DYMLDSET I Dynamic load set (DLOAD)
14 FEQRESET I Frequency response set (FREQUENCY)
15 TFSET I Transfer function set (TFL)
"""
mpc_id = _get_int('MPC', subcase)
spc_id = _get_int('SPC', subcase)
load_id = _get_int('LOAD', subcase)
deform_id = _get_int('DEFORM', subcase)
temp_load_id = _get_int('TEMP(LOAD)', subcase)
temp_mat_id = _get_int('TEMP(MAT)', subcase)
ic_id = _get_int('IC', subcase)
ree_set = _get_int('METHOD', subcase)
nlload_set, nlload_media, nlload_fmt = _get_set_media_load('NLLOAD', subcase)
data = [subcase_id, mpc_id, spc_id, load_id, ree_set, deform_id, temp_load_id, temp_mat_id, ic_id,
nlload_set, nlload_media, nlload_fmt]
"""
16 SYMFLG I Symmetry flag (SYMSEQ and SUBSEQ)
17 LDSPTSET I Load output set (OLOAD)
18 LDSMEDIA I Load output media (OLOAD)
19 LDSFMT I Load output format (OLOAD)
20 DPLPTSET I Displ., temp., or pressure output set (DISP,THERM,PRES)
21 DPLMEDIA I Displ., temp., or pressure output media (DISP,THERM,PRES)
22 DPLFMT I Displ., temp., or pressure output format (DISP,THERM,PRES)
23 STSPTSET I Stress output set (STRESS)
24 STSMEDIA I Stress output media (STRESS)
25 STSFMT I Stress output format (STRESS)
26 FCEPTSET I Force (or flux) output set (FORCE or FLUX)
27 FCEMEDIA I Force (or flux) output media (FORCE or FLUX)
28 FCEFMT I Force (or flux) output format (FORCE or FLUX)
"""
symflag = 0
oload_set, oload_media, oload_format = _get_set_media_load('OLOAD', subcase)
disp_set, disp_media, disp_format = _get_set_media_load('DISPLACEMENT', subcase)
stress_set, stress_media, stress_set = _get_set_media_load('STRESS', subcase)
force_set, force_media, force_set = _get_set_media_load('FORCE', subcase)
data += [symflag,
oload_set, oload_media, oload_format,
disp_set, disp_media, disp_format,
stress_set, stress_media, stress_set,
force_set, force_media, force_set]
"""
29 ACCPTSET I Acceleration (or enthalpy delta) output set (ACCEL or HDOT)
30 ACCMEDIA I Acceleration (or enthalpy delta) output media (ACCE, HDOT)
31 ACCFMT I Acceleration (or enthalpy delta) output format (ACCE, HDOT)
32 VELPTSET I Velocity (or enthalpy) output set (VELOCITY or ENTHALPY)
33 VELMEDIA I Velocity (or enthalpy) output media (VELOCITY) or ENTHALPY)
34 VELFMT I Velocity (or enthalpy) output format (VELOCITY) or ENTHALPY)
35 FOCPTSET I Forces of single-point constraint output set (SPCFORCE)
36 FOCMEDIA I Forces of single-point constraint output media (SPCFORCE)
37 FOCFMT I Forces of single-point constraint output format (SPCFORCE)
38 TSTEPTRN I Time step set for transient analysis (TSTEP)
39 TITLE(32) CHAR4 Title character string (TITLE)
71 SUBTITLE(32) CHAR4 Subtitle character string (SUBTITLE)
103 LABEL(32) CHAR4 LABEL character string (LABEL)
135 STPLTFLG I Model plot flag: set to 1 if OUTPUT(PLOT) is specified
136 AXSYMSET I Axisymmetric set (AXISYMMETRIC)
137 NOHARMON I Number of harmonics to output (HARMONICS)
138 TSTRV I Need definition
139 K2PP(2) CHAR4 Name of direct input (p-set) stiffness matrix (K2PP)
141 M2PP(2) CHAR4 Name of direct input (p-set) mass matrix (M2PP)
143 B2PP(2) CHAR4 Name of direct input (p-set) damping matrix (B2PP)
145 OUTRESPV I Output frequencies or times (OFREQ or OTIME)
146 SEDR I Data recovery superelement list (SEDR)
147 FLDBNDY I Fluid boundary element selection (MFLUID)
148 CEESET I Complex eigenvalue extraction set (CMETHOD)
149 DAMPTBL I Structural damping table set (SDAMP(STRUCT)
151 SSDSET I Solution set displacements output set (SDISP)
152 SSDMEDIA I Solution set displacements output media (SDISP)
153 SSDFMT I Solution set displacements output format (SDISP)
154 SSVSET I Solution set velocities output set (SVELO)
155 SSVMEDIA I Solution set velocities output media (SVELO)
156 SSVFMT I Solution set velocities output format (SVELO)
157 SSASET I Solution set accelerations output set (SACCE)
158 SSAMEDIA I Solution set accelerations output media (SACCE)
159 SSAFMT I Solution set accelerations output format (SACCE)
"""
n32 = 7
nsub -= (3 + n32 * 3)
accel_set, accel_media, accel_fmt = _get_set_media_load('ACCELERATION', subcase)
velo_set, velo_media, velo_fmt = _get_set_media_load('VELOCITY', subcase)
spc_force_set, spc_force_media, spc_force_fmt = _get_set_media_load('SPCFORCE', subcase)
#oload_set, oload_media, oload_format = _get_set_media_load('MPCFORCE', subcase)
sdisp_set, sdisp_media, sdisp_format = _get_set_media_load('SDISPLACEMENT', subcase)
svelo_set, svelo_media, svelo_fmt = _get_set_media_load('SVELOCITY', subcase)
saccel_set, saccel_media, saccel_fmt = _get_set_media_load('SACCELERATION', subcase)
tstep_id = _get_int('TSTEP', subcase)
title = _get_str('TITLE', subcase, nbytes=128)
subtitle = _get_str('SUBTITLE', subcase, nbytes=128)
label = _get_str('LABEL', subcase, nbytes=128)
model_plot_flag = 0
axisymmetric_flag = 0
nharmonics = 0
needs_definition = 0
k2pp_name = _get_str('K2PP', subcase, nbytes=8)
m2pp_name = _get_str('M2PP', subcase, nbytes=8)
b2pp_name = _get_str('B2PP', subcase, nbytes=8)
ofreq_otime_id = 0
sedr = _get_int('SEDR', subcase)
mfluid_id = _get_int('SEDR', subcase)
cmethod_id = _get_int('SEDR', subcase)
sdamp_id = _get_int('SDAMP', subcase)
data += [accel_set, accel_media, accel_fmt,
velo_set, velo_media, velo_fmt,
spc_force_set, spc_force_media, spc_force_fmt,
tstep_id, title, subtitle, label, model_plot_flag, axisymmetric_flag, nharmonics, needs_definition,
k2pp_name, m2pp_name, b2pp_name, ofreq_otime_id, sedr, mfluid_id, cmethod_id, sdamp_id,
sdisp_set, sdisp_media, sdisp_format,
svelo_set, svelo_media, svelo_fmt,
saccel_set, saccel_media, saccel_fmt,
]
#assert len(data) == 159 - nsub, len(data)
"""
160 NONLINLD I Nonlinear load set in transient problems (NONLINEAR)
161 PARTIT I Partitioning set (PARTN)
162 CYCLIC I Symmetry option in cyclic symmetry (DSYM)
163 RANDOM I Random analysis set (RANDOM)
164 NONPARAM I Nonlinear static analysis control parameters (NLPARM)
165 FLUTTER I Flutter set (FMETHOD)
166 LCC I Number of words in this record up to LSEM
167 GPFSET I Grid point force output set (GPFORCE)
168 GPFMEDIA I Grid point force output media (GPFORCE)
169 GPFFMT I Grid point force output format (GPFORCE)
170 ESESET I Strain energy output set (ESE)
171 ESEMEDIA I Strain energy output media (ESE)
172 ESEFMT I Strain energy output format (ESE)
173 ARFPTSET I Aerodynamic force output set (AEROF)
174 ARFMEDIA I Aerodynamic force output media (AEROF)
175 ARFFMT I Aerodynamic force output format (AEROF)
"""
nonlinear_id = _get_int('NONLINEAR', subcase)
partn = 0
dsym_id = 0
random_id = _get_int('RANDOM', subcase)
nlparm_id = _get_int('NLPARM', subcase)
fmethod_id = _get_int('FMETHOD', subcase)
nwords_to_lsem = -999999
gpforce_set, gpforce_media, gpforce_format = _get_set_media_load('GPFORCE', subcase)
ese_set, ese_media, ese_format = _get_set_media_load('ESE', subcase)
aerof_set, aerof_media, aerof_format = _get_set_media_load('AEROF', subcase)
data += [nonlinear_id, partn, dsym_id, random_id, nlparm_id, fmethod_id,
nwords_to_lsem,
gpforce_set, gpforce_media, gpforce_format,
ese_set, ese_media, ese_format,
aerof_set, aerof_media, aerof_format]
#assert len(data) == 175 - nsub, len(data)
"""
176 SEID I Superelement ID (SUPER)
177 LCN I Load column number (SUPER)
178 GUST I Gust load selection (GUST)
179 SEFINAL I Final Superelement ID (SEFINAL)
180 SEMG I Generate matrices (K,M,B,K4) for superelement set or ID (SEMG)
181 SEKR I Reduce stiffness matrix (K) for superelement set or ID (SEKR)
182 SELG I Generate static loads for superelement set or ID (SELG)
183 SELR I Reduce static loads for superelement set or ID (SELR)
184 SEEX I Superelement set or ID to be excluded (SEEXCLUDE)
185 K2GG(2) CHAR4 Name of direct input (g-set) stiffness matrix (K2GG)
187 M2GG(2) CHAR4 Name of direct input (g-set) stiffness matrix (M2GG)
189 B2GG(2) CHAR4 Name of direct input (g-set) stiffness matrix (B2GG)
"""
nsub -= 3
seid = _get_int('SUPER', subcase)
super_id = _get_int('SUPER', subcase)
gust_id = _get_int('GUST', subcase)
sefinal_id = _get_int('SEFINAL', subcase)
semg = _get_int('SEMG', subcase)
sekr = _get_int('SEKR', subcase)
selg = _get_int('SELG', subcase)
selr = _get_int('SELR', subcase)
seexclude = _get_int('SEEXCLUDE', subcase)
k2gg = _get_str('K2GG', subcase, nbytes=8)
m2gg = _get_str('M2GG', subcase, nbytes=8)
b2gg = _get_str('B2GG', subcase, nbytes=8)
data += [seid, super_id, gust_id, sefinal_id, semg, sekr, selg, selr, seexclude,
k2gg, m2gg, b2gg]
#assert len(data) == 189 - nsub, len(data)
"""
191 SVSET I Solution eigenvector output set (SVECTOR)
192 SVMEDIA I Solution eigenvector output media (SVECTOR)
193 SVFMT I Solution eigenvectors output format (SVECTOR)
194 FLUPTSET I Fluid pressure output set (MPRES)
195 FLUMEDIA I Fluid pressure output media (MPRES)
196 FLUFMT I Fluid pressure output format (MPRES)
197 HOUT(3) I Cyclic symmetry harmonic output (HOUTPUT)
200 NOUT(3) I Cyclic symmetry physical output (NOUTPUT)
203 P2G(2) CHAR4 Name of direct input (g-set) static loads matrix (P2G)
205 LOADSET I Sequence of static loads sets (LOADSET)
206 SEMR I Generate matrices (M,B,K4) for superelement set or ID (SEMG)
207 VONMISES I von Mises fiber (STRESS)
208 SECMDFLG I Superelement command existence flag
"""
svector_set, svector_media, svector_fmt = _get_set_media_load('SVECTOR', subcase)
mpress_set, mpress_media, mpress_fmt = _get_set_media_load('MPRES', subcase)
p2g = _get_str('P2GG', subcase, nbytes=8)
loadset_id = _get_int('LOADSET', subcase)
#semg = _get_int('SEMG', subcase)
semr = _get_int('SEMR', subcase)
stress_set, stress_media, stress_fmt, von_mises = _get_stress('STRESS', subcase)
houtput = 0
noutput = 0
se_cmd_flag = 0
data += [
svector_set, svector_media, svector_fmt,
mpress_set, mpress_media, mpress_fmt,
houtput, houtput, houtput,
noutput, noutput, noutput,
p2g, loadset_id, semr, von_mises, se_cmd_flag
#svector_set, svector_media, svector_fmt,
]
#assert len(data) == 208, len(data)
"""
209 GPSPTSET I Grid point stress output set (GPSTRESS)
210 GPSMEDIA I Grid point stress output media (GPSTRESS)
211 GPSFMT I Grid point stress output format (GPSTRESS)
212 STFSET I Grid point stress field output set (STRFIELD)
213 STFMEDIA I Grid point stress field output media (STRFIELD
214 STFFMT I Grid point stress field output format (STRFIELD)
215 CLOAD I Superelement static load combination set (CLOAD)
216 SET2ID I Legacy design sensitivity contraint and variable set (SET2)
217 DSAPRT I Legacy design sensitivity analysis print option (SENSITY)
218 DSASTORE I Legacy design sensitivity analysis store option (SENSITY)
219 DSAOUTPT I Legacy design sensitivity analysis OUTPUT4 option (SENSITY)
220 STNSET I Strain output set (STRAIN)
221 STNMEDIA I Strain output media (STRAIN)
222 STNFMT I Strain output format (STRAIN)
"""
gpstress_set, gpstress_media, gpstress_fmt = _get_set_media_load('GPSTRESS', subcase)
strfield_set, strfield_media, strfield_fmt = _get_set_media_load('STRFIELD', subcase)
#sensity_set, sensity_media, sensity_fmt = _get_set_media_load('SENSITY', subcase)
strain_set, strain_media, strain_fmt = _get_set_media_load('STRAIN', subcase)
cload_id = _get_int('CLOAD', subcase)
set2_id = 0
dsa_print_id = 0
dsa_store_id = 0
dsa_output_id = 0
data += [
gpstress_set, gpstress_media, gpstress_fmt,
strfield_set, strfield_media, strfield_fmt,
cload_id, set2_id,
dsa_print_id, dsa_store_id, dsa_output_id,
strain_set, strain_media, strain_fmt,
]
#assert len(data) == 222, len(data)
"""
223 APRESS I Aerodynamic pressure output set (APRESSURE)
224 TRIM I Aerostatic trim variable constrain set (TRIM)
225 MODLIST I Output modes list set (OMODES)
226 REESETF I Real eigenvalue extraction set for fluid (METHOD(FLUID))
227 ESDPTSET I Element stress discontinuity output set (ELSDCON)
228 ESDMEDIA I Element stress discontinuity output media (ELSDCON)
229 ESDFMT I Element stress discontinuity output format (ELSDCON)
230 GSDPTSET I Grid point stress discontinuity output set (GPSDCON)
231 GSDMEDIA I Grid point stress discontinuity output media (GPSDCON)
232 GSDFMT I Grid point stress discontinuity output format (GPSDCON)
"""
apress = _get_int('APRESSURE', subcase)
trim_id = _get_int('TRIM', subcase)
omodes = _get_int('OMODES', subcase)
method_fluid = _get_int('METHOD(FLUID)', subcase)
elsdcon_set, elsdcon_media, elsdcon_fmt = _get_set_media_load('ELSDCON', subcase)
gpsdcon_set, gpsdcon_media, gpsdcon_fmt = _get_set_media_load('GPSDCON', subcase)
data += [
apress, trim_id, omodes, method_fluid,
elsdcon_set, elsdcon_media, elsdcon_fmt,
gpsdcon_set, gpsdcon_media, gpsdcon_fmt,
]
"""
233 SEDV I Generate pseudo-loads for superelement set or identification number (SEDV)
234 SERE I Generate responses for superelement set or ID (SERESP)
235 SERS I Restart processing for superelement set or ID (SERS)
236 CNTSET I Slideline contact output set (BOUTPUT)
237 CNTMEDIA I Slideline contact output media (BOUTPUT)
238 CNTFMT I Slideline contact output format (BOUTPUT)
239 DIVERG I Aerostatic divergence control parameter set (DIVERG)
240 OUTRCV I P-element output control parameters (OUTRCV)
241 STATSUBP I Static subcase identification number for pre-load (STATSUB(PRELOAD))
242 MODESELS I Mode selection set identification number for the structure (MODESELECT)
243 MODESELF I Mode selection set identification number for the fluid (MODESELECT)
244 SOLNUM I Solution sequence number
245 ANLOPT I SOL 601 analysis option: 106, 129, 153 or 159
"""
sedv = _get_int('SEDV', subcase)
sere = _get_int('SERE', subcase)
sers = _get_int('SERS', subcase)
boutput_set, boutput_media, boutput_fmt = _get_set_media_load('BOUTPUT', subcase)
diverg_id = _get_int('DIVERG', subcase)
outrcv = 0
statsub_preload = _get_int('STATSUB(PRELOAD)', subcase)
modes_select_structure = 0
mode_select_fluid = 0
sol = 0
if model.sol is not None:
sol = model.sol
sol_method = 0
if model.sol_method is not None:
sol_method = model.sol_method
data += [
sedv, sere, sers,
boutput_set, boutput_media, boutput_fmt,
diverg_id, outrcv,
statsub_preload, modes_select_structure, mode_select_fluid,
sol, sol_method,
]
"""
246 ADAPT I P-element adaptivity control parameter set (ADAPT)
247 DESOBJ I Design objective set (DESOBJ)
248 DESSUB I Design constraint set for current subcase (DESSUB)
249 SUBSPAN I DRSPAN defined set ID of DRESP1 responses specific to this subcase.
250 DESGLB I Design constraint set for all subcases (DESGLB)
251 ANALYSIS CHAR4 Type of analysis (ANALYSIS)
252 GPQSTRS I CQUAD4 grid point corner stress option (STRESS)
253 GPQFORC I CQUAD4 grid point corner force option (STRESS)
254 GPQSTRN I CQUAD4 grid point corner strain option (STRESS)
255 SUPORT1 I Supported degree-of-freedom set (SUPORT1)
256 STATSUBB I Static subcase ID for buckling (STATSUB(BUCKLE))
257 BCID I Boundary condition ID (BC)
"""
adapt = _get_int('ADAPT', subcase)
desobj = _get_int('DESOBJ', subcase)
dessub = _get_int('DESSUB', subcase)
subspan = _get_int('DRSPAN', subcase)
desglb = _get_int('DESGLB', subcase)
analysis = _get_str('ANALYSIS', subcase, nbytes=4)
stress_corner, force_corner, strain_corner = 0, 0, 0
suport1 = _get_int('SUPORT1', subcase)
statsub_buckle = _get_int('STATSUB(BUCKLE)', subcase)
bc = _get_int('BC', subcase)
data += [
adapt, desobj, dessub, subspan, desglb, analysis,
stress_corner, force_corner, strain_corner,
suport1, statsub_buckle, bc,
]
"""
258 AUXMODEL I Auxiliary model ID (AUXMODEL)
259 ADACT I P-element adaptivity active subcase flag (ADACT)
260 DATSET I P-element output set (DATAREC)
261 DATMEDIA I P-element output media (DATAREC)
262 DATFMT I P-element output format (DATAREC)
263 VUGSET I View-grid and element output set (VUGRID)
264 VUGMEDIA I View-grid and element output media (VUGRID)
265 VUGFMT I View-grid and element output format (VUGRID)
266 MPCFSET I Forces of multipoint constraint output set (MPCFORCE)
267 MPCMEDIA I Forces of multipoint constraint output media (MPCFORCE)
268 MPCFFMT I Forces of multipoint constraint output format (MPCFORCE)
269 REUESET I Real unsymmetric eigenvalue extraction set (UMETHOD)
270 DAMPTBLF I Structural damping table set for the fluid (SDAMP(FLUID)
271 ITERMETH I Iterative solver control parameters (SMETHOD)
272 NLSSET I Nonlinear stress output set (NLSTRESS)
273 NLSMEDIA I Nonlinear stress output media (NLSTRESS)
274 NLSFMT I Nonlinear stress output format (NLSTRESS)
"""
auxmodel = _get_int('AUXMODEL', subcase)
adact = _get_int('ADACT', subcase)
data_set, data_media, data_fmt = _get_set_media_load('DATAREC', subcase)
vu_set, vu_media, vu_fmt = _get_set_media_load('VUGRID', subcase)
mpc_set, mpc_media, mpc_fmt = _get_set_media_load('MPCFORCE', subcase)
nlstress_set, nlstress_media, nlstress_fmt = _get_set_media_load('NLSTRESS', subcase)
umethod = 0
sdamp_fluid = _get_int('SDAMP(FLUID)', subcase)
smethod = _get_int('SMETHOD', subcase)
data += [
auxmodel, adact,
data_set, data_media, data_fmt,
vu_set, vu_media, vu_fmt,
mpc_set, mpc_media, mpc_fmt,
umethod, sdamp_fluid, smethod,
nlstress_set, nlstress_media, nlstress_fmt,
]
"""
275 MODTRKID I Mode tracking control parameter set (MODTRAK)
276 DSAFORM I Design sensitivity output format: 1=yes,2=no (DSAPRT)
277 DSAEXPO I Design sensitivity output export: 1=no,2=yes (DSAPRT)
278 DSABEGIN I Design sensitivity output start iteration (DSAPRT)
279 DSAINTVL I Design sensitivity output interval (DSAPRT)
280 DSAFINAL I Design sensitivity output final iteration (DSAPRT)
281 DSASETID I Design sensitivity output set (DSAPRT)
282 SORTFLG I Overall SORT1/SORT2 flag: 1 means SORT1 and 2 means SORT2.
283 RANDBIT I Random analysis request bit pattern (DISP,VELO, and so on)
"""
modtrack_id = _get_int('MODTRAK', subcase)
dsa_form, dsa_expo, dsa_begain, dsa_interval, dsa_final, dsa_set = 0, 0, 0, 0, 0, 0
sort_flag = 1
randbit = 0
data += [
modtrack_id, dsa_form, dsa_expo, dsa_begain, dsa_interval, dsa_final, dsa_set,
sort_flag, randbit,
]
"""
284 AECONFIG(2) CHAR4 Aerodynamic configuration name
286 AESYMXY I Symmetry flag for aerodynamic xy plane
287 AESYMXZ I Symmetry flag for aerodynamic xz plane
288 DISREL I Displacement relative output flag
289 VELREL I Velocity relative output flag
290 ACCREL I Acceleration relative output flag
291 GPEPTSET I Grid point strain output set (GPSTRAIN)
292 GPEMEDIA I Grid point strain output media (GPSTRAIN)
293 GPEFMT I Grid point strain output format (GPSTRAIN)
294 TEMPMAT I Thermal material set TEMP(MAT).
295 AECSSSET I Aerodynamic Control Surface Schedule (CSSCHD)
296 EKEPTSET I Element kinetic energy output set (EKE)
297 EKEMEDIA I Element kinetic energy media (EKE)
298 EKEFMT I Element kinetic energy format (EKE)
299 EKETHRSH RS Element kinetic energy threshold (EKE)
300 EDEPTSET I Element damping energy output set (EDE)
301 EDEMEDIA I Element damping energy media (EDE)
302 EDEFMT I Element damping energy format (EDE)
303 EDETHRSH RS Element damping energy threshold (EDE)
"""
aeconfig = b'AECONFIG'
ae_sym_xy = _get_int('AESYMXY', subcase)
ae_sym_xz = _get_int('AESYMXZ', subcase)
disp_rel, velo_rel, accel_rel = 0, 0, 0
temp_mat_id2 = _get_int('TEMP(MAT)', subcase)
csschd_id = _get_int('CSSCHD', subcase)
gpstrain_set, gpstrain_media, gpstrain_fmt = _get_set_media_load('GPSTRAIN', subcase)
eke_set, eke_media, eke_fmt = _get_set_media_load('EKE', subcase)
ede_set, ede_media, ede_fmt = _get_set_media_load('EDE', subcase)
eke_threshold = ede_threshold = 0.0
data += [
aeconfig, ae_sym_xy, ae_sym_xz,
disp_rel, velo_rel, accel_rel,
gpstrain_set, gpstrain_media, gpstrain_fmt,
temp_mat_id2, csschd_id,
eke_set, eke_media, eke_fmt, eke_threshold,
ede_set, ede_media, ede_fmt, ede_threshold,
]
"""
304 PANCON I Panel contributions set (PANCON)
305 PCMEDIA I Panel contributions media (PANCON)
306 PCFMT I Panel contributions format (PANCON)
307 PCFORM I Panel contributions form (PANCON)
308 PCTOPP I Panel contributions TOPP (PANCON)
309 GCTOPG I Grid contributions TOPG (GRDCON)
310 PCSOL I Panel contributions SOLUTION (PANCON)
311 PCPAN I Panel contributions PANEL (PANCON)
312 GCGRID I Grid contributions GRID (GRDCON)
313 MODSLF I Mode selection set (fluid)
"""
pancon_set, pancon_media, pancon_fmt = _get_set_media_load('PANCON', subcase)
gridcon, mode_select = 0, 0
data += [
pancon_set, pancon_media, pancon_fmt, pancon_form, pancon_topp, pancon_topg, pancon_solution, pancon_panel,
gridcon, mode_select]
"""
314 EFFMASET I Modal effective mass output set (MEFFMASS)
315 EFFMAGID I Modal effective mass GID (MEFFMASS)
316 EFFMATHR RS Modal effective mass fraction threshold (MEFFMASS)
317 A2GG(2) CHAR4 Name of direct input (g-set) acoustic coupling matrix (A2GG)
319 RCRSET I RCROSS output set
320 RCRFMT I RCROSS format
321 AEUXREF I AEUXREF
"""
meffmass_set, meffmass_node, meffmasss_threshold = _get_set_media_load('MEFFMASS', subcase)
a2gg_name = _get_str('A2GG', subcase, nbytes=8)
rcross_set, rcross_media, rcross_fmt = _get_set_media_load('RCROSS', subcase)
aeuxref = 0
data += [
meffmass_set, meffmass_node, meffmasss_threshold,
a2gg_name,
rcross_set, rcross_fmt,
aeuxref]
"""
322 GCHK I Ground Check Flag (GROUNDCHECK)
323 GCHKOUT I Ground Check Output (GROUNDCHECK)
324 GCHKSET I Ground Check Set (GROUNDCHECK)
325 GCHKGID I Ground Check Gid (GROUNDCHECK)
326 GCHKTHR RS Ground Check Thresh (GROUNDCHECK)
327 GCHKRTHR RS Ground Check RThresh (GROUNDCHECK)
328 GCHKDREC I Ground Check Data recovery (GROUNDCHECK)
329 ASPCMED I Output Media Request (AUTOSPC)
330 ASPCEPS RS EPS value for fixup (AUTOSPC)
331 ASPCPRT I EPS value for printing (AUTOSPC)
332 ASPCPCH I Punch Set Id (AUTOSPC)
"""
data = []
"""
333 EXSEGEOM I External superelement geometry flag (EXTSEOUT)
334 NA2GG I Internal set id for A2GG
335 NK2PP I Internal set id for K2PP
336 NM2PP I Internal set id for M2PP
337 NB2PP I Internal set id for B2PP
338 NK2GG I Internal set id for K2GG
339 NM2GG I Internal set id for M2GG
340 NB2GG I Internal set id for B2GG
341 NP2G I Internal set id for P2G
"""
"""
342 GEODSET I Geometry Check DISP Set identification number (GEOMCHECK)
343 GEODMXMN I Geometry Check DISP Max/Min (GEOMCHECK)
344 GEODOCID I Geometry Check DISP Max/Min Output Cor. Sys. (GEOMCHECK)
345 GEODNUMB I Geometry Check No. of DISP Max/Min Output (GEOMCHECK)
346 GEOLSET I Geometry Check OLOAD Set identification number (GEOMCHECK)
347 GEOLMXMN I Geometry Check OLOAD Max/Min (GEOMCHECK)
348 GEOLOCID I Geometry Check OLOAD Max/Min Output Cor. Sys. (GEOMCHECK)
349 GEOLNUMB I Geometry Check No. of OLOAD Max/Min Output (GEOMCHECK)
350 GEOSSET I Geometry Check SPCF Set identification number (GEOMCHECK)
351 GEOSMXMN I Geometry Check SPCF Max/Min (GEOMCHECK)
352 GEOSOCID I Geometry Check SPCF Max/Min Output Cor. Sys. (GEOMCHECK)
353 GEOSNUMB I Geometry Check No. of SPCF Max/Min Output (GEOMCHECK)
354 GEOMSET I Geometry Check MPCF Set identification number (GEOMCHECK)
355 GEOMMXMN I Geometry Check MPCF Max/Min (GEOMCHECK)
356 GEOMOCID I Geometry Check MPCF Max/Min Output Cor. Sys. (GEOMCHECK)
357 GEOMNUMB I Geometry Check No. of MPCF Max/Min Output (GEOMCHECK)
358 GEOASET I Geometry Check ACCE Set identification number (GEOMCHECK)
359 GEOAMXMN I Geometry Check ACCE Max/Min (GEOMCHECK)
360 GEOAOCID I Geometry Check ACCE Max/Min Output Cor. Sys. (GEOMCHECK)
361 GEOANUMB I Geometry Check No. of ACCE Max/Min Output (GEOMCHECK)
362 GEOVSET I Geometry Check VELO Set identification number (GEOMCHECK)
363 GEOVMXMN I Geometry Check VELO Max/Min (GEOMCHECK)
364 GEOVOCID I Geometry Check VELO Max/Min Output Cor. Sys. (GEOMCHECK)
365 GEOVNUMB I Geometry Check No. of VELO Max/Min Output (GEOMCHECK)
"""
"""
366 NTFL I Internal set id for TFL
367 BCONTACT I BCONTACT Set identification number
368 GPKESET I Grid point kinetic energy output set (GPKE)
369 GPKEMEDI I Grid point kinetic energy media (GPKE)
370 GPKEFMT I Grid point kinetic energy format (GPKE)
371 ELMSUM I Element Summary Output (ELSUM)
"""
"""
372 WCHK I Weight Check Flag (WEIGHTCHECK)
373 WCHKOUT I Weight Check Output (WEIGHTCHECK)
374 WCHKSET I Weight Check Set identification number (WEIGHTCHECK)
375 WCHKGID I Weight Check GID (WEIGHTCHECK)
376 WCHKCGI I Weight Check CGI (WEIGHTCHECK)
377 WCHKWM I Weight Check Weight/Mass units (WEIGHTCHECK)
"""
"""
378 EXSEOUT I External Superelement Output Flag
379 EXSEMED I External Superelement Output Media
380 EXSEUNIT I External Superelement Output Unit
381 EXSEASMB I External Superelement Output ASMBULK Flag
382 EXSEEXTB I External Superelement Output EXTBULK Flag
"""
"""
383 K42GG(2) CHAR4 Name of direct input (g-set) structural damping matrix K42GG
385 NK42GG I Internal set id for K42GG
386 EXSESTIF I External Superelement Output STIFFNESS Flag
387 EXSEMASS I External Superelement Output MASS Flag
388 EXSEDAMP I External Superelement Output DAMPING Flag
389 EXSEK4DA I External Superelement Output K4DAMP Flag
390 EXSELOAD I External Superelement Output LOADS Flag
391 EXSESEID I External Superelement Output SE ID
392 EXSEDMFX(2) CHAR4 External Superelement DMIGSFIX String
394 NSMID I Non-Structural Mass Set ID
395 NSELD I Internal SID for SELOAD
396 FSELD I Internal SID for SELOAD scale factor
"""
"""
397 OP4UNIT I MBDEXPORT OP4 logical unit number
398 RPOSTS1 I Random RPOSTS1 parameter
399 CHECK I ADAMSMNF/MBDEXPORT CHECK flag
400 ADMOUT I ADAMSMNF ADMOUT flag//MBDEXPORT RECVROP2 flag
401 FLEXBODY I ADAMSMNF/MBDEXPORT FLEXBODY flag
402 FLEXONLY I ADAMSMNF/MBDEXPORT FLEXONLY flag
403 MINVAR I ADAMSMNF/MBDEXPORT MINVAR parameter
404 PSETID I ADAMSMNF/MBDEXPORT PSETID parameter
405 OUTGSTRS I ADAMSMNF/MBDEXPORT OUTGSTRS flag
406 OUTGSTRN I ADAMSMNF/MBDEXPORT OUTGSTRN flag
407 RMSBIT I Random analysis RMS required bit pattern
408 MODESCC I MODES case control existence flag
409 RMSSF RS Random RMSSF parameter
410 UNDEF(3) None
"""
"""
413 BCSET I Contact Set ID
414 BCRESU I Contact results output
415 BCMEDIA I Contact results media code
416 BCFMT I Contact results format code
417 BCTYPE I Traction=1, Force=2, Both=3
418 GKRESU I Gasket results output
419 GKMEDIA I Gasket results media code
420 GKFMT I Gasket results format code
421 PRSSET I Pressure output set (PRESSURE)
422 PRSMEDIA I Pressure output media (PRESSURE)
423 PRSFMT I Pressure output format (PRESSURE)
424 FRFIN I FRFIN set number
425 PRSTOTAL I Pressure output:
total bit(0)=0,
scatter bit(0)=0
426 RSMETHOM I RSMETHOD parameter
427 ESETHRSH I ESE THRESHOLD
428 MDESET I Modal energy output set (MODALE)
429 MDEMEDI I Modal energy media (MODALE)
430 MCSOL I Modal contributions SOLUTION (MODCOM)
431 MCPAN I Modal contributions PANELMC (MODCOM)
432 MDEFMT I Modal energy output format (MODALE)
433 ACTLDSET I Acoustic load set (ALOAD)
434 MDECMPT I Modal energy computation set (MODALE)
435 MDESORT I Modal energy sort flag (MODALE)
436 MDETYPE I Modal energy type flag (MODALE)
437 MDECALC I Modal energy calculation flag (MODALE)
438 RMETSET I RMETHOD set id
439 RIGID I Rigid element type
440 BOLTPRE I Bolt preload set
441 BGSET I Glue set id
442 MCTOPF I Modal contributions TOPF (MODCON)
443 IPRPU I RANDOM print/punch option
444 ADMCHK I ADMRECVR ADMCHK flag
445 MODSEL I Mode selection set (structural)
446 ADMREC I ADMRECVR activation flag
447 ADMFORM I ADMRECVR ADMFORM parameter
448 MSRMODE I ADMRECVR MSRMODE parameter
449 RGBODY I ADMRECVR RGBODY flag
450 MSGLVL I ADMRECVR MSGLVL parameter
451 EBDSET I Element birth/death set
452 SHELLTHK I Shell thickness results output flag
453 STMEDIA I Shell thickness results media code
454 STFMT I Shell thickness results format code
455 ICTYPE I Transient IC type
456 RMXMN I RMAXMIN flag to indicate presence of card
457 ROPT I RMAXMIN print, plot, punch flag
458 RINP I RMAXMIN stress, force, displacement flag
459 RABS I RMAXMIN maximum, absolute, minimum flag
460 RAPP I RMAXMIN approach flag
461 RMXTRN I Alternate request of RMXTRN parameter
462 NPAVG I Number of maximum peaks to average
463 RSTAR RS Start time step for desired interval
464 RSTOP RS End time step for desired interval
465 MODCON I Modal contribution set
466 MCMEDIA I Modal contribution media
467 MCFMT I Modal contribution format
468 MCFORM I Modal contribution FORM
469 MCTOPS I Modal contributions TOPS (MODCON)
470 PSDD I SOL200: int. set no. for grids w/ PSDDISP design response
471 PSDV I SOL200: int. set no. for grids w/ PSDVELO design response
472 PSDA I SOL200: int. set no. for grids w/ PSDACCL design response
473 ISTAR I Start subcase id (RMAXMIN)
474 ISTOP I End subcase id (RMAXMIN)
475 FK2PP I Internal set id for K2PP scale factor
476 FM2PP I Internal set id for M2PP scale factor
477 FB2PP I Internal set id for B2PP scale factor
478 FK2GG I Internal set id for K2GG scale factor
479 FM2GG I Internal set id for M2GG scale factor
480 FB2GG I Internal set id for B2GG scale factor
481 FK42GG I Internal set id for K42GG scale factor
482 FP2G I Internal set id for P2G scale factor
483 FA2GG I Internal set id for A2GG scale factor
484 GPRSORT I Global ply results sorted with global ply ID numbers
485 EFLOAD1 I External field load orientation
486 EFLOAD2 I External field coordinate system
487 BGRESU I Glue results output
488 BGMEDIA I Glue results media code
489 RANLOOP I RANDOM loop number; used with ANALYSIS = RANDOM
490 BGTYPE I Glue results type
491 RSVCOMP I Residual vector component flag
492 RSVOPTC I Residual vector component options
493 RSVSYST I Residual vector system flag
494 RSVOPTS I Residual vector system options
495 PLSLOC I Ply strain or stress locations
496 ELSMOP I ELSUM output option
497 ERPSET I ERP set
498 ERPSORT I SORT1/SORT2
499 ERPMEDIA I ERP output media
500 ERPFMT I ERP output format
501 ERPSOL I ERP SOLUTION set
502 ERPELEM I ERP element output
503 ERPCSV I Unused. Reserved for ERP
504 ERPCOEFF RS ERP coefficient
505 UNDEF(4) None
509 ATVFSID I SID of ATVF
510 ATVUNIT I ATVOUT OP2 unit
511 ATVSETNO I ATVOUT microphone set identification number
512 ATVFLAGS I ATVOUT bits for flags = 1 if ATVOUT specified
513 ACPANEL I PANEL in ACPOWER:
0 for none,
-1 for all,
>0 for panel identification number
514 RMETCMR I Rotor dynamics CMR method
515 EFFMAT1 RS Modal effective mass minimum total value in X displacement
516 EFFMAT2 RS Modal effective mass minimum total value in Y displacement
517 EFFMAT3 RS Modal effective mass minimum total value in Z displacement
518 EFFMAMIT I Modal effective mass maximum number of additional iterations
519 SEQDEP CHAR4 Sequence dependency on or off (SEQDEP)
520 NLCSET I Set Identification nonlinear control (NLCNTL)
521 GSTRESET I Gauss point stress output set (GSTRESS)
522 GSTMEDIA I Gauss point stress output media (GSTRESS)
523 GSTREFMT I Gauss point stress output format (GSTRESS)
524 GSTRNSET I Gauss point total strain output set (GSTRAIN)
525 GSNMEDIA I Gauss point total strain output media (GSTRAIN)
526 GSTRNFMT I Gauss point total strain output format (GSTRAIN)
527 ELSTNSET I Nodal elastic strain on elements, output set (ELSTRN)
528 ELNMEDIA I Nodal elastic strain on elements, output media (ELSTRN)
529 ELSTNFMT I Nodal elastic strain on elements, output format (ELSTRN)
530 GELSSET I Gauss point elastic strains on elements, output set (GELSTRN)
531 GESMEDIA I Gauss point elastic strains on elements, output media (GELSTRN)
532 GELSFMT I Gauss point elastic strains on elements, output format (GELSTRN)
533 CRSTSET I Nodal creep strains on elements, output set (CRSTRN)
534 CRSMEDIA I Nodal creep strains on elements, output media (CRSTRN)
535 CRSTFMT I Nodal creep strains on elements, output format (CRSTRN)
536 GCRSSET I Gauss point creep strains on elements, output set (GCRSTRN)
537 GCRMEDIA I Gauss point creep strains on elements, output media (GCRSTRN)
538 GCRSFMT I Gauss point creep strains on elements, output format (GCRSTRN)
539 PLSTSET I Nodal plastic strains on elements, output set (PLSTRN)
540 PLSMEDIA I Nodal plastic strains on elements, output media (PLSTRN)
541 PLSTFMT I Nodal plastic strains on elements, output format (PLSTRN)
542 GPLSSET I Gauss point plastic strains on elements, output set (GPLSTRN)
543 GPLMEDIA I Gauss point plastic strains on elements, output media (GPLSTRN)
544 GPLSFMT I Gauss point plastic strains on elements, output format (GPLSTRN)
545 THSTSET I Nodal thermal strains on elements, output set (THSTRN)
546 THSMEDIA I Nodal thermal strains on elements, output media (THSTRN)
547 THSTFMT I Nodal thermal strains on elements, output format (THSTRN)
548 GTHSSET I Gauss point thermal strains on elements, output set (GTHSTRN)
549 GTHMEDIA I Gauss point thermal strains on elements, output media (GTHSTRN)
550 GTHSFMT I Gauss point thermal strains on elements, output format (GTHSTRN)
551 OTEMPSET I Temperatures used at solution points, output set (OTEMP)
552 OTEMEDIA I Temperatures used at solution points, output media (OTEMP)
553 OTEMPFMT I Temperatures used at solution points, output format (OTEMP)
554 NONCUP I ADAMSMNF/MBDEXPORT
555 DTEMPSET I Time dependent temperature load (DTEMP)
556 JINSET I J integral output set (JINTEG)
557 JINMEDIA I J integral output media (JINTEG)
558 ADAPTRESU I Adaptive Meshing set, output error estimator
559 ADAPTMEDIA I Error Estimator media code
560 ADAPTPYE I Error Estimator based on ENERGY FORM or STRESS FORM and STEP
561 INITSSET I Initial stress/strain:
INITS=n where n=0 for none,
n>0 for INITS or INITADD bulk entry SID,
n<0 for invalid value
562 OSTNSET I Set no for initial strain output after subcase 0
563 OPRESSET I Pressures used at solution points, output set (OPRESS)
564 OPRESDIA I Pressures used at solution points, output media (OPRESS)
565 BOLTRESMED I Bolt axial force, shear force, bending moment, and strain output media
566 CYCLSET I SOL 401 cyclic symmetry set IF (CYCSET)
567 OSTNOPT I Output options for initial strain after subcase 0:
=1 for element-node;
=2 for Gauss;
=3 for both. (For INITSTN/INITSTS)
568 OSTNMED I Media for initial strain output after subcase 0; PRINT/PUNCH/PLOT. (For INITSTN/INITSTS)
569 ACPWRGST I Acoustic power, GROUP output set (ACPOWER)
570 ACPWRAST I Acoustic power, AMLREG output set (ACPOWER)
571 ACPWRDIA I Acoustic power, output media (ACPOWER)
572 ACPWRFMT I Acoustic power, output format (ACPOWER)
573 MPINTSET I Microphone point intensity, output set (ACINTENSITY)
574 MPINTDIA I Microphone point intensity, output media (ACINTENSITY)
575 MPINTFMT I Microphone point intensity, output format (ACINTENSITY)
576 OTMFORC I Output set (OTMFORC)
577 OTMFORCM I Output media (OTMFORC)
578 OTMFORCF I Output format (OTMFORC)
579 MPVELSET I Microphone point velocity, output set (MPVELOCITY)
580 MPVELDIA I Microphone point velocity, output media (MPVELOCITY)
581 MPVELFMT I Microphone point velocity, output format (MPVELOCITY)
582 PFRESUSET I Progressive failure analysis of composites, output set (PFRESULTS)
583 PFRESUDIA I Progressive failure analysis of composites, output media (PFRESULTS)
584 PFRESUFMT I Progressive failure analysis of composites, output code for damage value/damage status/damage energy (PFRESULTS)
585 MONVAR I Maya monitor variable for displacement
586 CYCFSET I Forces of cyclic constraint output set (CYCFORCE)
587 CYCMEDIA I Forces of cyclic constraint output media (CYCFORCE)
588 CYCFFMT I Forces of cyclic constraint output format (CYCFORCE)
589 BOLTRESULTS I Bolt axial force, shear force, bending moment, and strain
590 STVARSET I State variable values on elements, output set (STATVAR)
591 STVARMEDIA I State variable values on elements, output media (STATVAR)
592 STVARFMT I State variable values on elements, output format (STATVAR)
593 CZRESUSET I Cohesive elements, output set (CZRESULTS)
594 CZRESUDIA I Cohesive elements, output media (CZRESULTS)
595 CZRESUFMT I Cohesive elements, output code for traction/relative motion/damage value (CZRESULTS)
596 CKGAPSET I Gap results, output set (CKGAP)
597 CKGAPDIA I Gap results, output media (CKGAP)
598 CKGAPFMT I Gap results, output location:
=1 for grid;
=2 for Gauss;
=3 for both (CKGAP)
599 GRDCON I Grid contributions set
600 GCMEDIA I Grid contributions media
601 GCFMT I Grid contributions format
602 GCFORM I Grid contributions FORM
603 GCSOL I Grid contributions SOLUTION
604 INITSOFF I Initial strain offset for balanced initial
stress/strain: INITS(OFFSET)=n where n=0
for none, n>0 for INITS or INITADD bulk entry
SID, n<0 for invalid value
605 INPWRGST I Incident acoustic power, GROUP output set (INPOWER)
606 INPWRFST I Incident acoustic power, FACES output set (INPOWER)
607 INPWRDIA I Incident acoustic power, output media (INPOWER)
608 INPWRFMT I Incident acoustic power, output format (INPOWER)
609 TRPWRGST I Transmitted acoustic power, GROUP output set (TRPOWER)
610 TRPWRAST I Transmitted acoustic power, AMLREG output set (TRPOWER)
611 TRPWRDIA I Transmitted acoustic power, output media (TRPOWER)
612 TRPWRFMT I Transmitted acoustic power, output format (TRPOWER)
613 TRLOSFLG I Acoustic transmission loss, YES/NO flag
(1=yes, 0=no) (TRLOSS)
614 TRLOSDIA I Acoustic transmission loss, output media (TRLOSS)
615 TRLOSFMT I Acoustic transmission loss, output format (TRLOSS)
616 NLARCST I SOL 401 nonlinear arc-length solution flag set IF (NLARCL)
617 IMPRFST I SOL 401 imperfection set flag, SET IF (IMPERF)
618 MONPNT I MONPNTn output bit flag(s)
619 FRFOUT I Frequency-dependent component output flag (FRFOUT)
620 FRFOPT I Frequency-dependent component output options (FRFOUT)
621 FRFSEID I SEID for frequency-dependent component output (FRFOUT)
622 FRFOP2 I Unit for frequency-dependent component output (FRFOUT)
623 RMSINT I Random RMSINT parameter
624 XSEMODAC I External superelement MODACC parameter
625 XSEFSCOU I External superelement FSCOUP parameter
626 SCSET I Optimization static subcase set identification number (DESOBJ)
627 SCFUNC I Optimization static subcase function option (DESOBJ)
628 ELAR I Element add/remove set identification number
629 ELAROFLG I Element status output flag:
1=yes,
0=no
(ELAROUT)
630 DMTRSET I
1=yes (default),
0=no
631 DMTRMEDIA I
bit(1)=1 (default), bit(1)=0 noprint;
bit(2)=1 punch, bit(2)=0 nopunch (default);
bit(3)=1 plot
632 DMTRFMT I
0=real/imaginary (default),
1=magnitude/phase
633 DMTRTYPE I Unused
634 PEAKOUT I PEAKOUT bulk entry selection
635 ELAROMDA I Element status output, output media (ELAROUT)
636 FLXSLI I Flexible slider identification number
637 JCONSET I Joint constraint set identification number
638 JRESSET I Kinematic joints output set (JRESULTS)
639 JRESMEDIA I Kinematic joints output media (JRESULTS)
640 JRESFMT I Kinematic joints output code:
1=force,
2=moment,
4=position,
8=rotation,
16=speed,
32=rotation speed (JRESULTS)
641 FLXRSET I Flexible slider output set (FLXRESULTS)
642 FLXRMEDIA I Flexible slider output media (FLXRESULTS)
643 FLXRFMT I Flexible slider output code: 64-curvdisp (FLXRESULTS)
644 ACTEMP I ACTEMP bulk entry selection
645 DMTRLSET I
1=yes (default),
0=no
646 DMTRLSMEDIA I
bit(1)=1 print (default), bit(1)=0 noprint;
bit(2)=1 punch, bit(2)=0 nopunch (default);
bit(3)=1 plot
647 DMTRLSFMT I Unused
648 ENFUNC I Optimization entity response function option (DESOBJ)
649 GPFSOL I GPFORCE output frequency selection value
650 CSMSET I Co-simulation (wetted) region set identification number
651 DISPSOL I DISPLACEMENT output frequency selection value
652 VELOSOL I VELOCITY output frequency selection value
653 ACCESOL I ACCELERATION output frequency selection value
654 PRESSOL I PRESSURE output frequency selection value
655 OPRESSOPT I Pressures used at solution points (output options):
0=COUPLED,
1=FPP,
2=BOTH
656 NLCSETG I Set Identification nonlinear control specified globally (NLCNTL):
0=none
657 ACORDCHK CHAR4 Acoustic max frequency and element order check mode = STOP
658 UNDEF(542) None
"""
data += []
"""
LCC LSEM(C) I Number of symmetry subcase coefficients from item SYMFLG
The value for LCC is set by word 166
LCC+1 COEF RS Symmetry subcase coefficients (SUBSEQ or SYMSEQ)
Word LCC+1 repeats LSEM times
LCC+2 SETID I Set identification number
LCC+3 SETLEN(C) I Length of this set
LCC+4 SETMEM I Set member identification number
Word LCC+4 repeats SETLEN times
Words LCC+2 through LCC+4 repeat NSETS times
LCC+5 PARA CHAR4 Hard-coded to "PARA"
LCC+6 PARLEN(C) I Length of this parameter value specification
LCC+7 CHTYPE(C) I Character type flag: 3 means character, 2 otherwise
LCC+8 PARAM(2) CHAR4 Hard-coded to "PARA" and "M "
LCC+10 PNAME(2) CHAR4 Name of parameter
PARLEN=8 Length
LCC+12 INTEGER I Integer value
PARLEN=9 Real-double parameter value
LCC+12 TYPE I Real type - hard-coded to -4
LCC+13 REAL RD Real-double value
PARLEN=10 Complex-single parameter value
LCC+12 RTYPE I Real part type - hard-coded to -2
LCC+13 REAL RS Real part value
LCC+14 ITYPE I Imaginary part type - hard-coded to -2
LCC+15 IMAG RS Imaginary part value
PARLEN=12 Complex-double parameter value
LCC+12 RTYPE I Real part type - hard-coded to -4
LCC+13 REAL RD Real part value
LCC+14 ITYPE I Imaginary part type - hard-coded to -4
LCC+15 IMAG RD Imaginary part value
End PARLEN
Words LCC+5 through max repeat until NANQ occurs
Words LCC+5 through LCC+15 repeat until End of Record
"""
asdf
assert -999999 not in data
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,622
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/cards/base_card.py
|
"""
defines:
- BaseCard()
- Element()
- Property()
- Material()
- word, num = break_word_by_trailing_integer(pname_fid)
- word, num = break_word_by_trailing_parentheses_integer_ab(pname_fid)
"""
from __future__ import annotations
from abc import abstractmethod, abstractproperty, abstractclassmethod
from typing import List, Tuple, Union, Optional, Any, TYPE_CHECKING
import numpy as np
#from numpy import nan, empty, unique
from pyNastran.bdf.bdf_interface.bdf_card import BDFCard
from pyNastran.utils import object_attributes, object_methods
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.bdf.field_writer import print_card, print_card_8, print_card_16, print_card_double
from pyNastran.bdf.field_writer_8 import is_same
from pyNastran.utils import deprecated
from pyNastran.bdf.cards.expand_card import expand_thru, expand_thru_by
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
#from abc import ABC, abstractmethod
def write_card(comment: str, card: List[Union[int, float, str, None]],
               size: int, is_double: bool) -> str:
    """
    Writes a card in 8/16/16-double field format, prepending its comment.

    Parameters
    ----------
    comment : str
        the comment to prepend (may be '')
    card : List[int/float/str/None]
        the fields that define the card
    size : int
        field width; 8 or 16
    is_double : bool
        write in double-precision (large-field) format; only considered
        when size != 8

    Returns
    -------
    msg : str
        the comment followed by the formatted card

    .. note:: the annotation was ``List[Optional[int, float, str]]``,
              which is invalid typing syntax (``Optional`` takes one
              parameter); fixed to an equivalent valid form.
    """
    if size == 8:
        try:
            return comment + print_card_8(card)
        except RuntimeError:
            # a field doesn't fit in 8 characters; fall back to 16
            return comment + print_card_16(card)
    elif is_double:
        return comment + print_card_double(card)
    return comment + print_card_16(card)
class BaseCard:
    """
    Defines a series of base methods for every card class
    (e.g., GRID, CTRIA3) including:
     - deepcopy()
     - get_stats()
     - validate()
     - object_attributes(mode='public', keys_to_skip=None)
     - object_methods(self, mode='public', keys_to_skip=None)
     - comment
     - update_field(self, n, value)
    """
    def __init__(self) -> None:
        pass

    @abstractproperty
    def type(self) -> str:
        """the card name (e.g., 'GRID'); overwritten by the subclass"""
        return ''

    @abstractmethod
    def raw_fields(self):  # pragma: no cover
        """the full list of fields that define the card; overwritten by the subclass"""
        return []

    @abstractclassmethod
    def add_card(self, card, comment=''):  # pragma: no cover
        """builds the card from a BDFCard; overwritten by the subclass"""
        return BaseCard()

    def __deepcopy__(self, memo_dict):
        """copies the card by round-tripping it through its raw fields

        NOTE: memo_dict is intentionally unused; the card is rebuilt from
        raw_fields() via add_card() instead of recursively copying
        attributes (which would drag cross-referenced objects along).
        """
        raw_fields = self.raw_fields()
        card = BDFCard(raw_fields)
        return self.add_card(card, comment=self.comment)

    def get_stats(self) -> str:
        """Prints out an easy to read summary of the card"""
        msg = '---%s---\n' % self.type
        for name in sorted(self.object_attributes()):
            value = getattr(self, name)
            msg += ' %-6s : %r\n' % (name, value)
        return msg

    def deprecated(self, old_name: str, new_name: str, deprecated_version: str) -> None:
        """deprecates methods"""
        deprecated(old_name, new_name, deprecated_version, levels=[0, 1, 2])

    def validate(self) -> None:
        """card checking method that should be overwritten"""
        pass

    def object_attributes(self, mode: str='public',
                          keys_to_skip: Optional[List[str]]=None,
                          filter_properties: bool=False) -> List[str]:
        """.. seealso:: `pyNastran.utils.object_attributes(...)`"""
        if keys_to_skip is None:
            keys_to_skip = []
        my_keys_to_skip = []  # type: List[str]
        return object_attributes(self, mode=mode, keys_to_skip=keys_to_skip+my_keys_to_skip,
                                 filter_properties=filter_properties)

    def object_methods(self, mode: str='public',
                       keys_to_skip: Optional[List[str]]=None) -> List[str]:
        """.. seealso:: `pyNastran.utils.object_methods(...)`"""
        if keys_to_skip is None:
            keys_to_skip = []
        my_keys_to_skip = []  # type: List[str]
        return object_methods(self, mode=mode, keys_to_skip=keys_to_skip+my_keys_to_skip)

    @property
    def comment(self) -> str:
        """accesses the comment ('' if no comment was ever set)"""
        if hasattr(self, '_comment'):
            return '%s' % self._comment
        return ''

    @comment.setter
    def comment(self, new_comment: str) -> None:
        """sets a comment"""
        self._comment = _format_comment(new_comment)

    def _test_update_fields(self) -> None:
        """
        Walks every field of the card, dummy-updating each one, until the
        card runs out of fields (signaled by IndexError/KeyError).
        """
        n = 1
        while 1:
            try:
                self.update_field(n, 1.0)  # dummy updating the field
            except IndexError:
                return
            except KeyError:
                return
            # bug fix: n was never incremented, so the loop spun forever
            # updating field 1 instead of walking the card
            n += 1

    def update_field(self, n: int, value: Optional[Union[int, float, str]]) -> None:
        """
        Updates a field based on its field number.

        Parameters
        ----------
        n : int
            the field number
        value : int/float/str/None
            the value to update the field to

        .. note::
            This is dynamic if the card length changes.

            update_field can be used as follows to change the z coordinate
            of a node::

            >>> nid = 1
            >>> node = model.nodes[nid]
            >>> node.update_field(3, 0.1)
        """
        try:
            key_name = self._field_map[n]
            setattr(self, key_name, value)
        except KeyError:
            # fields not in _field_map (e.g., variable-length lists) are
            # handled by the subclass
            self._update_field_helper(n, value)

    def _update_field_helper(self, n: int, value: Optional[Union[int, float, str]]):
        """
        dynamic method for non-standard attributes
        (e.g., node.update_field(3, 0.1) to update z)
        """
        msg = '%s has not overwritten _update_field_helper; out of range' % self.__class__.__name__
        raise IndexError(msg)

    def _get_field_helper(self, n: int):
        """dynamic method for non-standard attributes (e.g., node.get_field(3, 0.1) to get z)"""
        msg = '%s has not overwritten _get_field_helper; out of range' % self.__class__.__name__
        raise IndexError(msg)

    def get_field(self, n: int) -> Optional[Union[int, float, str]]:
        """
        Gets a field based on its field number

        Parameters
        ----------
        n : int
            the field number

        Returns
        -------
        value : int/float/str/None
            the value of the field

        .. code-block:: python

           nid = 1
           node = model.nodes[nid]
           # ['GRID', nid, cp, x, y, z]
           z = node.get_field(5)
        """
        try:
            key_name = self._field_map[n]
            value = getattr(self, key_name)
        except KeyError:
            value = self._get_field_helper(n)
        return value

    def _verify(self, xref: bool) -> None:
        """
        Verifies all methods for this object work

        Parameters
        ----------
        xref : bool
            has this model been cross referenced
        """
        print('# skipping _verify (type=%s) because _verify is '
              'not implemented' % self.type)

    def __eq__(self, card) -> bool:
        """
        Compares two cards field-by-field; enables:

        .. code-block:: python

           >>> GRID(nid=1, ...) == GRID(nid=1, ...)
           True
           >>> GRID(nid=1, ...) == GRID(nid=2, ...)
           False
           >>> GRID(nid=1, ...) == CQUAD4(eid=1, ...)
           False
        """
        if not isinstance(card, self.__class__):
            return False
        if self.type != card.type:
            return False
        fields1 = self.raw_fields()
        fields2 = card.raw_fields()
        return self._is_same_fields(fields1, fields2)

    def _is_same_fields(self,
                        fields1: List[Union[int, float, str, None]],
                        fields2: List[Union[int, float, str, None]]) -> bool:
        """helper for __eq__; True if every pair of fields compares equal"""
        for (field1, field2) in zip(fields1, fields2):
            if not is_same(field1, field2):
                return False
        return True

    def _is_same_fields_long(self, fields1, fields2):  # pragma: no cover
        """helper for __eq__; per-field comparison results (for debugging)"""
        out = []
        for (field1, field2) in zip(fields1, fields2):
            is_samei = is_same(field1, field2)
            out.append(is_samei)
        return out

    def print_raw_card(self, size: int=8, is_double: bool=False) -> str:
        """A card's raw fields include all defaults for all fields"""
        list_fields = self.raw_fields()
        return self.comment + print_card(list_fields, size=size, is_double=is_double)

    def repr_fields(self) -> List[Union[int, float, str, None]]:
        """
        Gets the fields in their simplified form

        Returns
        -------
        fields : List[varies]
            the fields that define the card
        """
        return self.raw_fields()

    def print_card(self, size: int=8, is_double: bool=False) -> str:
        """prints the card in 8/16/16-double format"""
        list_fields = self.repr_fields()
        return self.comment + print_card(list_fields, size=size, is_double=is_double)

    def print_repr_card(self, size: int=8, is_double: bool=False) -> str:
        """prints the card in 8/16/16-double format"""
        list_fields = self.repr_fields()
        return self.comment + print_card(list_fields, size=size, is_double=is_double)

    def __repr__(self) -> str:
        """
        Prints a card in the simplest way possible
        (default values are left blank).
        """
        comment = self.comment
        list_fields = self.repr_fields()
        try:
            return comment + print_card(list_fields, size=8)
        except Exception:
            # a field may not fit in 8 characters; retry at 16 before
            # giving up with context
            try:
                return comment + print_card(list_fields, size=16)
            except Exception:
                print('problem printing %s card' % self.type)
                print("list_fields = ", list_fields)
                raise

    def rstrip(self) -> str:
        """str(card) without trailing whitespace; tolerates unicode comments"""
        try:
            msg = '%s' % str(self)
        except UnicodeEncodeError:
            # temporarily clear the comment so the card itself still prints
            comment = self.comment
            self.comment = ''
            msg = '$ dropped comment due to unicode error\n%s' % str(self)
            self.comment = comment
        return msg.rstrip()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """
        Writes the card with the specified width and precision

        Parameters
        ----------
        size : int (default=8)
            size of the field; {8, 16}
        is_double : bool (default=False)
            is this card double precision

        Returns
        -------
        msg : str
            the string representation of the card
        """
        raise NotImplementedError('%s has not overwritten write_card' % self.__class__.__name__)

    def write_card_16(self, is_double: bool=False) -> str:
        """writes the card in 16-field format (single precision)"""
        fields = self.repr_fields()
        return print_card(fields, size=16, is_double=False)
class Property(BaseCard):
    """Base Property Class"""

    def __init__(self) -> None:
        """dummy init"""
        pass

    def Pid(self) -> int:
        """
        Gets the id of the property

        Returns
        -------
        pid : int
            the Property ID
        """
        return self.pid

    def Mid(self) -> int:
        """
        Gets the id of the material referenced by the property

        Returns
        -------
        mid : int
            the Material ID
        """
        mid_ref = self.mid_ref
        if mid_ref is not None:
            # cross-referenced; ask the material object itself
            return mid_ref.mid
        return self.mid

    def write_card_8(self) -> str:
        """writes the card in small-field format"""
        return self.write_card()

    def write_card_16(self, is_double: bool=False) -> str:
        """writes the card in large-field format"""
        return self.write_card()
class Material(BaseCard):
    """Base Material Class"""

    def __init__(self) -> None:
        """dummy init"""
        BaseCard.__init__(self)

    @property
    def TRef(self) -> float:  # pramga: no cover
        """legacy accessor for the reference temperature ``self.tref``"""
        if hasattr(self, 'tref'):
            return self.tref
        raise AttributeError('%r object has no attribute tref' % self.type)

    @TRef.setter
    def TRef(self, tref: float) -> None:  # pramga: no cover
        """sets the self.Tref attributes"""
        if hasattr(self, 'tref'):
            self.tref = tref
        else:
            raise AttributeError('%r object has no attribute tref' % self.type)

    def cross_reference(self, model: BDF) -> None:
        """dummy cross reference method for a Material"""
        pass

    def Mid(self) -> Any:
        """
        Gets the id of the material

        Returns
        -------
        mid : int
            the Material ID
        """
        return self.mid
class Element(BaseCard):
    """defines the Element class"""
    pid = 0  # CONM2, rigid elements have no property

    def __init__(self) -> None:
        """dummy init"""
        BaseCard.__init__(self)

    def verify_unique_node_ids(self) -> None:
        """checks that the element's node ids are unique"""
        node_ids = self.node_ids
        self._verify_unique_node_ids(node_ids)

    def _verify_unique_node_ids(self, required_node_ids, non_required_node_ids=None) -> None:
        """
        Raises RuntimeError if any required node id is repeated.

        Parameters
        ----------
        required_node_ids : List[int]
            node ids that must be unique
        non_required_node_ids : List[int]; default=None
            not supported; passing a truthy value raises NotImplementedError
        """
        if required_node_ids:
            if non_required_node_ids:
                raise NotImplementedError('only required nodes implemented')
            else:
                urnids = np.unique(required_node_ids)
                n_unique_node_ids = len(urnids)
                n_node_ids = len(required_node_ids)
                if n_unique_node_ids != n_node_ids:
                    msg = 'nunique_node_ids=%s nnode_ids=%s' % (n_unique_node_ids, n_node_ids)
                    raise RuntimeError(msg)
        else:
            raise NotImplementedError('only required nodes implemented')

    def Pid(self) -> int:
        """
        Gets the Property ID of an element

        Returns
        -------
        pid : int
            the Property ID
        """
        if self.pid_ref is None:
            return self.pid
        return self.pid_ref.pid

    def get_node_positions(self, nodes: Any=None) -> np.ndarray:
        """
        returns the (nnodes, 3) xyz positions of multiple node objects;
        rows for None nodes are left as NaN
        """
        if nodes is None:
            nodes = self.nodes_ref
        nnodes = len(nodes)
        positions = np.empty((nnodes, 3), dtype='float64')
        positions.fill(np.nan)
        for i, node in enumerate(nodes):
            if isinstance(node, int):
                raise TypeError("node=%s; type=%s must be a Node\n%s" % (
                    str(node), type(node), self.get_stats()))
            if node is not None:
                positions[i, :] = node.get_position()
        return positions

    def get_node_positions_no_xref(self, model: BDF, nodes: Optional[List[Any]]=None) -> np.ndarray:
        """
        returns the (nnodes, 3) xyz positions of multiple node ids without
        relying on cross-referencing; rows for None ids are left as NaN

        NOTE: falls back to self.nodes for any falsy ``nodes`` (None or []),
        unlike get_node_positions, which only falls back on None.
        """
        if not nodes:
            nodes = self.nodes
        nnodes = len(nodes)
        positions = np.empty((nnodes, 3), dtype='float64')
        positions.fill(np.nan)
        for i, nid in enumerate(nodes):
            if nid is not None:
                node = model.Node(nid)
                positions[i, :] = node.get_position_no_xref(model)
        return positions

    def _node_ids(self, nodes: Optional[List[Any]]=None,
                  allow_empty_nodes: bool=False, msg: str='') -> List[int]:
        """returns nodeIDs for repr functions"""
        return _node_ids(self, nodes=nodes, allow_empty_nodes=allow_empty_nodes, msg=msg)

    def prepare_node_ids(self, nids: List[int],
                         allow_empty_nodes: bool=False) -> List[Optional[int]]:
        """
        Verifies all node IDs exist and that they're integers.

        (annotation fix: this method returns the cleaned node-id list but
        was annotated ``-> None``)
        """
        nids = self.validate_node_ids(nids, allow_empty_nodes)
        return nids

    def validate_node_ids(self, nodes: List[int],
                          allow_empty_nodes: bool=False) -> List[Optional[int]]:
        """
        Cleans a list of node ids.

        Parameters
        ----------
        nodes : List[int]
            the raw node ids
        allow_empty_nodes : bool; default=False
            if True, at least one id is required and ids of 0 are
            converted to None (a placeholder for "no node")

        Returns
        -------
        nodes2 : List[int/None]
            the cleaned node ids

        (annotation fix: this method returns the cleaned list but was
        annotated ``-> None``)
        """
        if allow_empty_nodes:
            # verify we have nodes
            if len(nodes) == 0:
                msg = '%s requires at least one node id be specified; node_ids=%s' % (
                    self.type, nodes)
                raise ValueError(msg)
            # remove 0 nodes (placeholder for "no node")
            nodes2 = [nid if nid != 0 else None
                      for nid in nodes]
        else:
            nodes2 = nodes
        return nodes2
def _format_comment(comment: str) -> str:
r"""Format a card comment to precede the card using
nastran-compatible comment character $. The comment
string can have multiple lines specified as linebreaks.
Empty comments or just spaces are returned as an empty string.
Examples
--------
>>> _format_comment('a comment\ntaking two lines')
$a comment
$taking two lines
>>> _format_comment('')
<empty string>
>>> _format_comment(' ')
<empty string>
>>> _format_comment('$ a comment within a comment looks weird')
'$$ a comment within a comment looks weird'
>>> _format_comment('no trailing whitespace ')
$no trailing extra whitespace
"""
if comment.strip() == '': # deals with a bunch of spaces
return ''
return ''.join(['${}\n'.format(comment_line)
for comment_line in comment.rstrip().split('\n')])
def _node_ids(card, nodes=None, allow_empty_nodes: bool=False, msg: str='') -> Any:
    """
    Gets the node ids (ints) from a mixed list of ids and Node objects.

    Parameters
    ----------
    card : BaseCard
        the card the nodes belong to (used for error reporting and as
        the fallback source of ``card.nodes``)
    nodes : List[int/Node]; default=None -> card.nodes
        the nodes to convert
    allow_empty_nodes : bool; default=False
        if True, entries of 0/None are passed through as None;
        if False, a 0 id is an error
    msg : str; default=''
        extra context for error messages

    Returns
    -------
    node_ids : List[int] / List[Optional[int]]
        the node ids
    """
    try:
        if not nodes:
            nodes = card.nodes
            assert nodes is not None, card.__dict__

        if allow_empty_nodes:
            nodes2 = []
            for node in nodes:
                if node == 0 or node is None:
                    nodes2.append(None)
                elif isinstance(node, integer_types):
                    nodes2.append(node)
                else:
                    nodes2.append(node.nid)
            assert nodes2 is not None, str(card)
            return nodes2

        try:
            node_ids = []
            for node in nodes:
                if isinstance(node, integer_types):
                    node_ids.append(node)
                else:
                    node_ids.append(node.nid)
        except Exception:
            print('type=%s nodes=%s allow_empty_nodes=%s\nmsg=%s' % (
                card.type, nodes, allow_empty_nodes, msg))
            raise
        assert 0 not in node_ids, 'node_ids = %s' % node_ids
        assert node_ids is not None, str(card)
        return node_ids
    except Exception:
        # NOTE: the inner handler may have printed this context already;
        # it is printed again here so it survives the outer re-raise
        print('type=%s nodes=%s allow_empty_nodes=%s\nmsg=%s' % (
            card.type, nodes, allow_empty_nodes, msg))
        raise
    # bug fix: removed the unreachable `raise RuntimeError('huh...')`
    # that followed the try/except (every path either returns or raises)
def break_word_by_trailing_integer(pname_fid: str) -> Tuple[str, str]:
    """
    Splits a word that has a value that is an integer

    Parameters
    ----------
    pname_fid : str
        the DVPRELx term (e.g., T11, THETA42)

    Returns
    -------
    word : str
        the alphabetic prefix
    num : str
        the trailing integer (kept as a string)

    Raises
    ------
    SyntaxError
        if there are no trailing digits or no leading word
        (the all-digit case previously died with a confusing AssertionError)

    Examples
    --------
    >>> break_word_by_trailing_integer('T11')
    ('T', '11')
    >>> break_word_by_trailing_integer('THETA11')
    ('THETA', '11')
    """
    # rstrip with a character set removes all trailing digits in one pass;
    # whatever remains is the word, the removed suffix is the number
    word = pname_fid.rstrip('0123456789')
    num = pname_fid[len(word):]
    if not num or not word:
        msg = ("pname_fid=%r does not follow the form 'T1', 'T11', 'THETA42' "
               "(letters and a number)" % pname_fid)
        raise SyntaxError(msg)
    return word, num
def break_word_by_trailing_parentheses_integer_ab(pname_fid: str) -> 'Tuple[str, Union[str, int]]':
    """
    Splits a word that has a parenthesized value that can be A/B
    as well as an integer.

    Parameters
    ----------
    pname_fid : str
        the DVPRELx term; A(11), NSM(5), NSM(B)

    Returns
    -------
    word : str
        the value not in parentheses
    value : int / str
        the value in parentheses; 'A'/'B' stay as strings,
        anything else is converted to an int

    Examples
    --------
    >>> break_word_by_trailing_parentheses_integer_ab('A(11)')
    ('A', 11)
    >>> break_word_by_trailing_parentheses_integer_ab('NSM(11)')
    ('NSM', 11)
    >>> break_word_by_trailing_parentheses_integer_ab('NSM(B)')
    ('NSM', 'B')
    """
    assert pname_fid.endswith(')'), pname_fid
    word, num = pname_fid[:-1].split('(')
    if num not in ['A', 'B']:
        # not an end-A/end-B flag, so it must be an integer index
        num = int(num)
    return word, num
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,623
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/gui/qt_files/load_actions.py
|
import os
import sys
import traceback
import time as time_module
from typing import Tuple, Optional, Any
import numpy as np
from qtpy.compat import getopenfilename
#from qtpy.QtWidgets import QFileDialog
from pyNastran.bdf.patran_utils.read_patran_custom_results import load_patran_nod
from pyNastran.utils import print_bad_path
from pyNastran.gui.utils.load_results import load_csv, load_deflection_csv
from pyNastran.gui.utils.load_results import create_res_obj
IS_TESTING = 'test' in sys.argv[0]
class LoadActions:
    """performance mode should be handled in the main gui to minimize flipping"""

    def __init__(self, gui):
        # parent gui object; all state, logging, and rendering goes through it
        self.gui = gui

    @property
    def log(self):
        """links to the GUI's log"""
        return self.gui.log
    def on_load_geometry(self, infile_name=None, geometry_format=None, name='main',
                         plot=True, raise_error=False):
        """
        Loads a baseline geometry

        Parameters
        ----------
        infile_name : str; default=None -> popup
            path to the filename
        geometry_format : str; default=None
            the geometry format for programmatic loading
        name : str; default='main'
            the name of the actor; don't use this
        plot : bool; default=True
            Should the baseline geometry have results created and plotted/rendered?
            If you're calling the on_load_results method immediately after, set it to False
        raise_error : bool; default=False
            stop the code if True
        """
        assert isinstance(name, str), 'name=%r type=%s' % (name, type(name))
        is_failed, out = self._load_geometry_filename(
            geometry_format, infile_name)
        print("is_failed =", is_failed)
        if is_failed:
            return

        has_results = False
        infile_name, load_function, filter_index, formats, geometry_format2 = out
        if load_function is not None:
            self.gui.last_dir = os.path.split(infile_name)[0]

            if self.gui.name == '':
                name = 'main'
            else:
                print('name = %r' % name)

            # switching actors requires resetting the old model's mapper state
            if name != self.gui.name:
                #scalar_range = self.grid_selected.GetScalarRange()
                #self.grid_mapper.SetScalarRange(scalar_range)
                self.gui.grid_mapper.ScalarVisibilityOff()
                #self.grid_mapper.SetLookupTable(self.color_function)
                self.gui.name = name
                self.gui._reset_model(name)

            # reset alt grids
            names = self.gui.alt_grids.keys()
            for name in names:
                self.gui.alt_grids[name].Reset()
                self.gui.alt_grids[name].Modified()

            if not os.path.exists(infile_name) and geometry_format:
                msg = 'input file=%r does not exist' % infile_name
                self.gui.log_error(msg)
                self.gui.log_error(print_bad_path(infile_name))
                return

            # clear out old data
            if self.gui.model_type is not None:
                clear_name = 'clear_' + self.gui.model_type
                try:
                    dy_method = getattr(self, clear_name)  # 'self.clear_nastran()'
                    dy_method()
                except Exception:
                    self.gui.log_error("method %r does not exist" % clear_name)
            self.gui.log_info("reading %s file %r" % (geometry_format, infile_name))

            try:
                time0 = time_module.time()

                if geometry_format2 in self.gui.format_class_map:
                    # intialize the class
                    #print('geometry_format=%r geometry_format2=%s' % (geometry_format, geometry_format2))

                    # TODO: was geometry_format going into this...
                    cls = self.gui.format_class_map[geometry_format2](self.gui)

                    function_name2 = 'load_%s_geometry' % geometry_format2
                    load_function2 = getattr(cls, function_name2)
                    has_results = load_function2(infile_name, name=name, plot=plot)
                else:
                    has_results = load_function(infile_name, name=name, plot=plot) # self.last_dir,

                dt = time_module.time() - time0
                print('dt_load = %.2f sec = %.2f min' % (dt, dt / 60.))
                #else:
                    #name = load_function.__name__
                    #self.log_error(str(args))
                    #self.log_error("'plot' needs to be added to %r; "
                                   #"args[-1]=%r" % (name, args[-1]))
                    #has_results = load_function(infile_name) # , self.last_dir
                    #form, cases = load_function(infile_name) # , self.last_dir
            except Exception as error:
                #raise
                msg = traceback.format_exc()
                self.gui.log_error(msg)
                if raise_error or self.gui.dev:
                    raise
                #return
            #self.vtk_panel.Update()
            self.gui.rend.ResetCamera()

            # the model has been loaded, so we enable load_results
            if filter_index >= 0:
                self.gui.format = formats[filter_index].lower()
                unused_enable = has_results
                #self.load_results.Enable(enable)
            else: # no file specified
                return
        #print("on_load_geometry(infile_name=%r, geometry_format=None)" % infile_name)
        self.gui.infile_name = infile_name
        self.gui.out_filename = None
        #if self.out_filename is not None:
            #msg = '%s - %s - %s' % (self.format, self.infile_name, self.out_filename)
        if name == 'main':
            msg = '%s - %s' % (self.gui.format, self.gui.infile_name)
            self.gui.window_title = msg
            self.gui.update_menu_bar()
            main_str = ''
        else:
            main_str = ', name=%r' % name

        self.gui.log_command("on_load_geometry(infile_name=%r, geometry_format=%r%s)" % (
            infile_name, self.gui.format, main_str))
    def _load_geometry_filename(self, geometry_format: str, infile_name: str) -> Tuple[bool, Any]:
        """
        gets the filename and format

        Returns
        -------
        is_failed : bool
            True if the format is unsupported or the user cancelled
        out : tuple / None
            (infile_name, load_function, filter_index, formats, geometry_format)
            on success; None on failure
        """
        wildcard = ''
        is_failed = False

        if geometry_format and geometry_format.lower() not in self.gui.supported_formats:
            is_failed = True
            msg = f'The import for the {geometry_format!r} module failed.\n'
            self.gui.log_error(msg)
            if IS_TESTING:  # pragma: no cover
                raise RuntimeError(msg)
            return is_failed, None

        if infile_name:
            # programmatic load; the format must be given explicitly
            if geometry_format is None:
                is_failed = True
                msg = 'infile_name=%r and geometry_format=%r; both must be specified\n' % (
                    infile_name, geometry_format)
                self.gui.log_error(msg)
                return is_failed, None

            geometry_format = geometry_format.lower()

            for fmt in self.gui.fmts:
                fmt_name, _major_name, _geom_wildcard, geom_func, res_wildcard, _resfunc = fmt
                if geometry_format == fmt_name:
                    load_function = geom_func
                    if res_wildcard is None:
                        unused_has_results = False
                    else:
                        unused_has_results = True
                    break
            else:
                # for/else: no format matched
                self.gui.log_error('---invalid format=%r' % geometry_format)
                is_failed = True
                return is_failed, None
            formats = [geometry_format]
            filter_index = 0
        else:
            # load a pyqt window
            formats = []
            load_functions = []
            has_results_list = []
            wildcard_list = []

            # setup the selectable formats
            for fmt in self.gui.fmts:
                fmt_name, _major_name, geom_wildcard, geom_func, res_wildcard, _res_func = fmt
                formats.append(_major_name)
                wildcard_list.append(geom_wildcard)
                load_functions.append(geom_func)

                if res_wildcard is None:
                    has_results_list.append(False)
                else:
                    has_results_list.append(True)

            # the list of formats that will be selectable in some odd syntax
            # that pyqt uses
            wildcard = ';;'.join(wildcard_list)

            # get the filter index and filename
            if infile_name is not None and geometry_format is not None:
                filter_index = formats.index(geometry_format)
            else:
                title = 'Choose a Geometry File to Load'
                wildcard_index, infile_name = self.create_load_file_dialog(wildcard, title)
                if not infile_name:
                    # user clicked cancel
                    is_failed = True
                    return is_failed, None
                filter_index = wildcard_list.index(wildcard_index)

            geometry_format = formats[filter_index]
            load_function = load_functions[filter_index]
            unused_has_results = has_results_list[filter_index]
        return is_failed, (infile_name, load_function, filter_index, formats, geometry_format)
    def on_load_results(self, out_filename=None):
        """
        Loads a results file.  Must have called on_load_geometry first.

        Parameters
        ----------
        out_filename : str / None
            the path to the results file; None pops a file dialog
        """
        geometry_format = self.gui.format
        if self.gui.format is None:
            msg = 'on_load_results failed: You need to load a file first...'
            self.gui.log_error(msg)
            return
            #raise RuntimeError(msg)

        if out_filename in [None, False]:
            title = 'Select a Results File for %s' % self.gui.format
            wildcard = None
            load_function = None

            for fmt in self.gui.fmts:
                fmt_name, _major_name, _geowild, _geofunc, _reswild, _resfunc = fmt
                if geometry_format == fmt_name:
                    wildcard = _reswild
                    load_function = _resfunc
                    break
            else:
                # for/else: the loaded geometry format was not found
                msg = 'format=%r is not supported' % geometry_format
                self.gui.log_error(msg)
                raise RuntimeError(msg)

            if wildcard is None:
                msg = 'format=%r has no method to load results' % geometry_format
                self.gui.log_error(msg)
                return
            out_filename = self.create_load_file_dialog(wildcard, title)[1]
        else:

            for fmt in self.gui.fmts:
                fmt_name, _major_name, _geowild, _geofunc, _reswild, _resfunc = fmt
                #print('fmt_name=%r geometry_format=%r' % (fmt_name, geometry_format))
                if fmt_name == geometry_format:
                    load_function = _resfunc
                    break
            else:
                msg = ('format=%r is not supported. '
                       'Did you load a geometry model?' % geometry_format)
                self.gui.log_error(msg)
                raise RuntimeError(msg)

        if out_filename == '':
            return
        if isinstance(out_filename, str):
            # normalize a single filename into a list so multiple results
            # files can share the loop below
            out_filename = [out_filename]
        for out_filenamei in out_filename:
            if not os.path.exists(out_filenamei):
                msg = 'result file=%r does not exist' % out_filenamei
                self.gui.log_error(msg)
                return
                #raise IOError(msg)
            self.gui.last_dir = os.path.split(out_filenamei)[0]
            try:
                load_function(out_filenamei)
            except Exception: # as e
                msg = traceback.format_exc()
                self.gui.log_error(msg)
                print(msg)
                return
                #raise

            self.gui.out_filename = out_filenamei
            msg = '%s - %s - %s' % (self.gui.format, self.gui.infile_name, out_filenamei)
            self.gui.window_title = msg
            print("on_load_results(%r)" % out_filenamei)
            self.gui.out_filename = out_filenamei
            self.gui.log_command("on_load_results(%r)" % out_filenamei)
    def on_load_custom_results(self, out_filename=None, restype=None, stop_on_failure=False):
        """
        will be a more generalized results reader

        Parameters
        ----------
        out_filename : str / None
            the results filename; None pops a file dialog
        restype : str / None
            'node', 'element', 'deflection', 'force', 'patran_nod';
            required when out_filename is given
        stop_on_failure : bool; default=False
            raise instead of returning on failure

        Returns
        -------
        is_failed : bool
            True on any failure
        """
        is_failed, out_filename, iwildcard = self._on_load_custom_results_load_filename(
            out_filename=out_filename, restype=restype)

        if is_failed:
            if stop_on_failure:  # pragma: no cover
                raise RuntimeError('failed getting filename')
            return is_failed
        if out_filename == '':
            is_failed = True
            return is_failed

        is_failed = True
        if not os.path.exists(out_filename):
            msg = 'result file=%r does not exist' % out_filename
            self.gui.log_error(msg)
            if stop_on_failure:  # pragma: no cover
                raise RuntimeError(msg)
            return is_failed

        try:
            # dispatch on the wildcard index chosen by dialog/restype
            if iwildcard == 0:
                self._on_load_nodal_elemental_results('Nodal', out_filename, stop_on_failure=stop_on_failure)
                restype = 'Node'
            elif iwildcard == 1:
                self._on_load_nodal_elemental_results('Elemental', out_filename, stop_on_failure=stop_on_failure)
                restype = 'Element'
            elif iwildcard == 2:
                self._load_deflection(out_filename)
                restype = 'Deflection'
            elif iwildcard == 3:
                self._load_force(out_filename)
                restype = 'Force'
            elif iwildcard == 4:
                self.load_patran_nod(out_filename)
                restype = 'Patran_nod'
            else:
                raise NotImplementedError('iwildcard = %s' % iwildcard)
        except Exception:
            msg = traceback.format_exc()
            self.gui.log_error(msg)
            if stop_on_failure:  # pragma: no cover
                raise RuntimeError(msg)
            return is_failed
        self.gui.log_command("on_load_custom_results(%r, restype=%r)" % (out_filename, restype))
        is_failed = False
        return is_failed
    def _on_load_custom_results_load_filename(self, out_filename=None, restype=None):
        """
        Picks the custom-results filename and its wildcard index.

        Returns
        -------
        is_failed : bool
            True when no geometry is loaded or the user cancelled
        out_filename : str / None
            the chosen filename
        iwildcard : int / None
            index into the supported result kinds
            (0=node, 1=element, 2=deflection, 3=force, 4=patran_nod)
        """
        is_failed = True
        #unused_geometry_format = self.format
        if self.gui.format is None:
            msg = 'on_load_results failed: You need to load a file first...'
            self.gui.log_error(msg)
            return is_failed, None, None

        if out_filename in [None, False]:
            title = 'Select a Custom Results File for %s' % (self.gui.format)

            #print('wildcard_level =', wildcard_level)
            #self.wildcard_delimited = 'Delimited Text (*.txt; *.dat; *.csv)'
            fmts = [
                'Node - Delimited Text (*.txt; *.dat; *.csv)',
                'Element - Delimited Text (*.txt; *.dat; *.csv)',
                'Nodal Deflection - Delimited Text (*.txt; *.dat; *.csv)',
                'Nodal Force - Delimited Text (*.txt; *.dat; *.csv)',
                'Patran nod (*.nod)',
            ]
            fmt = ';;'.join(fmts)
            wildcard_level, out_filename = self.create_load_file_dialog(fmt, title)
            if not out_filename:
                return is_failed, None, None # user clicked cancel
            iwildcard = fmts.index(wildcard_level)
        else:
            # programmatic path; restype selects the kind directly
            fmts = [
                'node', 'element', 'deflection', 'force', 'patran_nod',
            ]
            iwildcard = fmts.index(restype.lower())
        is_failed = False
        return is_failed, out_filename, iwildcard
def _load_deflection(self, out_filename):
"""loads a deflection file"""
self._load_deflection_force(out_filename, is_deflection=True, is_force=False)
def _load_force(self, out_filename):
"""loads a force file"""
self._load_deflection_force(out_filename, is_deflection=False, is_force=True)
def _load_deflection_force(self, out_filename, is_deflection=False, is_force=False):
out_filename_short = os.path.basename(out_filename)
A, nids_index, fmt_dict, headers = load_deflection_csv(out_filename)
#nrows, ncols, fmts
header0 = headers[0]
result0 = A[header0]
nrows = result0.shape[0]
nnodes = self.gui.nnodes
if nrows != nnodes:
#'nrows=%s nnodes=%s' % (nrows, self.gui.nnodes)
self.log.warning('The deflection CSV has %i rows, but there are %i nodes in the model.'
" Verify that the result is for the correct model and that it's "
'not an elemental result.' % (nrows, nnodes))
A = _resize_array(A, nids_index, self.gui.node_ids, nrows, nnodes)
result_type = 'node'
self._add_cases_to_form(A, fmt_dict, headers, result_type,
out_filename_short, update=True, is_scalar=False,
is_deflection=is_deflection, is_force=is_force)
def _on_load_nodal_elemental_results(self, result_type, out_filename=None, stop_on_failure=False):
"""
Loads a CSV/TXT results file. Must have called on_load_geometry first.
Parameters
----------
result_type : str
'Nodal', 'Elemental'
out_filename : str / None
the path to the results file
"""
try:
self._load_csv(result_type, out_filename, stop_on_failure=stop_on_failure)
except Exception:
msg = traceback.format_exc()
self.gui.log_error(msg)
if stop_on_failure: # pragma: no cover
raise
#return
raise
#if 0:
#self.out_filename = out_filename
#msg = '%s - %s - %s' % (self.format, self.infile_name, out_filename)
#self.window_title = msg
#self.out_filename = out_filename
def load_patran_nod(self, nod_filename):
"""reads a Patran formatted *.nod file"""
A, fmt_dict, headers = load_patran_nod(nod_filename, self.gui.node_ids)
out_filename_short = os.path.relpath(nod_filename)
result_type = 'node'
self._add_cases_to_form(A, fmt_dict, headers, result_type,
out_filename_short, update=True,
is_scalar=True)
    def _load_csv(self, result_type, out_filename, stop_on_failure=False):
        """
        common method between:
          - on_add_nodal_results(filename)
          - on_add_elemental_results(filename)

        Parameters
        ----------
        result_type : str
            'Nodal' or 'Elemental'; selects whether rows are matched
            against node ids or element ids
        out_filename : str
            the CSV filename to load
        """
        out_filename_short = os.path.relpath(out_filename)
        A, fmt_dict, headers = load_csv(out_filename)
        #nrows, ncols, fmts
        header0 = headers[0]
        result0 = A[header0]
        nrows = result0.size

        if result_type == 'Nodal':
            nnodes = self.gui.nnodes
            if nrows != nnodes:
                # pad/trim the arrays so every model node has a value
                self.log.warning('The fringe CSV has %i rows, but there are %i nodes in the '
                                 'model. Verify that the result is for the correct model and '
                                 "that it's not an elemental result." % (nrows, nnodes))
                A = _resize_array(A, A['index'], self.gui.node_ids, nrows, nnodes)
            result_type2 = 'node'
        elif result_type == 'Elemental':
            nelements = self.gui.nelements
            if nrows != nelements:
                self.log.warning('The fringe CSV has %i rows, but there are %i elements in the '
                                 'model. Verify that the result is for the correct model and '
                                 "that it's not a nodal result." % (nrows, nelements))
                A = _resize_array(A, A['index'], self.gui.element_ids, nrows, nelements)
            result_type2 = 'centroid'
        else:
            raise NotImplementedError('result_type=%r' % result_type)

        #num_ids = len(ids)
        #if num_ids != nrows:
            #A2 = {}
            #for key, matrix in A.items():
                #fmt = fmt_dict[key]
                #assert fmt not in ['%i'], 'fmt=%r' % fmt
                #if len(matrix.shape) == 1:
                    #matrix2 = np.full(num_ids, dtype=matrix.dtype)
                    #iids = np.searchsorted(ids, )
        #A = A2
        self._add_cases_to_form(A, fmt_dict, headers, result_type2,
                                out_filename_short, update=True, is_scalar=True)
    def _add_cases_to_form(self, A, fmt_dict, headers, result_type,
                           out_filename_short, update=True, is_scalar=True,
                           is_deflection=False, is_force=False):
        """
        common method between:
          - _load_csv
          - _load_deflection_csv

        Parameters
        ----------
        A : dict[key] = (n, m) array
            the numpy arrays
            key : str
                the name
            n : int
                number of nodes/elements
            m : int
                secondary dimension
                N/A : 1D array
                3 : deflection
        fmt_dict : dict[header] = fmt
            the format of the arrays
            header : str
                the name
            fmt : str
                '%i', '%f'
        headers : List[str]
            the result titles; one gui case is created per header
        result_type : str
            'node', 'centroid'
        out_filename_short : str
            the display name
        update : bool; default=True
            update the res_widget

        # A = np.loadtxt('loadtxt_spike.txt', dtype=('float,int'))
        # dtype=[('f0', '<f8'), ('f1', '<i4')])
        # A['f0']
        # A['f1']
        """
        #print('A =', A)
        formi = []
        form = self.gui.get_form()
        # new cases are appended after the existing ones
        icase = len(self.gui.case_keys)
        islot = 0
        for case_key in self.gui.case_keys:
            if isinstance(case_key, tuple):
                islot = case_key[0]
                break

        #assert len(headers) > 0, 'headers=%s' % (headers)
        #assert len(headers) < 50, 'headers=%s' % (headers)
        for header in headers:
            if is_scalar:
                out = create_res_obj(islot, headers, header, A, fmt_dict, result_type,
                                     colormap='jet')
            else:
                out = create_res_obj(islot, headers, header, A, fmt_dict, result_type,
                                     is_deflection=is_deflection, is_force=is_force,
                                     dim_max=self.gui.settings.dim_max, xyz_cid0=self.gui.xyz_cid0,
                                     colormap='jet')
            res_obj, title = out

            #cases[icase] = (stress_res, (subcase_id, 'Stress - isElementOn'))
            #form_dict[(key, itime)].append(('Stress - IsElementOn', icase, []))

            #key = (res_obj, (0, title))
            self.gui.case_keys.append(icase)
            self.gui.result_cases[icase] = (res_obj, (islot, title))
            formi.append((header, icase, []))

            # TODO: double check this should be a string instead of an int
            self.gui.label_actors[icase] = []
            self.gui.label_ids[icase] = set()
            icase += 1
        form.append((out_filename_short, None, formi))
        self.gui.ncases += len(headers)
        #cases[(ID, 2, 'Region', 1, 'centroid', '%i')] = regions
        if update:
            self.gui.res_widget.update_results(form, 'main')
def create_load_file_dialog(self, qt_wildcard: str, title: str,
default_filename: Optional[str]=None) -> Tuple[str, str]:
#options = QFileDialog.Options()
#options |= QFileDialog.DontUseNativeDialog
#fname, flt = QFileDialog.getOpenFileName(
#self, title, default_filename, file_types, options=options)
#flt = str(filt).strip()
#return fname, flt
if default_filename is None:
default_filename = self.gui.last_dir
fname, wildcard_level = getopenfilename(
parent=self.gui, caption=title,
basedir=default_filename, filters=qt_wildcard,
selectedfilter='', options=None)
return wildcard_level, fname
#def create_load_file_dialog2(self, qt_wildcard, title):
## getOpenFileName return QString and we want Python string
##title = 'Load a Tecplot Geometry/Results File'
#last_dir = ''
##qt_wildcard = ['Tecplot Hex Binary (*.tec; *.dat)']
#dialog = MultiFileDialog()
#dialog.setWindowTitle(title)
#dialog.setDirectory(self.last_dir)
#dialog.setFilters(qt_wildcard.split(';;'))
#if dialog.exec_() == QtGui.QDialog.Accepted:
#outfiles = dialog.selectedFiles()
#wildcard_level = dialog.selectedFilter()
#return str(wildcard_level), str(fname)
#return None, None
def _resize_array(A, nids_index, node_ids, nrows, nnodes):
"""
Resizes an array to be the right size.
Let's say we have 5 nodes in the output csv that aren't in the model.
We need to filter them out. Alternatively, we may have extra nodes.
We need to get the array to have results at all the node ids.
Parameters
----------
A : np.ndarray
the dictionary-like results array
node_ids : int np.ndarray
the node ids in the model
nrows : int
the number of rows in the csv; same length as nids_index
nnodes : int
the number of nodes in the real model; same length as node_ids
Returns
-------
A2 : np.ndarray
the properly sized dictionary-like results array
"""
# we might have extra nodes or missing nodes, so
# find the list of valid indices
inids = np.searchsorted(node_ids, nids_index)
iexist = np.where(inids < nnodes)[0]
A2 = {}
for key, Ai in A.items():
#print('Ai.shape', Ai.shape, len(iexist))
if key == 'index':
A2[key] = node_ids
continue
if len(Ai.shape) == 1:
if isinstance(Ai[0], (np.float32, np.float64)):
new_array = np.full((nnodes, ), np.nan, dtype=Ai.dtype)
elif isinstance(Ai[0], (np.int32, np.int64)):
new_array = np.full((nnodes, ), -1, dtype=Ai.dtype)
else:
raise NotImplementedError(Ai[0].dtype)
#print('iexist', iexist.shape, Ai.shape)
new_array[iexist] = Ai[iexist]
A2[key] = new_array
elif len(Ai.shape) == 2:
ncols = Ai.shape[1]
if isinstance(Ai[0, 0], (np.float32, np.float64)):
new_array = np.full((nnodes, ncols), np.nan, dtype=Ai.dtype)
elif isinstance(Ai[0, 0], (np.int32, np.int64)):
new_array = np.full((nnodes, ncols), -1, dtype=Ai.dtype)
else:
raise NotImplementedError(Ai[0].dtype)
#print('iexist', iexist.shape, Ai.shape)
new_array[iexist] = Ai[iexist]
A2[key] = new_array
else:
raise NotImplementedError(Ai.shape)
#A2[key] = Ai[iexist]
#print('A2[%s].shape = %s' % (key, A2[key].shape))
#print()
return A2
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,624
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/dev/bdf_vectorized/bdf.py
|
# coding: utf-8
# pylint: disable=W0201,R0915,R0912
"""
Main BDF class. Defines:
- BDF
see https://docs.plm.automation.siemens.com/tdoc/nxnastran/10/help/#uid:index
"""
import sys
import traceback
from pickle import load, dump
from collections import defaultdict
from typing import List, Union, Optional
import numpy as np
from cpylog import SimpleLogger, get_logger2, __version__ as CPYLOG_VERSION
from pyNastran.utils import object_attributes, check_path # _filename
from pyNastran.bdf.bdf_interface.utils import (
to_fields, _parse_pynastran_header, parse_executive_control_deck)
from pyNastran.bdf.utils import parse_patran_syntax
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.field_writer_16 import print_card_16
from pyNastran.bdf.cards.base_card import _format_comment
from pyNastran.bdf.cards.utils import wipe_empty_fields
#from pyNastran.bdf.write_path import write_include
from pyNastran.bdf.bdf_interface.assign_type import (
integer, integer_or_string, string)
#from pyNastran.bdf.errors import CrossReferenceError, DuplicateIDsError, CardParseSyntaxError
#from pyNastran.bdf.field_writer_16 import print_field_16
from pyNastran.bdf.case_control_deck import CaseControlDeck
from pyNastran.bdf.bdf_interface.utils import fill_dmigs
from pyNastran.bdf.bdf_interface.bdf_card import BDFCard
from pyNastran.dev.bdf_vectorized.bdf_interface2.write_mesh import WriteMesh
from pyNastran.dev.bdf_vectorized.bdf_interface2.get_card import GetMethods
from pyNastran.dev.bdf_vectorized.bdf_interface2.cross_reference import CrossReference
from pyNastran.dev.bdf_vectorized.bdf_interface2.add_card import AddCard
from pyNastran.bdf.field_writer_16 import print_field_16
from pyNastran.dev.bdf_vectorized.cards.constraints.spc import SPC, get_spc_constraint
from pyNastran.dev.bdf_vectorized.cards.constraints.spcd import SPCD
from pyNastran.dev.bdf_vectorized.cards.constraints.spc1 import SPC1, get_spc1_constraint
from pyNastran.dev.bdf_vectorized.cards.constraints.spcadd import SPCADD, get_spcadd_constraint
from pyNastran.dev.bdf_vectorized.cards.constraints.mpc import MPC, get_mpc_constraint
#from pyNastran.dev.bdf_vectorized.cards.constraints.mpcax import MPCAX
from pyNastran.dev.bdf_vectorized.cards.constraints.mpcadd import MPCADD
from pyNastran.dev.bdf_vectorized.cards.deqatn import DEQATN
from pyNastran.dev.bdf_vectorized.cards.dynamic import (
#DELAY, DPHASE, FREQ, FREQ1, FREQ2, FREQ4,
TSTEP, TSTEPNL, NLPARM, NLPCI, #TF
)
from pyNastran.dev.bdf_vectorized.cards.aero.aero_cards import (
AECOMP, AEFACT, AELINK, AELIST, AEPARM, AESTAT,
AESURF, AESURFS, AERO, AEROS, CSSCHD,
CAERO1, CAERO2, CAERO3, CAERO4, CAERO5,
PAERO1, PAERO2, PAERO3, PAERO4, PAERO5,
MONPNT1, MONPNT2, MONPNT3,
FLFACT, FLUTTER, GUST, MKAERO1,
MKAERO2, SPLINE1, SPLINE2, SPLINE3, SPLINE4,
SPLINE5, TRIM, DIVERG)
from pyNastran.dev.bdf_vectorized.cards.optimization import (
DCONADD, DCONSTR, DESVAR, DDVAL, DOPTPRM, DLINK,
DRESP1, DRESP2, DRESP3,
DVCREL1, DVCREL2,
DVMREL1, DVMREL2,
DVPREL1, DVPREL2,
DVGRID)
from pyNastran.dev.bdf_vectorized.cards.bdf_sets import (
ASET, BSET, CSET, QSET, USET,
ASET1, BSET1, CSET1, QSET1, USET1,
SET1, SET3, #RADSET,
SEBSET, SECSET, SEQSET, # SEUSET
SEBSET1, SECSET1, SEQSET1, # SEUSET1
SESET, #SEQSEP,
)
# old cards
from pyNastran.bdf.cards.params import PARAM
from pyNastran.bdf.cards.elements.rigid import RBAR, RBAR1, RBE1, RBE2, RBE3, RROD, RSPLINE
from pyNastran.bdf.cards.contact import BCRPARA, BCTADD, BCTSET, BSURF, BSURFS, BCTPARA
from pyNastran.bdf.cards.elements.elements import PLOTEL #CFAST, CGAP, CRAC2D, CRAC3D,
from pyNastran.bdf.cards.methods import EIGB, EIGC, EIGR, EIGP, EIGRL
from pyNastran.bdf.cards.dmig import DMIG, DMI, DMIJ, DMIK, DMIJI, DMIG_UACCEL
#from pyNastran.bdf.cards.loads.loads import (
#DAREA, #LSEQ, SLOAD, DAREA, RANDPS, RFORCE, RFORCE1, SPCD, LOADCYN
#)
from pyNastran.bdf.errors import DuplicateIDsError, CrossReferenceError, CardParseSyntaxError
#from pyNastran.bdf.errors import (CrossReferenceError, DuplicateIDsError,
#CardParseSyntaxError, UnsupportedCard, DisabledCardError,
#SuperelementFlagError, ReplicationError)
from pyNastran.bdf.bdf_interface.pybdf import (
BDFInputPy, _show_bad_file)
def read_bdf(bdf_filename=None, validate=True, xref=True, punch=False,
             encoding=None, log=None, debug=True, mode='msc'):
    """
    Creates a BDF object by reading a bdf/dat/pch file.

    Parameters
    ----------
    bdf_filename : str (default=None -> popup)
        the bdf filename
    validate : bool
        runs various checks on the BDF (default=True)
    xref : bool
        should the bdf be cross referenced (default=True)
    punch : bool
        indicates whether the file is a punch file (default=False)
    encoding : str
        the unicode encoding (default=None; system default)
    log : logging module object / None
        if log is set, debug is ignored and uses the
        settings the logging object has
    debug : bool/None
        used to set the logger if no logger is passed in
        True:  logs debug/info/error messages
        False: logs info/error messages
        None:  logs error messages
    mode : str; default='msc'
        the flavor of Nastran the deck is for ('msc', 'nx')

    Returns
    -------
    model : BDF()
        an BDF object

    .. code-block:: python

       >>> bdf = read_bdf(bdf_filename, xref=True)
       >>> g1 = bdf.Node(1)
       >>> print(g1.get_position())
       [10.0, 12.0, 42.0]

    .. note :: this method will change in order to return an object that
               does not have so many methods
    .. todo:: finish this
    """
    model = BDF(log=log, debug=debug, mode=mode)
    model.read_bdf(bdf_filename=bdf_filename, validate=validate,
                   xref=xref, punch=punch, read_includes=True, encoding=encoding)
    return model
class BDF(AddCard, CrossReference, WriteMesh, GetMethods):
"""
NASTRAN BDF Reader/Writer/Editor class.
"""
#: required for sphinx bug
#: http://stackoverflow.com/questions/11208997/autoclass-and-instance-attributes
#__slots__ = ['_is_dynamic_syntax']
    def __init__(self, debug: Union[str, bool, None],
                 log: Optional[SimpleLogger]=None, mode: str='msc'):
        """
        Initializes the BDF object

        Parameters
        ----------
        debug : bool/None
            used to set the logger if no logger is passed in
            True: logs debug/info/error messages
            False: logs info/error messages
            None: logs error messages
        log : logging module object / None
            if log is set, debug is ignored and uses the
            settings the logging object has
        mode : str; default='msc'
            the flavor of Nastran the deck is for ('msc', 'nx')
        """
        # initialize all the mixin bases explicitly (no super() chain here)
        AddCard.__init__(self)
        CrossReference.__init__(self)
        WriteMesh.__init__(self)
        GetMethods.__init__(self)
        assert debug in [True, False, None], 'debug=%r' % debug
        # echo each card to the log as it is read (toggled by ECHOON/ECHOOFF)
        self.echo = False
        self.read_includes = True
        # file management parameters
        self.active_filenames = []
        self.active_filename = None
        self.include_dir = ''
        self.dumplines = False
        # this flag will be flipped to True someday (and then removed), but
        # doesn't support 100% of cards yet.  It enables a new method for card
        # parsing.
        #
        # 80.3 seconds -> 67.2 seconds for full_bay model
        # (multiple BDF passes among other things)
        self._fast_add = True
        # cpylog < 1.5.0 does not accept the 'nlevels' keyword
        log_args = {} if CPYLOG_VERSION <= '1.5.0' else {'nlevels': 2}
        self.log = get_logger2(log=log, debug=debug, **log_args)
        #: list of all read in cards - useful in determining if entire BDF
        #: was read & really useful in debugging
        self.card_count = {}
        #: stores the card_count of cards that have been rejected
        self.reject_count = {}
        #: was an ENDDATA card found
        #self.foundEndData = False
        #: useful in debugging errors in input
        self.debug = debug
        #: flag that allows for OpenMDAO-style optimization syntax to be used
        self._is_dynamic_syntax = False
        #: lines that were rejected b/c they were for a card that isnt supported
        self.reject_lines = []
        #: cards that were created, but not processed
        self.reject_cards = []
        # self.__init_attributes()
        #: the list of possible cards that will be parsed
        self.cards_to_read = set([
            'GRID', 'SPOINT', 'EPOINT', 'POINT', 'POINTAX',
            'PARAM', ## params
            # coords
            'CORD1R', 'CORD1C', 'CORD1S',
            'CORD2R', 'CORD2C', 'CORD2S',
            'PELAS', 'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4',
            'CROD', 'PROD', 'CONROD',
            'CTUBE', 'PTUBE',
            'PBAR', 'PBARL', 'CBAR',
            'CBEAM',
            'PSHEAR', 'CSHEAR',
            'CQUAD4', 'CTRIA3', 'CQUAD8', 'CTRIA6',
            'PSHELL', 'PCOMP', 'PCOMPG',
            'PSOLID', 'PLSOLID',
            'CTETRA', 'CTETRA4', 'CTETRA10',
            'CPYRAM', 'CPYRAM5', 'CPYRAM13',
            'CPENTA', 'CPENTA6', 'CPENTA15',
            'CHEXA', 'CHEXA8', 'CHEXA20',
            'CBUSH', 'CBUSH1D', 'CBUSH2D',
            #'PBUSH', 'PBUSH1D', 'PBUSH2D',
            'CONM1', 'CONM2',
            'PLOTEL',
            'RBAR', 'RBAR1', 'RBE1', 'RBE2', 'RBE3', 'RROD', 'RSPLINE',
            'MAT1', 'MAT8',
            # loads
            'LOAD', 'GRAV',
            'FORCE', 'FORCE1', 'FORCE2',
            'MOMENT', 'MOMENT1', 'MOMENT2',
            'PLOAD', 'PLOAD2', 'PLOAD4', 'PLOADX1',
            'TLOAD1', 'TLOAD2', 'DELAY',
            'RLOAD1', 'DPHASE', #'RLOAD2',
            # constraints
            'SPC', 'SPCADD', 'SPC1', 'SPCD',
            'MPC', 'MPCADD',
            # aero cards
            'AERO', ## aero
            'AEROS', ## aeros
            'GUST', ## gusts
            'FLUTTER', ## flutters
            'FLFACT', ## flfacts
            'MKAERO1', 'MKAERO2', ## mkaeros
            'AECOMP', ## aecomps
            'AEFACT', ## aefacts
            'AELINK', ## aelinks
            'AELIST', ## aelists
            'AEPARM', ## aeparams
            'AESTAT', ## aestats
            'AESURF', ## aesurf
            #'AESURFS', ## aesurfs
            'CAERO1', 'CAERO2', 'CAERO3', 'CAERO4', ## caeros
            # 'CAERO5',
            'PAERO1', 'PAERO2', 'PAERO3', ## paeros
            'PAERO4', # 'PAERO5',
            'MONPNT1', ## monitor_points
            'SPLINE1', 'SPLINE2', 'SPLINE4', 'SPLINE5', ## splines
            #'SPLINE3', 'SPLINE6', 'SPLINE7',
            'TRIM', ## trims
            'CSSCHD', ## csschds
            'DIVERG', ## divergs
            # ---- dynamic cards ---- #
            'DAREA', ## dareas
            'DPHASE', ## dphases
            'DELAY', ## delays
            'NLPARM', ## nlparms
            'ROTORG', 'ROTORD', ## rotors
            'NLPCI', ## nlpcis
            'TSTEP', ## tsteps
            'TSTEPNL', 'TSTEP1', ## tstepnls
            # direct matrix input cards
            'DMIG', 'DMIJ', 'DMIJI', 'DMIK', 'DMI',
            # optimization cards
            'DEQATN', 'DTABLE',
            'DCONSTR', 'DESVAR', 'DDVAL', 'DRESP1', 'DRESP2', 'DRESP3',
            'DVCREL1', 'DVCREL2',
            'DVPREL1', 'DVPREL2',
            'DVMREL1', 'DVMREL2',
            'DOPTPRM', 'DLINK', 'DCONADD', 'DVGRID',
            'SET1', 'SET3', ## sets
            'ASET', 'ASET1', ## asets
            'BSET', 'BSET1', ## bsets
            'CSET', 'CSET1', ## csets
            'QSET', 'QSET1', ## qsets
            'USET', 'USET1', ## usets
            ## suport/suport1/se_suport
            'SUPORT', 'SUPORT1', 'SESUP',
            #: methods
            'EIGB', 'EIGR', 'EIGRL',
            #: cMethods
            'EIGC', 'EIGP',
            # other
            'INCLUDE', # '='
            'ENDDATA',
        ])
        # card names that also appear in the case control deck; excluded from
        # the "unique to bulk data" set used during deck splitting
        case_control_cards = {'FREQ', 'GUST', 'MPC', 'SPC', 'NLPARM', 'NSM',
                              'TEMP', 'TSTEPNL', 'INCLUDE'}
        self._unique_bulk_data_cards = self.cards_to_read.difference(case_control_cards)
        #: / is the delete from restart card
        self.special_cards = ['DEQATN', '/']
        self._make_card_parser()
        # dispatch on the requested Nastran flavor (set from `mode`;
        # presumably via is_msc/is_nx properties defined on a base class)
        if self.is_msc:
            self.set_as_msc()
        elif self.is_nx:
            self.set_as_nx()
        #elif self.is_optistruct:
            #self.set_as_optistruct()
        #elif self.is_radioss:
            #self.set_as_radioss()
        else:
            msg = 'mode=%r is not supported; modes=[msc, nx]' % self._nastran_format
            raise NotImplementedError(msg)
def __getstate__(self):
"""clears out a few variables in order to pickle the object"""
# Copy the object's state from self.__dict__ which contains
# all our instance attributes. Always use the dict.copy()
# method to avoid modifying the original state.
state = self.__dict__.copy()
# Remove the unpicklable entries.
#del state['spcObject'], state['mpcObject'],
del state['_card_parser'], state['_card_parser_b'], state['log']
return state
def save(self, obj_filename='model.obj', unxref=True):
"""
..warning:: doesn't work right
"""
#del self.log
#del self.spcObject
#del self.mpcObject
#del self._card_parser, self._card_parser_prepare
#try:
#del self.log
#except AttributeError:
#pass
#self.case_control_lines = str(self.case_control_deck).split('\n')
#del self.case_control_deck
if unxref:
self.uncross_reference()
with open(obj_filename, 'w') as obj_file:
dump(self, obj_file)
    def load(self, obj_filename='model.obj'):
        """
        Loads (unpickles) a model saved by :meth:`save`.

        Parameters
        ----------
        obj_filename : str; default='model.obj'
            the pickle filename to read

        ..warning:: doesn't work right
        """
        # intentionally disabled: everything below this early return is
        # unreachable work-in-progress code
        return
        #del self.log
        #del self.spcObject
        #del self.mpcObject
        #lines = print(self.case_control_deck)
        #self.case_control_lines = lines.split('\n')
        #del self.case_control_deck
        #self.uncross_reference()
        #import types
        # NOTE(review): pickle requires binary mode; "r" should be "rb"
        # if/when this code path is re-enabled
        with open(obj_filename, "r") as obj_file:
            obj = load(obj_file)
        # attributes that must not be blindly copied from the unpickled
        # object (computed properties, the deck, and unpicklable members)
        keys_to_skip = [
            'case_control_deck',
            'log', #'mpcObject', 'spcObject',
            'node_ids', 'coord_ids', 'element_ids', 'property_ids',
            'material_ids', 'caero_ids', 'is_long_ids',
            'nnodes', 'ncoords', 'nelements', 'nproperties',
            'nmaterials', 'ncaeros',
            'point_ids', 'subcases',
            '_card_parser', '_card_parser_b',
        ]
        for key in object_attributes(self, mode="all", keys_to_skip=keys_to_skip):
            if key.startswith('__') and key.endswith('__'):
                continue
            #print('key =', key)
            val = getattr(obj, key)
            #print(key)
            #if isinstance(val, types.FunctionType):
                #continue
            setattr(self, key, val)
        # rebuild the case control deck from its saved line form
        self.case_control_deck = CaseControlDeck(self.case_control_lines, log=self.log)
        self.log.debug('done loading!')
def replace_cards(self, replace_model):
"""
Replaces the common cards from the current (self) model from the
ones in the new replace_model. The intention is that you're
going to replace things like PSHELLs and DESVARs from a pch file
in order to update your BDF with the optimized geometry.
.. todo:: only does a subset of cards.
Notes
-----
loads/spcs (not supported) are tricky because you
can't replace cards one-to-one...not sure what to do.
"""
for nid, node in replace_model.nodes.items():
self.nodes[nid] = node
for eid, elem in replace_model.elements.items():
self.elements[eid] = elem
for eid, elem in replace_model.rigid_elements.items():
self.rigid_elements[eid] = elem
for pid, prop in replace_model.properties.items():
self.properties[pid] = prop
for mid, mat in replace_model.materials.items():
self.materials[mid] = mat
for dvid, desvar in replace_model.desvars.items():
self.desvars[dvid] = desvar
for dvid, dvprel in replace_model.dvprels.items():
self.dvprels[dvid] = dvprel
for dvid, dvmrel in replace_model.dvmrels.items():
self.dvmrels[dvid] = dvmrel
for dvid, dvgrid in replace_model.dvgrids.items():
self.dvgrids[dvid] = dvgrid
def disable_cards(self, cards):
"""
Method for removing broken cards from the reader
Parameters
----------
cards : List[str]; Set[str]
a list/set of cards that should not be read
.. python ::
bdfModel.disable_cards(['DMIG', 'PCOMP'])
"""
if cards is None:
return
elif isinstance(cards, str):
disable_set = set([cards])
else:
disable_set = set(cards)
self.cards_to_read = self.cards_to_read.difference(disable_set)
def set_error_storage(self, nparse_errors=100, stop_on_parsing_error=True,
nxref_errors=100, stop_on_xref_error=True):
"""
Catch parsing errors and store them up to print them out all at once
(not all errors are caught).
Parameters
----------
nparse_errors : int
how many parse errors should be stored
(default=0; all=None; no storage=0)
stop_on_parsing_error : bool
should an error be raised if there
are parsing errors (default=True)
nxref_errors : int
how many cross-reference errors
should be stored (default=0; all=None; no storage=0)
stop_on_xref_error : bool
should an error be raised if there
are cross-reference errors (default=True)
"""
assert isinstance(nparse_errors, int), type(nparse_errors)
assert isinstance(nxref_errors, int), type(nxref_errors)
self._nparse_errors = nparse_errors
self._nxref_errors = nxref_errors
self._stop_on_parsing_error = stop_on_parsing_error
self._stop_on_xref_error = stop_on_xref_error
def validate(self):
"""runs some checks on the input data beyond just type checking"""
return
for nid, node in sorted(self.nodes.items()):
node.validate()
for cid, coord in sorted(self.coords.items()):
coord.validate()
for eid, elem in sorted(self.elements.items()):
elem.validate()
for pid, prop in sorted(self.properties.items()):
prop.validate()
for eid, elem in sorted(self.rigid_elements.items()):
elem.validate()
for eid, plotel in sorted(self.plotels.items()):
plotel.validate()
#for eid, mass in sorted(self.masses.items()):
#mass.validate()
for pid, property_mass in sorted(self.properties_mass.items()):
property_mass.validate()
#------------------------------------------------
for mid, mat in sorted(self.materials.items()):
mat.validate()
for mid, mat in sorted(self.thermal_materials.items()):
mat.validate()
for mid, mat in sorted(self.MATS1.items()):
mat.validate()
for mid, mat in sorted(self.MATS3.items()):
mat.validate()
for mid, mat in sorted(self.MATS8.items()):
mat.validate()
for mid, mat in sorted(self.MATT1.items()):
mat.validate()
for mid, mat in sorted(self.MATT2.items()):
mat.validate()
for mid, mat in sorted(self.MATT3.items()):
mat.validate()
for mid, mat in sorted(self.MATT4.items()):
mat.validate()
for mid, mat in sorted(self.MATT5.items()):
mat.validate()
for mid, mat in sorted(self.MATT8.items()):
mat.validate()
for mid, mat in sorted(self.MATT9.items()):
mat.validate()
for mid, mat in sorted(self.creep_materials.items()):
mat.validate()
for mid, mat in sorted(self.hyperelastic_materials.items()):
mat.validate()
#------------------------------------------------
for key, loads in sorted(self.loads.items()):
for loadi in loads:
loadi.validate()
for key, tic in sorted(self.tics.items()):
tic.validate()
for key, dloads in sorted(self.dloads.items()):
for dload in dloads:
dload.validate()
for key, dload_entries in sorted(self.dload_entries.items()):
for dload_entry in dload_entries:
dload_entry.validate()
#------------------------------------------------
for key, nlpci in sorted(self.nlpcis.items()):
nlpci.validate()
for key, nlparm in sorted(self.nlparms.items()):
nlparm.validate()
for key, tstep in sorted(self.tsteps.items()):
tstep.validate()
for key, tstepnl in sorted(self.tstepnls.items()):
tstepnl.validate()
for key, transfer_functions in sorted(self.transfer_functions.items()):
for transfer_function in transfer_functions:
transfer_function.validate()
for key, delay in sorted(self.delays.items()):
delay.validate()
#------------------------------------------------
if self.aeros is not None:
self.aeros.validate()
for caero_id, caero in sorted(self.caeros.items()):
caero.validate()
for key, paero in sorted(self.paeros.items()):
paero.validate()
for spline_id, spline in sorted(self.splines.items()):
spline.validate()
for key, aecomp in sorted(self.aecomps.items()):
aecomp.validate()
for key, aefact in sorted(self.aefacts.items()):
aefact.validate()
for key, aelinks in sorted(self.aelinks.items()):
for aelink in aelinks:
aelink.validate()
for key, aeparam in sorted(self.aeparams.items()):
aeparam.validate()
for key, aesurf in sorted(self.aesurf.items()):
aesurf.validate()
for key, aesurfs in sorted(self.aesurfs.items()):
aesurfs.validate()
for key, aestat in sorted(self.aestats.items()):
aestat.validate()
for key, trim in sorted(self.trims.items()):
trim.validate()
for key, diverg in sorted(self.divergs.items()):
diverg.validate()
for key, csschd in sorted(self.csschds.items()):
csschd.validate()
for monitor in self.monitor_points:
monitor.validate()
#------------------------------------------------
if self.aero is not None:
self.aero.validate()
for key, flfact in sorted(self.flfacts.items()):
flfact.validate()
for key, flutter in sorted(self.flutters.items()):
flutter.validate()
for key, gust in sorted(self.gusts.items()):
gust.validate()
#self.mkaeros = []
#------------------------------------------------
for key, bcs in sorted(self.bcs.items()):
for bc in bcs:
bc.validate()
for key, phbdy in sorted(self.phbdys.items()):
phbdy.validate()
for key, convection_property in sorted(self.convection_properties.items()):
convection_property.validate()
for key, tempd in sorted(self.tempds.items()):
tempd.validate()
#------------------------------------------------
for key, bcrpara in sorted(self.bcrparas.items()):
bcrpara.validate()
for key, bctadd in sorted(self.bctadds.items()):
bctadd.validate()
for key, bctpara in sorted(self.bctparas.items()):
bctpara.validate()
for key, bctset in sorted(self.bctsets.items()):
bctset.validate()
for key, bsurf in sorted(self.bsurf.items()):
bsurf.validate()
for key, bsurfs in sorted(self.bsurfs.items()):
bsurfs.validate()
#------------------------------------------------
for key, suport1 in sorted(self.suport1.items()):
suport1.validate()
for suport in self.suport:
suport.validate()
for se_suport in self.se_suport:
se_suport.validate()
for key, spcs in sorted(self.spcs.items()):
for spc in spcs:
spc.validate()
for key, spcadd in sorted(self.spcadds.items()):
spcadd.validate()
for key, mpcs in sorted(self.mpcs.items()):
for mpc in mpcs:
mpc.validate()
for key, mpcadd in sorted(self.mpcadds.items()):
mpcadd.validate()
#------------------------------------------------
#for key, darea in sorted(self.dareas.items()):
#darea.validate()
#for key, dphase in sorted(self.dphases.items()):
#dphase.validate()
for pid, pbusht in sorted(self.pbusht.items()):
pbusht.validate()
for pid, pdampt in sorted(self.pdampt.items()):
pdampt.validate()
for pid, pelast in sorted(self.pelast.items()):
pelast.validate()
for pid, frequency in sorted(self.frequencies.items()):
frequency.validate()
#------------------------------------------------
for key, dmi in sorted(self.dmi.items()):
dmi.validate()
for key, dmig in sorted(self.dmig.items()):
dmig.validate()
for key, dmij in sorted(self.dmij.items()):
dmij.validate()
for key, dmiji in sorted(self.dmiji.items()):
dmiji.validate()
for key, dmik in sorted(self.dmik.items()):
dmik.validate()
#------------------------------------------------
#self.asets = []
#self.bsets = []
#self.csets = []
#self.qsets = []
#self.usets = {}
##: SExSETy
#self.se_bsets = []
#self.se_csets = []
#self.se_qsets = []
#self.se_usets = {}
#self.se_sets = {}
for key, sets in sorted(self.sets.items()):
sets.validate()
for key, uset in sorted(self.usets.items()):
for useti in uset:
useti.validate()
for aset in self.asets:
aset.validate()
for omit in self.omits:
omit.validate()
for bset in self.bsets:
bset.validate()
for cset in self.csets:
cset.validate()
for qset in self.qsets:
qset.validate()
for key, se_set in sorted(self.se_sets.items()):
se_set.validate()
for key, se_uset in sorted(self.se_usets.items()):
se_uset.validate()
for se_bset in self.se_bsets:
se_bset.validate()
for se_cset in self.se_csets:
se_cset.validate()
for se_qset in self.se_qsets:
se_qset.validate()
#------------------------------------------------
for key, table in sorted(self.tables.items()):
table.validate()
for key, table in sorted(self.tables_d.items()):
table.validate()
for key, table in sorted(self.tables_m.items()):
table.validate()
for key, random_table in sorted(self.random_tables.items()):
random_table.validate()
for key, table_sdamping in sorted(self.tables_sdamping.items()):
table_sdamping.validate()
#------------------------------------------------
for key, method in sorted(self.methods.items()):
method.validate()
for key, cmethod in sorted(self.cMethods.items()):
cmethod.validate()
#------------------------------------------------
for key, dconadd in sorted(self.dconadds.items()):
dconadd.validate()
for key, dconstrs in sorted(self.dconstrs.items()):
for dconstr in dconstrs:
dconstr.validate()
for key, desvar in sorted(self.desvars.items()):
desvar.validate()
for key, ddval in sorted(self.ddvals.items()):
ddval.validate()
for key, dlink in sorted(self.dlinks.items()):
dlink.validate()
for key, dresp in sorted(self.dresps.items()):
dresp.validate()
if self.dtable is not None:
self.dtable.validate()
if self.doptprm is not None:
self.doptprm.validate()
for key, dequation in sorted(self.dequations.items()):
dequation.validate()
for key, dvprel in sorted(self.dvprels.items()):
dvprel.validate()
for key, dvmrel in sorted(self.dvmrels.items()):
dvmrel.validate()
for key, dvcrel in sorted(self.dvcrels.items()):
dvcrel.validate()
for key, dscreen in sorted(self.dscreen.items()):
dscreen.validate()
for dvid, dvgrid in self.dvgrids.items():
dvgrid.validate()
#------------------------------------------------
    def read_bdf(self, bdf_filename=None,
                 validate=True, xref=True, punch=False, read_includes=True, encoding=None):
        """
        Read method for the bdf files

        Parameters
        ----------
        bdf_filename : str / None
            the input bdf (default=None; popup a dialog)
        validate : bool
            runs various checks on the BDF (default=True)
        xref : bool
            should the bdf be cross referenced (default=True)
        punch : bool
            indicates whether the file is a punch file (default=False)
        read_includes : bool
            indicates whether INCLUDE files should be read (default=True)
        encoding : str
            the unicode encoding (default=None; system default)

        .. code-block:: python

           >>> bdf = BDF()
           >>> bdf.read_bdf(bdf_filename, xref=True)
           >>> g1 = bdf.Node(1)
           >>> print(g1.get_position())
           [10.0, 12.0, 42.0]
        """
        # resolve filename/encoding/punch flags and read any pyNastran header
        self._read_bdf_helper(bdf_filename, encoding, punch, read_includes)
        self._parse_primary_file_header(bdf_filename)
        self.log.debug('---starting BDF.read_bdf of %s---' % self.bdf_filename)
        #executive_control_lines, case_control_lines, \
            #bulk_data_lines = self.get_lines(self.bdf_filename, self.punch)
        # split the deck into its sections (executive/case control/bulk data)
        obj = BDFInputPy(self.read_includes, self.dumplines, self._encoding,
                         log=self.log, debug=self.debug)
        out = obj.get_lines(bdf_filename, punch=self.punch)
        #system_lines, executive_control_lines, case_control_lines, bulk_data_lines = out
        (system_lines, executive_control_lines, case_control_lines,
         bulk_data_lines, bulk_data_ilines,
         superelement_lines, superelement_ilines) = out
        self._set_pybdf_attributes(obj)
        self.case_control_lines = case_control_lines
        self.executive_control_lines = executive_control_lines
        # extract SOL/method from the executive control deck and build the
        # case control deck object
        sol, method, sol_iline = parse_executive_control_deck(executive_control_lines)
        self.update_solution(sol, method, sol_iline)
        self.case_control_deck = CaseControlDeck(self.case_control_lines, self.log)
        #print(self.object_attributes())
        self.case_control_deck.solmap_to_value = self._solmap_to_value
        self.case_control_deck.rsolmap_to_str = self.rsolmap_to_str
        # group the bulk data lines into cards, then parse them
        if self._is_cards_dict:
            cards, card_count = self.get_bdf_cards_dict(bulk_data_lines)
        else:
            cards, card_count = self.get_bdf_cards(bulk_data_lines)
        self._parse_cards(cards, card_count)
        # dead code: the `if 0` guard permanently disables value stripping
        if 0 and self.values_to_skip:
            for key, values in self.values_to_skip.items():
                dict_values = getattr(self, key)
                if not isinstance(dict_values, dict):
                    msg = '%r is an invalid type; only dictionaries are supported' % key
                    raise TypeError(msg)
                for value in values:
                    del dict_values[value]
            # TODO: redo get_card_ids_by_card_types & card_count
        #self.pop_parse_errors()
        # assemble DMIx column data (module-level helper, not the method)
        fill_dmigs(self)
        if validate:
            self.validate()
        self.cross_reference(xref=xref)
        self._xref = xref
        self.log.debug('---finished BDF.read_bdf of %s---' % self.bdf_filename)
        #self.pop_xref_errors()
def _set_pybdf_attributes(self, obj):
"""common method for all functions that use BDFInputPy"""
#self.reject_lines += obj.reject_lines
self.active_filenames += obj.active_filenames
self.active_filename = obj.active_filename
self.include_dir = obj.include_dir
def _read_bdf_helper(self, bdf_filename, encoding, punch, read_includes):
"""creates the file loading if bdf_filename is None"""
#self.set_error_storage(nparse_errors=None, stop_on_parsing_error=True,
# nxref_errors=None, stop_on_xref_error=True)
if encoding is None:
encoding = sys.getdefaultencoding()
self._encoding = encoding
if bdf_filename is None:
from pyNastran.utils.gui_io import load_file_dialog
wildcard_wx = "Nastran BDF (*.bdf; *.dat; *.nas; *.pch, *.ecd)|" \
"*.bdf;*.dat;*.nas;*.pch|" \
"All files (*.*)|*.*"
wildcard_qt = "Nastran BDF (*.bdf *.dat *.nas *.pch *.ecd);;All files (*)"
title = 'Please select a BDF/DAT/PCH/ECD to load'
bdf_filename = load_file_dialog(title, wildcard_wx, wildcard_qt)[0]
assert bdf_filename is not None, bdf_filename
check_path(bdf_filename, 'bdf_filename')
if bdf_filename.lower().endswith('.pch'): # .. todo:: should this be removed???
punch = True
#: the active filename (string)
self.bdf_filename = bdf_filename
#: is this a punch file (no executive control deck)
self.punch = punch
self.read_includes = read_includes
self.active_filenames = []
def fill_dmigs(self):
"""fills the DMIx cards with the column data that's been stored"""
return
#for name, card_comments in self._dmig_temp.items():
#card0, comment0 = card_comments[0]
#card_name = card0[0]
#card_name = card_name.rstrip(' *').upper()
#if card_name == 'DMIG':
## if field2 == 'UACCEL': # special DMIG card
#card = self.dmig[name]
#elif card_name == 'DMI':
#card = self.dmi[name]
#elif card_name == 'DMIJ':
#card = self.dmij[name]
#elif card_name == 'DMIJI':
#card = self.dmiji[name]
#elif card_name == 'DMIK':
#card = self.dmik[name]
#else:
#raise NotImplementedError(card_name)
#for (card_obj, comment) in card_comments:
#card._add_column(card_obj, comment=comment)
#card.finalize()
#self._dmig_temp = defaultdict(list)
def pop_parse_errors(self):
"""raises an error if there are parsing errors"""
if self._stop_on_parsing_error:
if self._iparse_errors == 1 and self._nparse_errors == 0:
raise
is_error = False
msg = ''
if self._duplicate_elements:
duplicate_eids = [elem.eid for elem in self._duplicate_elements]
uduplicate_eids = np.unique(duplicate_eids)
msg += 'self.elements IDs are not unique=%s\n' % uduplicate_eids
for eid in uduplicate_eids:
msg += 'old_element=\n%s\n' % str(self.elements[eid])
msg += 'new_elements=\n'
for elem, eidi in zip(self._duplicate_elements, duplicate_eids):
if eidi == eid:
msg += str(elem)
msg += '\n'
is_error = True
raise DuplicateIDsError(msg)
if self._duplicate_properties:
duplicate_pids = [prop.pid for prop in self._duplicate_properties]
uduplicate_pids = np.unique(duplicate_pids)
msg += 'self.properties IDs are not unique=%s\n' % uduplicate_pids
for pid in duplicate_pids:
msg += 'old_property=\n%s\n' % str(self.properties[pid])
msg += 'new_properties=\n'
for prop, pidi in zip(self._duplicate_properties, duplicate_pids):
if pidi == pid:
msg += str(prop)
msg += '\n'
is_error = True
if self._duplicate_masses:
duplicate_eids = [elem.eid for elem in self._duplicate_masses]
uduplicate_eids = np.unique(duplicate_eids)
msg += 'self.massses IDs are not unique=%s\n' % uduplicate_eids
for eid in uduplicate_eids:
msg += 'old_mass=\n%s\n' % str(self.masses[eid])
msg += 'new_masses=\n'
for elem, eidi in zip(self._duplicate_masses, duplicate_eids):
if eidi == eid:
msg += str(elem)
msg += '\n'
is_error = True
if self._duplicate_materials:
duplicate_mids = [mat.mid for mat in self._duplicate_materials]
uduplicate_mids = np.unique(duplicate_mids)
msg += 'self.materials IDs are not unique=%s\n' % uduplicate_mids
for mid in uduplicate_mids:
msg += 'old_material=\n%s\n' % str(self.materials[mid])
msg += 'new_materials=\n'
for mat, midi in zip(self._duplicate_materials, duplicate_mids):
if midi == mid:
msg += str(mat)
msg += '\n'
is_error = True
if self._duplicate_thermal_materials:
duplicate_mids = [mat.mid for mat in self._duplicate_thermal_materials]
uduplicate_mids = np.unique(duplicate_mids)
msg += 'self.thermal_materials IDs are not unique=%s\n' % uduplicate_mids
for mid in uduplicate_mids:
msg += 'old_thermal_material=\n%s\n' % str(self.thermal_materials[mid])
msg += 'new_thermal_materials=\n'
for mat, midi in zip(self._duplicate_thermal_materials, duplicate_mids):
if midi == mid:
msg += str(mat)
msg += '\n'
is_error = True
if self._duplicate_coords:
duplicate_cids = [coord.cid for coord in self._duplicate_coords]
uduplicate_cids = np.unique(duplicate_cids)
msg += 'self.coords IDs are not unique=%s\n' % uduplicate_cids
for cid in uduplicate_cids:
msg += 'old_coord=\n%s\n' % str(self.coords[cid])
msg += 'new_coords=\n'
for coord, cidi in zip(self._duplicate_coords, duplicate_cids):
if cidi == cid:
msg += str(coord)
msg += '\n'
is_error = True
if is_error:
msg = 'There are dupliate cards.\n\n' + msg
if self._stop_on_xref_error:
msg += 'There are parsing errors.\n\n'
for (card, an_error) in self._stored_parse_errors:
msg += '%scard=%s\n' % (an_error[0], card)
msg += 'xref error: %s\n\n'% an_error[0]
is_error = True
if is_error:
self.log.error('%s' % msg)
raise DuplicateIDsError(msg.rstrip())
def pop_xref_errors(self):
"""raises an error if there are cross-reference errors"""
is_error = False
if self._stop_on_xref_error:
if self._ixref_errors == 1 and self._nxref_errors == 0:
raise
if self._stored_xref_errors:
msg = 'There are cross-reference errors.\n\n'
for (card, an_error) in self._stored_xref_errors:
msg += '%scard=%s\n' % (an_error[0], card)
is_error = True
if is_error and self._stop_on_xref_error:
raise CrossReferenceError(msg.rstrip())
    def get_bdf_cards(self, bulk_data_lines):
        """
        Parses the BDF lines into a list of card_lines

        Parameters
        ----------
        bulk_data_lines : List[str]
            the lines of the bulk data deck

        Returns
        -------
        cards : List[List]
            [card_name, comment, card_lines] for each card found
        card_count : dict[str] = int
            how many times each card name was seen
        """
        cards = []
        #cards = defaultdict(list)
        card_count = defaultdict(int)
        full_comment = ''      # comment attached to the card currently being collected
        card_lines = []        # data lines of the card currently being collected
        old_card_name = None   # name of the card currently being collected
        backup_comment = ''    # comment lines seen before the next data line
        nlines = len(bulk_data_lines)
        for i, line in enumerate(bulk_data_lines):
            #print('  backup=%r' % backup_comment)
            comment = ''
            if '$' in line:
                # split an inline comment off the data portion
                line, comment = line.split('$', 1)
            # the card name is the first field (max 8 chars, before ',' or tab)
            card_name = line.split(',', 1)[0].split('\t', 1)[0][:8].rstrip().upper()
            if card_name and card_name[0] not in ['+', '*']:
                # a new (non-continuation) card starts; flush the previous one
                if old_card_name:
                    if self.echo:
                        self.log.info('Reading %s:\n' %
                                      old_card_name + full_comment + ''.join(card_lines))

                    # old dictionary version
                    # cards[old_card_name].append([full_comment, card_lines])

                    # new list version
                    #if full_comment:
                        #print('full_comment = ', full_comment)
                    cards.append([old_card_name, _prep_comment(full_comment), card_lines])

                    card_count[old_card_name] += 1
                    card_lines = []
                    full_comment = ''

                    # ECHOON/ECHOOFF toggle echoing of subsequently read cards
                    if old_card_name == 'ECHOON':
                        self.echo = True
                    elif old_card_name == 'ECHOOFF':
                        self.echo = False
                old_card_name = card_name.rstrip(' *')
                if old_card_name == 'ENDDATA':
                    self.card_count['ENDDATA'] = 1
                    if nlines - i > 1:
                        nleftover = nlines - i - 1
                        msg = 'exiting due to ENDDATA found with %i lines left' % nleftover
                        self.log.debug(msg)
                    # nothing after ENDDATA is read
                    return cards, card_count
                #print("card_name = %s" % card_name)

            comment = _clean_comment(comment)

            if line.rstrip():
                card_lines.append(line)
                if backup_comment:
                    if comment:
                        full_comment += backup_comment + comment + '\n'
                    else:
                        full_comment += backup_comment
                    backup_comment = ''
                elif comment:
                    full_comment += comment + '\n'
                    backup_comment = ''
            elif comment:
                # comment-only line; hold it until the next data line arrives
                backup_comment += comment + '\n'
                #print('add backup=%r' % backup_comment)
            #elif comment:
                #backup_comment += '$' + comment + '\n'

        # flush the last card (no trailing card name to trigger it)
        if card_lines:
            if self.echo:
                self.log.info('Reading %s:\n' % old_card_name + full_comment + ''.join(card_lines))
            #print('end_add %s' % card_lines)

            # old dictionary version
            #cards[old_card_name].append([backup_comment + full_comment, card_lines])

            # new list version
            #if backup_comment + full_comment:
                #print('backup_comment + full_comment = ', backup_comment + full_comment)
            cards.append([old_card_name, _prep_comment(backup_comment + full_comment), card_lines])
            card_count[old_card_name] += 1
        return cards, card_count
    def get_bdf_cards_dict(self, bulk_data_lines):
        """
        Parses the BDF lines into a dict of card_lines

        Parameters
        ----------
        bulk_data_lines : List[str]
            the lines of the bulk data deck

        Returns
        -------
        cards : dict[str] = List[List]
            card_name -> list of [comment, card_lines] entries
        card_count : dict[str] = int
            how many times each card name was seen
        """
        cards = defaultdict(list)
        card_count = defaultdict(int)
        full_comment = ''      # comment attached to the card currently being collected
        card_lines = []        # data lines of the card currently being collected
        old_card_name = None   # name of the card currently being collected
        backup_comment = ''    # comment lines seen before the next data line
        nlines = len(bulk_data_lines)
        for i, line in enumerate(bulk_data_lines):
            #print('  backup=%r' % backup_comment)
            comment = ''
            if '$' in line:
                # split an inline comment off the data portion
                line, comment = line.split('$', 1)
            # the card name is the first field (max 8 chars, before ',' or tab)
            card_name = line.split(',', 1)[0].split('\t', 1)[0][:8].rstrip().upper()
            if card_name and card_name[0] not in ['+', '*']:
                # a new (non-continuation) card starts; flush the previous one
                if old_card_name:
                    if self.echo:
                        self.log.info('Reading %s:\n' %
                                      old_card_name + full_comment + ''.join(card_lines))

                    # old dictionary version
                    cards[old_card_name].append([full_comment, card_lines])

                    # new list version
                    #cards.append([old_card_name, full_comment, card_lines])

                    card_count[old_card_name] += 1
                    card_lines = []
                    full_comment = ''

                    # ECHOON/ECHOOFF toggle echoing of subsequently read cards
                    if old_card_name == 'ECHOON':
                        self.echo = True
                    elif old_card_name == 'ECHOOFF':
                        self.echo = False
                old_card_name = card_name.rstrip(' *')
                if old_card_name == 'ENDDATA':
                    self.card_count['ENDDATA'] = 1
                    if nlines - i > 1:
                        nleftover = nlines - i - 1
                        msg = 'exiting due to ENDDATA found with %i lines left' % nleftover
                        self.log.debug(msg)
                    # nothing after ENDDATA is read
                    return cards, card_count
                #print("card_name = %s" % card_name)

            comment = _clean_comment(comment)

            if line.rstrip():
                card_lines.append(line)
                if backup_comment:
                    if comment:
                        full_comment += backup_comment + comment + '\n'
                    else:
                        full_comment += backup_comment
                    backup_comment = ''
                elif comment:
                    full_comment += comment + '\n'
                    backup_comment = ''
            elif comment:
                # comment-only line; hold it until the next data line arrives
                backup_comment += comment + '\n'
                #print('add backup=%r' % backup_comment)
            #elif comment:
                #backup_comment += comment + '\n'

        # flush the last card (no trailing card name to trigger it)
        if card_lines:
            if self.echo:
                self.log.info('Reading %s:\n' % old_card_name + full_comment + ''.join(card_lines))
            #print('end_add %s' % card_lines)

            # old dictionary version
            cards[old_card_name].append([backup_comment + full_comment, card_lines])

            # new list version
            #cards.append([old_card_name, backup_comment + full_comment, card_lines])

            card_count[old_card_name] += 1
        return cards, card_count
def update_solution(self, sol, method, sol_iline):
"""
Updates the overall solution type (e.g. 101,200,600)
Parameters
----------
sol : int
the solution type (101, 103, etc)
method : str
the solution method (only for SOL=600)
sol_iline : int
the line to put the SOL/method on
"""
self.sol_iline = sol_iline
# the integer of the solution type (e.g. SOL 101)
if sol is None:
self.sol = None
self.sol_method = None
return
try:
self.sol = int(sol)
except ValueError:
try:
self.sol = self._solmap_to_value[sol]
except KeyError:
self.sol = sol
if self.sol == 600:
#: solution 600 method modifier
self.sol_method = method.strip()
self.log.debug("sol=%s method=%s" % (self.sol, self.sol_method))
else: # very common
self.sol_method = None
def set_dynamic_syntax(self, dict_of_vars):
"""
Uses the OpenMDAO syntax of %varName in an embedded BDF to
update the values for an optimization study.
Parameters
----------
dict_of_vars : dict[str] = int/float/str
dictionary of 7 character variable names to map.
.. code-block:: python
GRID, 1, %xVar, %yVar, %zVar
>>> dict_of_vars = {'xVar': 1.0, 'yVar', 2.0, 'zVar':3.0}
>>> bdf = BDF()
>>> bdf.set_dynamic_syntax(dict_of_vars)
>>> bdf,read_bdf(bdf_filename, xref=True)
Notes
-----
Case sensitivity is supported.
Variables should be 7 characters or less to fit in an
8-character field.
.. warning:: Type matters!
"""
self.dict_of_vars = {}
assert len(dict_of_vars) > 0, 'nvars = %s' % len(dict_of_vars)
for (key, value) in sorted(dict_of_vars.items()):
assert len(key) <= 7, ('max length for key is 7; '
'len(%s)=%s' % (key, len(key)))
assert len(key) >= 1, ('min length for key is 1; '
'len(%s)=%s' % (key, len(key)))
if not isinstance(key, str):
msg = 'key=%r must be a string. type=%s' % (key, type(key))
raise TypeError(msg)
self.dict_of_vars[key] = value
self._is_dynamic_syntax = True
def is_reject(self, card_name):
"""
Can the card be read.
If the card is rejected, it's added to self.reject_count
Parameters
----------
card_name : str
the card_name -> 'GRID'
"""
if card_name.startswith('='):
return False
elif card_name in self.cards_to_read:
return False
if card_name:
if card_name not in self.reject_count:
self.reject_count[card_name] = 0
self.reject_count[card_name] += 1
return True
def _process_card(self, card_lines):
"""
Converts card_lines into a card.
Considers dynamic syntax and removes empty fields
Parameters
----------
card_lines : List[str]
list of strings that represent the card's lines
Returns
-------
fields : list[str]
the parsed card's fields
card_name : str
the card's name
.. code-block:: python
>>> card_lines = ['GRID,1,,1.0,2.0,3.0,,']
>>> model = BDF()
>>> fields, card_name = model._process_card(card_lines)
>>> fields
['GRID', '1', '', '1.0', '2.0', '3.0']
>>> card_name
'GRID'
"""
card_name = self._get_card_name(card_lines)
fields = to_fields(card_lines, card_name)
if self._is_dynamic_syntax:
fields = [self._parse_dynamic_syntax(field) if '%' in
field[0:1] else field for field in fields]
card = wipe_empty_fields(fields)
card[0] = card_name
return card
def create_card_object(self, card_lines, card_name, is_list=True, has_none=True):
"""
Creates a BDFCard object, which is really just a list that
allows indexing past the last field
Parameters
----------
card_lines: list[str]
the list of the card fields
input is list of card_lines -> ['GRID, 1, 2, 3.0, 4.0, 5.0']
card_name : str
the card_name -> 'GRID'
is_list : bool; default=True
True : this is a list of fields
False : this is a list of lines
has_none : bool; default=True
can there be trailing Nones in the card data (e.g. ['GRID, 1, 2, 3.0, 4.0, 5.0, '])
Returns
-------
card_object : BDFCard()
the card object representation of card
card : list[str]
the card with empty fields removed
"""
card_name = card_name.upper()
self.increase_card_count(card_name)
if card_name in ['DEQATN', 'PBRSECT', 'PBMSECT']:
card_obj = card_lines
card = card_lines
else:
if is_list:
fields = card_lines
else:
fields = to_fields(card_lines, card_name)
# apply OPENMDAO syntax
if self._is_dynamic_syntax:
fields = [print_field_16(self._parse_dynamic_syntax(field)) if '%' in
field.strip()[0:1] else print_field_16(field) for field in fields]
has_none = False
if has_none:
card = wipe_empty_fields([print_field_16(field) for field in fields])
else:
#card = remove_trailing_fields(fields)
card = wipe_empty_fields(fields)
card_obj = BDFCard(card, has_none=False)
return card_obj, card
def create_card_object_list(self, card_lines, card_name, has_none=True):
"""
Creates a BDFCard object, which is really just a list that
allows indexing past the last field
Parameters
----------
card_lines: list[str]
the list of the card lines
input is list of lines -> ['GRID, 1, 2, 3.0, 4.0, 5.0']
card_name : str
the card_name -> 'GRID'
has_none : bool; default=True
???
Returns
-------
card_obj : BDFCard
the BDFCard object
card : list[str]
the card with empty fields removed
"""
card_name = card_name.upper()
self.increase_card_count(card_name)
if card_name in ['DEQATN', 'PBRSECT', 'PBMSECT']:
card_obj = card_lines
card = card_lines
else:
fields = card_lines
# apply OPENMDAO syntax
if self._is_dynamic_syntax:
fields = [print_field_16(self._parse_dynamic_syntax(field)) if '%' in
field.strip()[0:1] else print_field_16(field) for field in fields]
has_none = False
if has_none:
card = wipe_empty_fields([print_field_16(field) for field in fields])
else:
#card = remove_trailing_fields(fields)
card = wipe_empty_fields(fields)
card_obj = BDFCard(card, has_none=False)
return card_obj, card
def create_card_object_fields(self, card_lines, card_name, has_none=True):
"""
Creates a BDFCard object, which is really just a list that
allows indexing past the last field
Parameters
----------
card_lines: list[str]
the list of the card fields
input is list of fields -> ['GRID', '1', '2', '3.0', '4.0', '5.0']
card_name : str
the card_name -> 'GRID'
has_none : bool; default=True
can there be trailing Nones in the card data
(e.g. ['GRID', '1', '2', '3.0', '4.0', '5.0'])
Returns
-------
card_obj : BDFCard
the BDFCard object
card : list[str]
the card with empty fields removed
"""
card_name = card_name.upper()
self.increase_card_count(card_name)
if card_name in ['DEQATN', 'PBRSECT', 'PBMSECT']:
card_obj = card_lines
card = card_lines
else:
fields = to_fields(card_lines, card_name)
# apply OPENMDAO syntax
if self._is_dynamic_syntax:
fields = [print_field_16(self._parse_dynamic_syntax(field)) if '%' in
field.strip()[0:1] else print_field_16(field) for field in fields]
has_none = False
if has_none:
card = wipe_empty_fields([print_field_16(field) for field in fields])
else:
#card = remove_trailing_fields(fields)
card = wipe_empty_fields(fields)
card_obj = BDFCard(card, has_none=False)
return card_obj, card
    def _make_card_parser(self):
        """
        creates the card parser variables that are used by add_card

        ``self._card_parser`` maps a card name to a
        ``(card_class, adder_method)`` pair; the generic add path builds the
        class from the card and hands it to the adder.
        ``self._card_parser_prepare`` maps a card name to a single
        ``_prepare_*`` callable for cards that need custom construction.
        Commented-out entries are cards handled elsewhere (or not yet
        vectorized) and are kept as a reference.
        """
        class Crash:
            """class for crashing on specific cards"""
            def __init__(self):
                """dummy init"""
                pass

            @classmethod
            def add_card(cls, card, comment=''):
                """the method that forces the crash"""
                raise NotImplementedError(card)

        add_methods = self._add_methods
        # card_name -> (class, adder_method)
        self._card_parser = {
            #'=' : (Crash, None),
            '/' : (Crash, None),

            # nodes
            #'GRID' : (GRID, self.add_node),
            #'SPOINT' : (SPOINTs, self.add_spoint),
            #'EPOINT' : (EPOINTs, self.add_epoint),
            #'POINT' : (POINT, self.add_point),

            'PARAM' : (PARAM, add_methods._add_param_object),

            #'CORD2R' : (CORD2R, self._add_coord_object),
            #'CORD2C' : (CORD2C, self._add_coord_object),
            #'CORD2S' : (CORD2S, self._add_coord_object),
            #'GMCORD' : (GMCORD, self._add_coord_object),

            'PLOTEL' : (PLOTEL, add_methods._add_plotel_object),

            #'CONROD' : (CONROD, self.add_element),
            #'CROD' : (CROD, self.add_element),
            #'PROD' : (PROD, self.add_property),
            #'CTUBE' : (CTUBE, self.add_element),
            #'PTUBE' : (PTUBE, self.add_property),

            #'CBAR' : (CBAR, self.add_element),
            #'PBAR' : (PBAR, self.add_property),
            #'PBARL' : (PBARL, self.add_property),
            #'PBRSECT' : (PBRSECT, self.add_property),

            #'CBEAM' : (CBEAM, self.add_element),
            #'PBEAM' : (PBEAM, self.add_property),
            #'PBEAML' : (PBEAML, self.add_property),
            #'PBCOMP' : (PBCOMP, self.add_property),
            #'PBMSECT' : (PBMSECT, self.add_property),

            #'CBEAM3' : (CBEAM3, self.add_element),
            #'PBEAM3' : (PBEAM3, self.add_property),

            #'CBEND' : (CBEND, self.add_element),
            #'PBEND' : (PBEND, self.add_property),

            #'CTRIA3' : (CTRIA3, self.add_element),
            #'CQUAD4' : (CQUAD4, self.add_element),
            #'CQUAD' : (CQUAD, self.add_element),
            #'CQUAD8' : (CQUAD8, self.add_element),
            #'CQUADX' : (CQUADX, self.add_element),
            #'CQUADR' : (CQUADR, self.add_element),
            #'CTRIA6' : (CTRIA6, self.add_element),
            #'CTRIAR' : (CTRIAR, self.add_element),
            #'CTRIAX' : (CTRIAX, self.add_element),
            #'CTRIAX6' : (CTRIAX6, self.add_element),
            #'PCOMP' : (PCOMP, self.add_property),
            #'PCOMPG' : (PCOMPG, self.add_property),
            #'PSHELL' : (PSHELL, self.add_property),
            #'PLPLANE' : (PLPLANE, self.add_property),

            #'CPLSTN3' : (CPLSTN3, self.add_element),
            #'CPLSTN4' : (CPLSTN4, self.add_element),
            #'CPLSTN6' : (CPLSTN6, self.add_element),
            #'CPLSTN8' : (CPLSTN8, self.add_element),
            #'PPLANE' : (PPLANE, self.add_property),

            #'CSHEAR' : (CSHEAR, self.add_element),
            #'PSHEAR' : (PSHEAR, self.add_property),

            #'CTETRA' : (CTETRA, self.add_element),
            #'CPYRAM' : (CPYRAM, self.add_element),
            #'CPENTA' : (CPENTA, self.add_element),
            #'CHEXA' : (CHEXA, self.add_element),
            #'CIHEX1' : (CIHEX1, self.add_element),
            #'PIHEX' : (PIHEX, self.add_property),
            #'PSOLID' : (PSOLID, self.add_property),
            #'PLSOLID' : (PLSOLID, self.add_property),
            #'PCOMPS' : (PCOMPS, self.add_property),

            #'CELAS1' : (CELAS1, self.add_element),
            #'CELAS2' : (CELAS2, self.add_element),
            #'CELAS3' : (CELAS3, self.add_element),
            #'CELAS4' : (CELAS4, self.add_element),
            #'CVISC' : (CVISC, self.add_element),
            #'PELAST' : (PELAST, self.add_PELAST),

            #'CDAMP1' : (CDAMP1, self.add_damper),
            #'CDAMP2' : (CDAMP2, self.add_damper),
            #'CDAMP3' : (CDAMP3, self.add_damper),
            # CDAMP4 added later because the documentation is wrong
            #'CDAMP5' : (CDAMP5, self.add_damper),
            #'PDAMP5' : (PDAMP5, self.add_property),

            #'CFAST' : (CFAST, self.add_damper),
            #'PFAST' : (PFAST, self.add_property),

            #'CGAP' : (CGAP, self.add_element),
            #'PGAP' : (PGAP, self.add_property),

            #'CBUSH' : (CBUSH, self.add_damper),
            #'CBUSH1D' : (CBUSH1D, self.add_damper),
            #'CBUSH2D' : (CBUSH2D, self.add_damper),
            #'PBUSH' : (PBUSH, self.add_property),
            #'PBUSH1D' : (PBUSH1D, self.add_property),

            #'CRAC2D' : (CRAC2D, self.add_element),
            #'PRAC2D' : (PRAC2D, self.add_property),

            #'CRAC3D' : (CRAC3D, self.add_element),
            #'PRAC3D' : (PRAC3D, self.add_property),

            #'PDAMPT' : (PDAMPT, self.add_PDAMPT),
            #'PBUSHT' : (PBUSHT, self.add_PBUSHT),

            #'PCONEAX' : (PCONEAX, self.add_property),

            'RBAR' : (RBAR, add_methods._add_rigid_element_object),
            'RBAR1' : (RBAR1, add_methods._add_rigid_element_object),
            'RBE1' : (RBE1, add_methods._add_rigid_element_object),
            'RBE2' : (RBE2, add_methods._add_rigid_element_object),
            'RBE3' : (RBE3, add_methods._add_rigid_element_object),
            'RROD' : (RROD, add_methods._add_rigid_element_object),
            'RSPLINE' : (RSPLINE, add_methods._add_rigid_element_object),

            ## there is no MAT6 or MAT7
            #'MAT1' : (MAT1, self.add_structural_material),
            #'MAT2' : (MAT2, self.add_structural_material),
            #'MAT3' : (MAT3, self.add_structural_material),
            #'MAT8' : (MAT8, self.add_structural_material),
            #'MAT9' : (MAT9, self.add_structural_material),
            #'MAT10' : (MAT10, self.add_structural_material),
            #'MAT11' : (MAT11, self.add_structural_material),
            #'EQUIV' : (EQUIV, self.add_structural_material),

            #'MATHE' : (MATHE, self.add_hyperelastic_material),
            #'MATHP' : (MATHP, self.add_hyperelastic_material),
            #'MAT4' : (MAT4, self.add_thermal_material),
            #'MAT5' : (MAT5, self.add_thermal_material),

            #'MATS1' : (MATS1, self.add_material_dependence),
            ##'MATS3' : (MATS3, self.add_material_dependence),
            ##'MATS8' : (MATS8, self.add_material_dependence),
            #'MATT1' : (MATT1, self.add_material_dependence),
            #'MATT2' : (MATT2, self.add_material_dependence),
            ##'MATT3' : (MATT3, self.add_material_dependence),
            #'MATT4' : (MATT4, self.add_material_dependence),
            #'MATT5' : (MATT5, self.add_material_dependence),
            ##'MATT8' : (MATT8, self.add_material_dependence),
            ##'MATT9' : (MATT9, self.add_material_dependence),

            ## hasnt been verified, links up to MAT1, MAT2, MAT9 w/ same MID
            #'CREEP' : (CREEP, self.add_creep_material),

            #'CONM1' : (CONM1, self.add_mass),
            #'CONM2' : (CONM2, self.add_mass),
            #'CMASS1' : (CMASS1, self.add_mass),
            #'CMASS2' : (CMASS2, self.add_mass),
            #'CMASS3' : (CMASS3, self.add_mass),
            ## CMASS4 - added later because documentation is wrong

            #'MPC' : (MPC, self.add_constraint_MPC),
            #'MPCADD' : (MPCADD, self.add_constraint_MPC),

            #'SPC' : (SPC, self.add_constraint_SPC),
            #'SPC1' : (SPC1, self.add_constraint_SPC1),
            #'SPCAX' : (SPCAX, self.add_constraint_SPC),
            #'SPCADD' : (SPCADD, self.add_constraint_SPC),
            #'GMSPC' : (GMSPC, self.add_constraint_SPC),

            #'SESUP' : (SESUP, self.add_sesuport), # pseudo-constraint
            #'SUPORT' : (SUPORT, self.add_suport), # pseudo-constraint
            #'SUPORT1' : (SUPORT1, self.add_suport1), # pseudo-constraint

            #'FORCE' : (FORCE, self.add_load),
            #'FORCE1' : (FORCE1, self.add_load),
            #'FORCE2' : (FORCE2, self.add_load),
            #'MOMENT' : (MOMENT, self.add_load),
            #'MOMENT1' : (MOMENT1, self.add_load),
            #'MOMENT2' : (MOMENT2, self.add_load),

            #'LSEQ' : (LSEQ, self.add_LSEQ),
            #'LOAD' : (LOAD, self.add_load),
            #'LOADCYN' : (LOADCYN, self.add_load),
            #'GRAV' : (GRAV, self.add_load),
            #'ACCEL' : (ACCEL, self.add_load),
            #'ACCEL1' : (ACCEL1, self.add_load),
            #'PLOAD' : (PLOAD, self.add_load),
            #'PLOAD1' : (PLOAD1, self.add_load),
            #'PLOAD2' : (PLOAD2, self.add_load),
            #'PLOAD4' : (PLOAD4, self.add_load),
            #'PLOADX1' : (PLOADX1, self.add_load),
            #'RFORCE' : (RFORCE, self.add_load),
            #'RFORCE1' : (RFORCE1, self.add_load),
            #'SLOAD' : (SLOAD, self.add_load),
            #'RANDPS' : (RANDPS, self.add_load),
            #'GMLOAD' : (GMLOAD, self.add_load),
            #'SPCD' : (SPCD, self.add_load),  # enforced displacement
            #'QVOL' : (QVOL, self.add_load),  # thermal

            #'DLOAD' : (DLOAD, self.add_dload),
            #'ACSRCE' : (ACSRCE, self._add_dload_entry),
            #'TLOAD1' : (TLOAD1, self._add_dload_entry),
            #'TLOAD2' : (TLOAD2, self._add_dload_entry),
            #'RLOAD1' : (RLOAD1, self._add_dload_entry),
            #'RLOAD2' : (RLOAD2, self._add_dload_entry),

            #'FREQ' : (FREQ, self.add_FREQ),
            #'FREQ1' : (FREQ1, self.add_FREQ),
            #'FREQ2' : (FREQ2, self.add_FREQ),
            #'FREQ4' : (FREQ4, self.add_FREQ),

            'DOPTPRM' : (DOPTPRM, add_methods._add_doptprm_object),
            'DESVAR' : (DESVAR, add_methods._add_desvar_object),
            # BCTSET

            #'TEMP' : (TEMP, self.add_thermal_load),
            #'QBDY1' : (QBDY1, self.add_thermal_load),
            #'QBDY2' : (QBDY2, self.add_thermal_load),
            #'QBDY3' : (QBDY3, self.add_thermal_load),
            #'QHBDY' : (QHBDY, self.add_thermal_load),
            #'PHBDY' : (PHBDY, self.add_PHBDY),

            #'CHBDYE' : (CHBDYE, self.add_thermal_element),
            #'CHBDYG' : (CHBDYG, self.add_thermal_element),
            #'CHBDYP' : (CHBDYP, self.add_thermal_element),
            #'PCONV' : (PCONV, self.add_convection_property),
            #'PCONVM' : (PCONVM, self.add_convection_property),

            # aero
            'AECOMP' : (AECOMP, add_methods._add_aecomp_object),
            'AEFACT' : (AEFACT, add_methods._add_aefact_object),
            'AELINK' : (AELINK, add_methods._add_aelink_object),
            'AELIST' : (AELIST, add_methods._add_aelist_object),
            'AEPARM' : (AEPARM, add_methods._add_aeparm_object),
            'AESTAT' : (AESTAT, add_methods._add_aestat_object),
            'AESURF' : (AESURF, add_methods._add_aesurf_object),
            'AESURFS' : (AESURFS, add_methods._add_aesurfs_object),

            'CAERO1' : (CAERO1, add_methods._add_caero_object),
            'CAERO2' : (CAERO2, add_methods._add_caero_object),
            'CAERO3' : (CAERO3, add_methods._add_caero_object),
            'CAERO4' : (CAERO4, add_methods._add_caero_object),
            'CAERO5' : (CAERO5, add_methods._add_caero_object),

            'PAERO1' : (PAERO1, add_methods._add_paero_object),
            'PAERO2' : (PAERO2, add_methods._add_paero_object),
            'PAERO3' : (PAERO3, add_methods._add_paero_object),
            'PAERO4' : (PAERO4, add_methods._add_paero_object),
            'PAERO5' : (PAERO5, add_methods._add_paero_object),

            'SPLINE1' : (SPLINE1, add_methods._add_spline_object),
            'SPLINE2' : (SPLINE2, add_methods._add_spline_object),
            'SPLINE3' : (SPLINE3, add_methods._add_spline_object),
            'SPLINE4' : (SPLINE4, add_methods._add_spline_object),
            'SPLINE5' : (SPLINE5, add_methods._add_spline_object),

            # SOL 144
            'AEROS' : (AEROS, add_methods._add_aeros_object),
            'TRIM' : (TRIM, add_methods._add_trim_object),
            'DIVERG' : (DIVERG, add_methods._add_diverg_object),

            # SOL 145
            'AERO' : (AERO, add_methods._add_aero_object),
            'FLUTTER' : (FLUTTER, add_methods._add_flutter_object),
            'FLFACT' : (FLFACT, add_methods._add_flfact_object),
            'MKAERO1' : (MKAERO1, add_methods._add_mkaero_object),
            'MKAERO2' : (MKAERO2, add_methods._add_mkaero_object),

            'GUST' : (GUST, add_methods._add_gust_object),
            'CSSCHD' : (CSSCHD, add_methods._add_csschd_object),
            'MONPNT1' : (MONPNT1, add_methods._add_monpnt_object),
            'MONPNT2' : (MONPNT2, add_methods._add_monpnt_object),
            'MONPNT3' : (MONPNT3, add_methods._add_monpnt_object),

            'NLPARM' : (NLPARM, add_methods._add_nlparm_object),
            'NLPCI' : (NLPCI, add_methods._add_nlpci_object),
            'TSTEP' : (TSTEP, add_methods._add_tstep_object),
            'TSTEPNL' : (TSTEPNL, add_methods._add_tstepnl_object),

            #'TF' : (TF, self.add_TF),
            #'DELAY' : (DELAY, self.add_DELAY),

            'DCONADD' : (DCONADD, add_methods._add_dconstr_object),
            'DCONSTR' : (DCONSTR, add_methods._add_dconstr_object),
            'DDVAL' : (DDVAL, add_methods._add_ddval_object),
            'DLINK' : (DLINK, add_methods._add_dlink_object),

            #'DTABLE' : (DTABLE, self.add_dtable),
            'DRESP1' : (DRESP1, add_methods._add_dresp_object),
            'DRESP2' : (DRESP2, add_methods._add_dresp_object), # deqatn
            'DRESP3' : (DRESP3, add_methods._add_dresp_object),
            'DVCREL1' : (DVCREL1, add_methods._add_dvcrel_object), # dvcrels
            'DVCREL2' : (DVCREL2, add_methods._add_dvcrel_object),
            'DVPREL1' : (DVPREL1, add_methods._add_dvprel_object), # dvprels
            'DVPREL2' : (DVPREL2, add_methods._add_dvprel_object),
            'DVMREL1' : (DVMREL1, add_methods._add_dvmrel_object), # ddvmrels
            'DVMREL2' : (DVMREL2, add_methods._add_dvmrel_object),
            'DVGRID' : (DVGRID, add_methods._add_dvgrid_object), # dvgrids

            #'TABLED1' : (TABLED1, self.add_table),
            #'TABLED2' : (TABLED2, self.add_table),
            #'TABLED3' : (TABLED3, self.add_table),
            #'TABLED4' : (TABLED4, self.add_table),
            #'TABLEM1' : (TABLEM1, self.add_table),
            #'TABLEM2' : (TABLEM2, self.add_table),
            #'TABLEM3' : (TABLEM3, self.add_table),
            #'TABLEM4' : (TABLEM4, self.add_table),

            #'TABLES1' : (TABLES1, self.add_table),
            #'TABLEST' : (TABLEST, self.add_table),

            #'TABDMP1' : (TABDMP1, self.add_table_sdamping),
            #'TABRND1' : (TABRND1, self.add_random_table),
            #'TABRNDG' : (TABRNDG, self.add_random_table),

            'EIGB' : (EIGB, add_methods._add_method_object),
            'EIGR' : (EIGR, add_methods._add_method_object),
            'EIGRL' : (EIGRL, add_methods._add_method_object),
            'EIGC' : (EIGC, add_methods._add_cmethod_object),
            'EIGP' : (EIGP, add_methods._add_cmethod_object),

            'BCRPARA' : (BCRPARA, add_methods._add_bcrpara_object),
            'BCTADD' : (BCTADD, add_methods._add_bctadd_object),
            'BCTPARA' : (BCTPARA, add_methods._add_bctpara_object),
            'BSURF' : (BSURF, add_methods._add_bsurf_object),
            'BSURFS' : (BSURFS, add_methods._add_bsurfs_object),

            'ASET' : (ASET, add_methods._add_aset_object),
            'ASET1' : (ASET1, add_methods._add_aset_object),

            'BSET' : (BSET, add_methods._add_bset_object),
            'BSET1' : (BSET1, add_methods._add_bset_object),

            'CSET' : (CSET, add_methods._add_cset_object),
            'CSET1' : (CSET1, add_methods._add_cset_object),

            'QSET' : (QSET, add_methods._add_qset_object),
            'QSET1' : (QSET1, add_methods._add_qset_object),

            'USET' : (USET, add_methods._add_uset_object),
            'USET1' : (USET1, add_methods._add_uset_object),

            'SET1' : (SET1, add_methods._add_set_object),
            'SET3' : (SET3, add_methods._add_set_object),

            'SESET' : (SESET, add_methods._add_seset_object),

            'SEBSET' : (SEBSET, add_methods._add_sebset_object),
            'SEBSET1' : (SEBSET1, add_methods._add_sebset_object),

            'SECSET' : (SECSET, add_methods._add_secset_object),
            'SECSET1' : (SECSET1, add_methods._add_secset_object),

            'SEQSET' : (SEQSET, add_methods._add_seqset_object),
            'SEQSET1' : (SEQSET1, add_methods._add_seqset_object),

            #'SESUP' : (SESUP, self.add_SESUP),  # pseudo-constraint

            #'SEUSET' : (SEUSET, self.add_SEUSET),
            #'SEUSET1' : (SEUSET1, self.add_SEUSET),

            # BCTSET
        }

        # card_name -> custom _prepare_* callable (cards that cannot use the
        # generic (class, adder) path above)
        self._card_parser_prepare = {
            #'CORD2R' : (CORD2R, self._add_coord_object),  # not vectorized
            #'CORD2C' : (CORD2C, self._add_coord_object),
            #'CORD2S' : (CORD2S, self._add_coord_object),
            'CORD2R' : self._prepare_cord2,  # vectorized
            'CORD2C' : self._prepare_cord2,
            'CORD2S' : self._prepare_cord2,

            #'CORD1R' : self._prepare_cord1r,
            #'CORD1C' : self._prepare_cord1c,
            #'CORD1S' : self._prepare_cord1s,
            ##'CORD3G' : self._prepare_CORD3G,

            #'DAREA' : self._prepare_darea,
            #'DPHASE' : self._prepare_dphase,
            #'PMASS' : self._prepare_pmass,
            #'CMASS4' : self._prepare_cmass4,
            #'CDAMP4' : self._prepare_cdamp4,

            'DMIG' : self._prepare_dmig,
            'DMI' : self._prepare_dmi,
            'DMIJ' : self._prepare_dmij,
            'DMIK' : self._prepare_dmik,
            'DMIJI' : self._prepare_dmiji,

            'DEQATN' : self._prepare_dequatn,

            #'PVISC' : self._prepare_pvisc,
            #'PELAS' : self._prepare_pelas,
            #'PDAMP' : self._prepare_pdamp,

            #'TEMPD' : self._prepare_tempd,

            #'CONVM' : self._prepare_convm,
            #'CONV' : self._prepare_conv,
            #'RADM' : self._prepare_radm,
            #'RADBC' : self._prepare_radbc,

            ## GRDSET-will be last card to update from _card_parser_prepare
            #'GRDSET' : self._prepare_grdset,

            #'BCTSET' : self._prepare_bctset,
        }
def reject_card_obj2(self, card_name, card_obj):
"""rejects a card object"""
self.reject_cards.append(card_obj)
def reject_card_lines(self, card_name: str, card_lines: List[str],
show_log: bool=True, comment: str='') -> None:
"""rejects a card"""
if card_name.isdigit():
# TODO: this should technically work (I think), but it's a problem
# for the code
#
# prevents:
# spc1,100,456,10013832,10013833,10013830,10013831,10013836,10013837,
# 10013834,10013835,10013838,10013839,10014508,10008937,10008936,10008935,
msg = 'card_name=%r was misparsed...\ncard_lines=%s' % (
card_name, card_lines)
raise RuntimeError(msg)
if card_name not in self.card_count:
if ' ' in card_name:
msg = (
'No spaces allowed in card name %r. '
'Should this be a comment?\n%s%s' % (
card_name, comment, card_lines))
raise RuntimeError(msg)
if card_name in ['SUBCASE ', 'CEND']:
raise RuntimeError('No executive/case control deck was defined.')
self.log.info(' rejecting card_name = %s' % card_name)
self.increase_card_count(card_name)
self.rejects.append([comment] + card_lines)
def _prepare_bctset(self, card, card_obj, comment=''):
"""adds a GRDSET"""
card = BCTSET.add_card(card_obj, comment=comment, sol=self.sol)
self._add_bctset_object(card)
def _prepare_grdset(self, card, card_obj, comment=''):
"""adds a GRDSET"""
self.grdset = GRDSET.add_card(card_obj, comment=comment)
#def _prepare_cdamp4(self, card, card_obj, comment=''):
#"""adds a CDAMP4"""
#self.add_damper(CDAMP4.add_card(card_obj, comment=comment))
#if card_obj.field(5):
#self.add_damper(CDAMP4.add_card(card_obj, 1, comment=''))
#return card_obj
def _prepare_convm(self, card, card_obj, comment=''):
"""adds a CONVM"""
boundary_condition = CONVM.add_card(card_obj, comment=comment)
self._add_thermal_bc_object(boundary_condition, boundary_condition.eid)
def _prepare_conv(self, card, card_obj, comment=''):
"""adds a CONV"""
boundary_condition = CONV.add_card(card_obj, comment=comment)
self._add_thermal_bc_object(boundary_condition, boundary_condition.eid)
    def _prepare_radm(self, card, card_obj, comment=''):
        """adds a RADM"""
        # NOTE(review): unlike the sibling _prepare_* methods, this passes
        # ``card`` (the raw fields) instead of ``card_obj`` (the BDFCard) to
        # add_card -- confirm RADM.add_card really expects the raw card
        boundary_condition = RADM.add_card(card, comment=comment)
        # thermal BCs are stored keyed by the RADM's radmid
        self._add_thermal_bc_object(boundary_condition, boundary_condition.radmid)
    def _prepare_radbc(self, card, card_obj, comment=''):
        """adds a RADBC"""
        # NOTE(review): RADBC is built via its constructor instead of an
        # ``add_card`` classmethod like its siblings -- verify this is the
        # intended construction path
        boundary_condition = RADBC(card_obj, comment=comment)
        # thermal BCs are stored keyed by the RADBC's ambient node (nodamb)
        self._add_thermal_bc_object(boundary_condition, boundary_condition.nodamb)
def _prepare_tempd(self, card, card_obj, comment=''):
"""adds a TEMPD"""
self.add_tempd(TEMPD.add_card(card_obj, 0, comment=comment))
if card_obj.field(3):
self.add_tempd(TEMPD.add_card(card_obj, 1, comment=''))
if card_obj.field(5):
self.add_tempd(TEMPD.add_card(card_obj, 2, comment=''))
if card_obj.field(7):
self.add_tempd(TEMPD.add_card(card_obj, 3, comment=''))
def _add_doptprm(self, doptprm, comment=''):
"""adds a DOPTPRM"""
self.doptprm = doptprm
def _prepare_dequatn(self, card, card_obj, comment=''):
"""adds a DEQATN"""
if hasattr(self, 'test_deqatn') or 1:
self.add_deqatn(DEQATN.add_card(card_obj, comment=comment))
else:
if comment:
self.rejects.append([comment])
self.rejects.append(card)
def _prepare_dmig(self, card, card_obj, comment=''):
"""adds a DMIG"""
name = string(card_obj, 1, 'name')
field2 = integer_or_string(card_obj, 2, 'flag')
#print('name=%r field2=%r' % (name, field2))
if name == 'UACCEL': # special DMIG card
if field2 == 0:
card = DMIG_UACCEL.add_card(card_obj, comment=comment)
self.add_dmig(card)
else:
self._dmig_temp[name].append((card_obj, comment))
else:
field2 = integer_or_string(card_obj, 2, 'flag')
if field2 == 0:
card = DMIG(card_obj, comment=comment)
self.add_dmig(card)
else:
self._dmig_temp[name].append((card_obj, comment))
def _prepare_dmix(self, class_obj, add_method, card_obj, comment=''):
"""adds a DMIx"""
#elif card_name in ['DMI', 'DMIJ', 'DMIJI', 'DMIK']:
field2 = integer(card_obj, 2, 'flag')
if field2 == 0:
add_method(class_obj(card_obj, comment=comment))
else:
name = string(card_obj, 1, 'name')
self._dmig_temp[name].append((card_obj, comment))
def _prepare_dmi(self, card, card_obj, comment=''):
"""adds a DMI"""
self._prepare_dmix(DMI, self._add_dmi_object, card_obj, comment=comment)
def _prepare_dmij(self, card, card_obj, comment=''):
"""adds a DMIJ"""
self._prepare_dmix(DMIJ, self._add_dmij_object, card_obj, comment=comment)
def _prepare_dmik(self, card, card_obj, comment=''):
"""adds a DMIK"""
self._prepare_dmix(DMIK, self._add_dmik_object, card_obj, comment=comment)
def _prepare_dmiji(self, card, card_obj, comment=''):
"""adds a DMIJI"""
self._prepare_dmix(DMIJI, self._add_dmiji_object, card_obj, comment=comment)
#def _prepare_cmass4(self, card, card_obj, comment=''):
#"""adds a CMASS4"""
#class_instance = CMASS4.add_card(card_obj, icard=0, comment=comment)
#self.add_mass(class_instance)
#if card_obj.field(5):
#class_instance = CMASS4.add_card(card_obj, icard=1, comment=comment)
#self.add_mass(class_instance)
#def _prepare_pelas(self, card, card_obj, comment=''):
#"""adds a PELAS"""
#class_instance = PELAS.add_card(card_obj, icard=0, comment=comment)
#self.add_property(class_instance)
#if card_obj.field(5):
#class_instance = PELAS.add_card(card_obj, icard=1, comment=comment)
#self.add_property(class_instance)
#def _prepare_pvisc(self, card, card_obj, comment=''):
#"""adds a PVISC"""
#class_instance = PVISC.add_card(card_obj, icard=0, comment=comment)
#self.add_property(class_instance)
#if card_obj.field(5):
#class_instance = PVISC.add_card(card_obj, icard=1, comment=comment)
#self.add_property(class_instance)
#def _prepare_pdamp(self, card, card_obj, comment=''):
#"""adds a PDAMP"""
#class_instance = PDAMP.add_card(card_obj, icard=0, comment=comment)
#self.add_property(class_instance)
#if card_obj.field(3):
#class_instance = PDAMP.add_card(card_obj, icard=1, comment=comment)
#self.add_property(class_instance)
#if card_obj.field(5):
#class_instance = PDAMP.add_card(card_obj, icard=2, comment=comment)
#self.add_property(class_instance)
#if card_obj.field(7):
#class_instance = PDAMP.add_card(card_obj, icard=3, comment=comment)
#self.add_property(class_instance)
#def _prepare_pmass(self, card, card_obj, comment=''):
#"""adds a PMASS"""
#card_instance = PMASS(card_obj, icard=0, comment=comment)
#self.add_property_mass(card_instance)
#for (i, j) in enumerate([3, 5, 7]):
#if card_obj.field(j):
#card_instance = PMASS(card_obj, icard=i+1, comment=comment)
#self.add_property_mass(card_instance)
#def _prepare_dphase(self, card, card_obj, comment=''):
#"""adds a DPHASE"""
#class_instance = DPHASE.add_card(card_obj, comment=comment)
#self.add_dphase(class_instance)
#if card_obj.field(5):
#print('card_obj = ', card_obj)
#class_instance = DPHASE(card_obj, icard=1, comment=comment)
#self.add_DPHASE(class_instance)
def _prepare_cord1r(self, card, card_obj, comment=''):
    """adds a CORD1R; one card may define up to two coordinate systems"""
    # the first coordinate system always exists
    coord = CORD1R.add_card(card_obj, comment=comment)
    self._add_methods._add_coord_object(coord)
    # a second CORD1R starts at field 5 when that field is populated
    if card_obj.field(5):
        coord = CORD1R.add_card(card_obj, icard=1, comment=comment)
        self._add_methods._add_coord_object(coord)
def _prepare_cord1c(self, card, card_obj, comment=''):
    """adds a CORD1C; one card may define up to two coordinate systems"""
    # the first coordinate system always exists
    coord = CORD1C.add_card(card_obj, comment=comment)
    self._add_methods._add_coord_object(coord)
    # a second CORD1C starts at field 5 when that field is populated
    if card_obj.field(5):
        coord = CORD1C.add_card(card_obj, icard=1, comment=comment)
        self._add_methods._add_coord_object(coord)
def _prepare_cord1s(self, card, card_obj, comment=''):
    """adds a CORD1S; one card may define up to two coordinate systems"""
    # the first coordinate system always exists
    coord = CORD1S.add_card(card_obj, comment=comment)
    self._add_methods._add_coord_object(coord)
    # a second CORD1S starts at field 5 when that field is populated
    if card_obj.field(5):
        coord = CORD1S.add_card(card_obj, icard=1, comment=comment)
        self._add_methods._add_coord_object(coord)
def _prepare_cord2(self, card, card_obj, comment=''):
    """adds a CORD2x (CORD2R/CORD2C/CORD2S); delegated to the coords container"""
    self.coords.add_cord2x(card, card_obj, comment)
def add_card(self, card_lines, card_name, comment='', is_list=True, has_none=True):
    """
    Adds a card object to the BDF object.

    Parameters
    ----------
    card_lines: list[str]
        the list of the card fields
    card_name : str
        the card_name -> 'GRID'
    comment : str
        an optional the comment for the card
    is_list : bool, optional
        False : input is a list of card fields -> ['GRID', 1, None, 3.0, 4.0, 5.0]
        True : input is list of card_lines -> ['GRID, 1,, 3.0, 4.0, 5.0']
    has_none : bool; default=True
        can there be trailing Nones in the card data (e.g. ['GRID', 1, 2, 3.0, 4.0, 5.0, None])
        can there be trailing Nones in the card data (e.g. ['GRID, 1, 2, 3.0, 4.0, 5.0, '])

    Returns
    -------
    card_object : BDFCard()
        the card object representation of card

    .. code-block:: python

        >>> model = BDF()

        # is_list is a somewhat misleading name; is it a list of card_lines
        # where a card_line is an unparsed string
        >>> card_lines = ['GRID,1,2']
        >>> comment = 'this is a comment'
        >>> model.add_card(card_lines, 'GRID', comment, is_list=True)

        # here is_list=False because it's been parsed
        >>> card = ['GRID', 1, 2,]
        >>> model.add_card(card_lines, 'GRID', comment, is_list=False)

        # Note the None at the end of the 1st line, which is there
        # because the CONM2 card has a blank field.
        # It must be there.
        >>> card = [
                'CONM2', eid, nid, cid, mass, x1, x2, x3, None,
                i11, i21, i22, i31, None, i33,
        ]
        >>> model.add_card(card_lines, 'CONM2', comment, is_list=False)

        # here's an alternate approach for the CONM2 using
        # Nastran's CSV format; many blank fields, but it's parsed
        # exactly like a standard CONM2.
        >>> card = [
                'CONM2,1,2,3,10.0',
                ',1.0,,5.0'
        ]
        >>> model.add_card(card_lines, 'CONM2', comment, is_list=True)

    Notes
    -----
    This is a very useful method for interfacing with the code.
    The card_object is not a card-type object...so not a GRID
    card or CQUAD4 object.  It's a BDFCard Object.  However,
    you know the type (assuming a GRID), so just call the
    *mesh.Node(nid)* to get the Node object that was just
    created.
    """
    card_name = card_name.upper()
    card_obj, card = self.create_card_object(card_lines, card_name,
                                             is_list=is_list, has_none=has_none)
    # bugfix: the raw ``card`` fields (not ``card_name``) must be the second
    # argument; _add_card_helper uses it for rejects/prepare functions
    # (matches the call in add_card_fields)
    self._add_card_helper(card_obj, card, card_name, comment)
    return card_obj
def add_card_fields(self, card_lines, card_name, comment='', has_none=True):
    """
    Adds a card object to the BDF object.

    Parameters
    ----------
    card_lines: list[str]
        the list of the card fields
        input is a list of card fields -> ['GRID', 1, 2, 3.0, 4.0, 5.0]
    card_name : str
        the card_name -> 'GRID'
    comment : str
        an optional the comment for the card
    has_none : bool; default=True
        can there be trailing Nones in the card data (e.g. ['GRID', 1, 2, 3.0, 4.0, 5.0, None])

    Returns
    -------
    card_object : BDFCard()
        the card object representation of card
    """
    card_name = card_name.upper()
    card_obj, card = self.create_card_object(card_lines, card_name,
                                             is_list=True, has_none=has_none)
    self._add_card_helper(card_obj, card, card_name, comment)
    # bugfix: the docstring promises the card object, but nothing was returned
    return card_obj
@property
def nodes(self):
    """
    Returns the ids of all points (GRIDs, then SPOINTs, then EPOINTs)
    as a single int32 array.
    """
    ngrid = len(self.grid)
    assert ngrid > 0, ngrid
    nspoint = 0
    nepoint = 0
    if self.spoint.n:
        nspoint = len(self.spoint.points)
    if self.epoint.n:
        nepoint = len(self.epoint.points)
        raise NotImplementedError('EPOINT')
    npoint = ngrid + nspoint + nepoint
    assert npoint > 0, 'ngrids=%s nspoints=%s nepoints=%s' % (ngrid, nspoint, nepoint)
    # fill one preallocated array: GRIDs first, SPOINTs, then EPOINTs
    ids = np.zeros(npoint, dtype='int32')
    ids[:ngrid] = self.grid.node_id
    if nspoint:
        ids[ngrid:ngrid + nspoint] = self.spoint.points
    if nepoint:
        ids[ngrid + nspoint:] = self.epoint.points
    return ids
def get_xyz_in_coord(self, cid=0, fdtype='float64', sort_ids=True, dtype='float64'):
    """
    Gets the xyz points (including SPOINTS) in the desired coordinate frame

    Parameters
    ----------
    cid : int; default=0
        the desired coordinate system; only cid=0 is currently supported
    fdtype : str; default='float64'
        the data type of the xyz coordinates
        (currently unused; ``dtype`` is applied to the preallocated array)
    sort_ids : bool; default=True
        sort the ids
    dtype : str; default='float64'
        the data type of the preallocated xyz array

    Returns
    -------
    xyz : (n, 3) ndarray
        the xyz points in the cid coordinate frame

    .. warning:: doesn't support EPOINTs
    """
    ngrids = len(self.grid)
    nspoints = 0
    nepoints = 0
    spoints = None
    if self.spoint.n:
        # bugfix: was ``self.point.points`` (AttributeError); SPOINT ids
        # live on self.spoint (see the ``nodes`` property)
        spoints = self.spoint.points
        nspoints = len(spoints)
    if self.epoint.n:
        # bugfix: was ``self.point.points``; EPOINT ids live on self.epoint
        epoints = self.epoint.points
        nepoints = len(epoints)
        raise NotImplementedError('EPOINT')
    assert ngrids + nspoints + nepoints > 0, 'ngrids=%s nspoints=%s nepoints=%s' % (ngrids, nspoints, nepoints)
    xyz_cid0 = np.zeros((ngrids + nspoints + nepoints, 3), dtype=dtype)
    if cid == 0:
        xyz_cid0 = self.grid.get_position_by_node_index()
        assert nspoints == 0, nspoints
    else:
        assert cid == 0, cid
        assert nspoints == 0, nspoints
    if sort_ids:
        # nspoints/nepoints are asserted to be 0 above, so the GRID ids
        # define the full ordering
        # (bugfix: ``all_nodes`` was an undefined name)
        all_nodes = self.grid.node_id
        isort = np.argsort(all_nodes)
        xyz_cid0 = xyz_cid0[isort, :]
    return xyz_cid0
def _add_card_helper(self, card_obj, card, card_name, comment=''):
    """
    Adds a card object to the BDF object.

    Parameters
    ----------
    card_object : BDFCard()
        the card object representation of card
    card : List[str]
        the fields of the card object; used for rejection and special cards
    card_name : str
        the card_name -> 'GRID'
    comment : str
        an optional the comment for the card
    """
    # ECHOON/ECHOOFF only toggle echoing of subsequent cards; they are not stored
    if card_name == 'ECHOON':
        self.echo = True
        return
    elif card_name == 'ECHOOFF':
        self.echo = False
        return
    if self.echo:
        # echo in small-field (8-char) format; fall back to large-field
        # (16-char) format if the card cannot be printed small
        try:
            print(print_card_8(card_obj).rstrip())
        except Exception:
            print(print_card_16(card_obj).rstrip())
    if card_name in self._card_parser:
        # standard path: (card class, add function) pairs
        card_class, add_card_function = self._card_parser[card_name]
        try:
            class_instance = card_class.add_card(card_obj, comment=comment)
            add_card_function(class_instance)
        except TypeError:
            #msg = 'problem adding %s' % card_obj
            raise
            #raise TypeError(msg)
        except (SyntaxError, AssertionError, KeyError, ValueError) as exception:
            # re-raised unconditionally; the machinery below for storing
            # parse errors is currently disabled
            raise
            # WARNING: Don't catch RuntimeErrors or a massive memory leak can occur
            #tpl/cc451.bdf
            #raise
            # NameErrors should be caught
            #self._iparse_errors += 1
            #self.log.error(card_obj)
            #var = traceback.format_exception_only(type(exception), exception)
            #self._stored_parse_errors.append((card, var))
            #if self._iparse_errors > self._nparse_errors:
                #self.pop_parse_errors()
                #raise
        #except AssertionError as exception:
            #self.log.error(card_obj)
    elif card_name in self._card_parser_prepare:
        # special path: prepare functions take the raw fields as well
        add_card_function = self._card_parser_prepare[card_name]
        try:
            add_card_function(card, card_obj, comment)
        except (SyntaxError, AssertionError, KeyError, ValueError) as exception:
            raise
            # WARNING: Don't catch RuntimeErrors or a massive memory leak can occur
            #tpl/cc451.bdf
            #raise
            # NameErrors should be caught
            #self._iparse_errors += 1
            #self.log.error(card_obj)
            #var = traceback.format_exception_only(type(exception), exception)
            #self._stored_parse_errors.append((card, var))
            #if self._iparse_errors > self._nparse_errors:
                #self.pop_parse_errors()
        #except AssertionError as exception:
            #self.log.error(card_obj)
            #raise
    else:
        # unknown card -> reject it rather than crash
        #raise RuntimeError(card_obj)
        self.reject_cards.append(card_obj)
def get_bdf_stats(self, return_type='string'):
    """
    Print statistics for the BDF

    Parameters
    ----------
    return_type : str (default='string')
        the output type ('list', 'string')
        'list' : list of strings
        'string' : single, joined string

    Returns
    -------
    return_data : str, optional
        the output data

    Notes
    -----
    If a card is not supported and not added to the proper
    lists, this method will fail.
    """
    # NOTE(review): this early return deliberately disables stats for this
    # (vectorized) class; everything below is currently dead code.  Confirm
    # the attribute lists below still match this class before re-enabling.
    return ''
    card_stats = [
        'params', 'nodes', 'points', 'elements', 'rigid_elements',
        'properties', 'materials', 'creep_materials',
        'MATT1', 'MATT2', 'MATT3', 'MATT4', 'MATT5', 'MATT8', 'MATT9',
        'MATS1', 'MATS3', 'MATT8',
        'coords', 'mpcs', 'mpcadds',
        # dynamic cards
        'dareas', 'dphases', 'nlparms', 'nlpcis', 'tsteps', 'tstepnls',
        # direct matrix input - DMIG - dict
        'dmi', 'dmig', 'dmij', 'dmiji', 'dmik',
        'dequations',
        # frequencies - dict
        'frequencies',
        # optimization - dict
        'dconadds', 'dconstrs', 'desvars', 'ddvals', 'dlinks', 'dresps',
        'dvcrels', 'dvmrels', 'dvprels', 'dvgrids',
        # SESETx - dict
        'suport1',
        'se_sets',
        'se_usets',
        # tables
        'tables', 'tables_d', 'tables_m', 'random_tables',
        # methods
        'methods', 'cMethods',
        # aero
        'caeros', 'paeros', 'aecomps', 'aefacts', 'aelinks',
        'aelists', 'aeparams', 'aesurfs', 'aestats', 'gusts', 'flfacts',
        'flutters', 'splines', 'trims',
        # thermal
        'bcs', 'thermal_materials', 'phbdys',
        'convection_properties', ]
    # These are ignored because they're lists
    ignored_types = set([
        'spoints', 'spointi', # singleton
        'grdset', # singleton
        'spcs',
        'suport', 'se_suport', # suport, suport1 - list
        'doptprm', # singleton
        # SETx - list
        'sets', 'asets', 'bsets', 'csets', 'qsets',
        'se_bsets', 'se_csets', 'se_qsets',
    ])
    ## TODO: why are some of these ignored?
    ignored_types2 = set([
        'case_control_deck', 'caseControlDeck',
        'spcObject2', 'mpcObject2',
        # done
        'sol', 'loads', 'mkaeros',
        'rejects', 'reject_cards',
        # not cards
        'debug', 'executive_control_lines',
        'case_control_lines', 'cards_to_read', 'card_count',
        'is_structured', 'uniqueBulkDataCards',
        'model_type', 'include_dir',
        'sol_method', 'log',
        'sol_iline',
        'reject_count', '_relpath',
        #'foundEndData',
        'special_cards',])
    unsupported_types = ignored_types.union(ignored_types2)
    all_params = object_attributes(self, keys_to_skip=unsupported_types)
    msg = ['---BDF Statistics---']
    # sol
    msg.append('SOL %s\n' % self.sol)
    # loads
    for (lid, loads) in sorted(self.loads.items()):
        msg.append('bdf.loads[%s]' % lid)
        groups_dict = {}
        for loadi in loads:
            groups_dict[loadi.type] = groups_dict.get(loadi.type, 0) + 1
        for name, count_name in sorted(groups_dict.items()):
            msg.append(' %-8s %s' % (name + ':', count_name))
        msg.append('')
    # dloads
    for (lid, loads) in sorted(self.dloads.items()):
        msg.append('bdf.dloads[%s]' % lid)
        groups_dict = {}
        for loadi in loads:
            groups_dict[loadi.type] = groups_dict.get(loadi.type, 0) + 1
        for name, count_name in sorted(groups_dict.items()):
            msg.append(' %-8s %s' % (name + ':', count_name))
        msg.append('')
    for (lid, loads) in sorted(self.dload_entries.items()):
        msg.append('bdf.dload_entries[%s]' % lid)
        groups_dict = {}
        for loadi in loads:
            groups_dict[loadi.type] = groups_dict.get(loadi.type, 0) + 1
        for name, count_name in sorted(groups_dict.items()):
            msg.append(' %-8s %s' % (name + ':', count_name))
        msg.append('')
    # aero
    if self.aero:
        msg.append('bdf:aero')
        msg.append(' %-8s %s' % ('AERO:', 1))
    # aeros
    if self.aeros:
        msg.append('bdf:aeros')
        msg.append(' %-8s %s' % ('AEROS:', 1))
    #mkaeros
    if self.mkaeros:
        msg.append('bdf:mkaeros')
        msg.append(' %-8s %s' % ('MKAERO:', len(self.mkaeros)))
    for card_group_name in card_stats:
        card_group = getattr(self, card_group_name)
        groups = set()
        if not isinstance(card_group, dict):
            msg = '%s is a %s; not dictionary' % (card_group_name, type(card_group))
            raise RuntimeError(msg)
        for card in card_group.values():
            if isinstance(card, list):
                for card2 in card:
                    groups.add(card2.type)
            else:
                groups.add(card.type)
        group_msg = []
        for card_name in sorted(groups):
            try:
                ncards = self.card_count[card_name]
                group_msg.append(' %-8s : %s' % (card_name, ncards))
            except KeyError:
                group_msg.append(' %-8s : ???' % card_name)
                #assert card_name == 'CORD2R', self.card_count
        if group_msg:
            msg.append('bdf.%s' % card_group_name)
            msg.append('\n'.join(group_msg))
            msg.append('')
    # rejects
    if self.rejects:
        msg.append('Rejected Cards')
        for name, counter in sorted(self.card_count.items()):
            if name not in self.cards_to_read:
                msg.append(' %-8s %s' % (name + ':', counter))
        msg.append('')
    if return_type == 'string':
        return '\n'.join(msg)
    else:
        return msg
def get_displacement_index_xyz_cp_cd(self, fdtype='float64', idtype='int32', sort_ids=True):
    """
    Get index and transformation matricies for nodes with
    their output in coordinate systems other than the global.
    Used in combination with ``OP2.transform_displacements_to_global``

    Parameters
    ----------
    fdtype : str; default='float64'
        the type of xyz_cp
    idtype : str; default='int32'
        the type of nid_cp_cd
    sort_ids : bool; default=True
        sort xyz_cp/nid_cp_cd by node id
        (new parameter; it was previously referenced but never defined,
        which raised NameError)

    Returns
    -------
    icd_transform : dict{int cd : (n,) int ndarray}
        Dictionary from coordinate id to index of the nodes in
        ``self.point_ids`` that their output (`CD`) in that
        coordinate system.
    icp_transform : dict{int cp : (n,) int ndarray}
        Dictionary from coordinate id to index of the nodes in
        ``self.point_ids`` that their input (`CP`) in that
        coordinate system.
    xyz_cp : (n, 3) float ndarray
        points in the CP coordinate system
    nid_cp_cd : (n, 3) int ndarray
        node id, CP, CD for each node

    Examples
    --------
    # assume GRID 1 has a CD=10
    # assume GRID 2 has a CD=10
    # assume GRID 5 has a CD=50
    >>> model.point_ids
    [1, 2, 5]
    >>> i_transform = model.get_displacement_index_xyz_cp_cd()
    >>> i_transform[10]
    [0, 1]
    >>> i_transform[50]
    [2]
    """
    nids_cd_transform = defaultdict(list)
    nids_cp_transform = defaultdict(list)
    nnodes = len(self.nodes)
    nspoints = 0
    nepoints = 0
    spoints = None
    epoints = None
    if self.spoints:
        spoints = self.spoints.points
        nspoints = len(spoints)
    if self.epoints is not None:
        epoints = self.epoints.points
        nepoints = len(epoints)
        #raise NotImplementedError('EPOINTs')
    if nnodes + nspoints + nepoints == 0:
        msg = 'nnodes=%s nspoints=%s nepoints=%s' % (nnodes, nspoints, nepoints)
        raise ValueError(msg)
    xyz_cp = np.zeros((nnodes + nspoints, 3), dtype=fdtype)
    nid_cp_cd = np.zeros((nnodes + nspoints, 3), dtype=idtype)
    i = 0
    # NOTE(review): iterating ``self.nodes.items()`` assumes a dict of node
    # objects; in this vectorized class ``nodes`` is an ndarray property --
    # confirm which container this method is meant to run against
    for nid, node in sorted(self.nodes.items()):
        cd = node.Cd()
        cp = node.Cp()
        # bugfix: the CP ids were appended to nids_cd_transform, so
        # nids_cp_transform was never populated
        nids_cp_transform[cp].append(nid)
        nids_cd_transform[cd].append(nid)
        nid_cp_cd[i, :] = [nid, cp, cd]
        xyz_cp[i, :] = node.xyz
        i += 1
    if nspoints:
        for nid in sorted(self.spoints.points):
            nid_cp_cd[i] = nid
            i += 1
    if nepoints:
        for nid in sorted(self.epoints.points):
            nid_cp_cd[i] = nid
            i += 1
    if sort_ids:
        nids = nid_cp_cd[:, 0]
        isort = nids.argsort()
        nid_cp_cd = nid_cp_cd[isort, :]
        xyz_cp = xyz_cp[isort, :]
    icp_transform = {}
    icd_transform = {}
    nids_all = np.array(sorted(self.point_ids))
    for cd, nids in sorted(nids_cd_transform.items()):
        if cd in [0, -1]:
            continue
        nids = np.array(nids)
        icd_transform[cd] = np.where(np.in1d(nids_all, nids))[0]
        if cd in nids_cp_transform:
            icp_transform[cd] = icd_transform[cd]
    # bugfix: this loop iterated the CD data and wrote into icd_transform;
    # it must fill icp_transform from the CP groupings
    for cp, nids in sorted(nids_cp_transform.items()):
        if cp in [0, -1]:
            continue
        if cp in icp_transform:
            continue
        nids = np.array(nids)
        icp_transform[cp] = np.where(np.in1d(nids_all, nids))[0]
    return icd_transform, icp_transform, xyz_cp, nid_cp_cd
def transform_xyzcp_to_xyz_cid(self, xyz_cp, nids, icp_transform, in_place=False, cid=0):
    """
    Working on faster method for calculating node locations
    Not validated...

    Parameters
    ----------
    xyz_cp : (n, 3) float ndarray
        points in the CP coordinate system
    nids : (n,) int ndarray
        node ids (currently unused by this implementation)
    icp_transform : dict{int cp : (n,) int ndarray}
        Dictionary from coordinate id to index of the nodes in
        ``self.point_ids`` that their input (`CP`) in that
        coordinate system.
    in_place : bool; default=False
        must be False; the in-place path below is unreachable
    cid : int; default=0
        the coordinate system to get xyz in

    Returns
    -------
    xyz_cid : (n, 3) float ndarray
        points in the CID coordinate system
    """
    coord2 = self.coords[cid]
    beta2 = coord2.beta()
    # in_place is currently forced off, so the True branch below is dead code
    assert in_place is False, 'in_place=%s' % in_place
    if in_place:
        xyz_cid0 = xyz_cp
    else:
        xyz_cid0 = np.copy(xyz_cp)
    # transform the grids to the global coordinate system
    for cp, inode in icp_transform.items():
        if cp == 0:
            continue
        coord = self.coords[cp]
        beta = coord.beta()
        # NOTE(review): the names read backwards -- ``is_beta``/``is_origin``
        # are True when beta/origin look trivial, yet the trivial terms are
        # the ones applied; confirm the intended fast paths
        is_beta = np.abs(np.diagonal(beta)).min() == 1.
        is_origin = np.abs(coord.origin).max() == 0.
        xyzi = coord.coord_to_xyz_array(xyz_cp[inode, :])
        if is_beta and is_origin:
            xyz_cid0[inode, :] = xyzi @ beta + coord.origin
        elif is_beta:
            xyz_cid0[inode, :] = xyzi @ beta
        else:
            xyz_cid0[inode, :] = xyzi + coord.origin
    # already in the global frame -> done
    if cid == 0:
        return xyz_cid0
    # project the global xyz into the requested output frame
    is_beta = np.abs(np.diagonal(beta2)).min() == 1.
    is_origin = np.abs(coord2.origin).max() == 0.
    if is_beta and is_origin:
        xyzi = (xyz_cid0 - coord2.origin) @ beta2.T
        xyz_cid = coord2.xyz_to_coord_array(xyzi)
    elif is_beta:
        xyzi = xyz_cid0 @ beta2.T
        xyz_cid = coord2.xyz_to_coord_array(xyzi)
    else:
        xyzi = xyz_cid0 - coord2.origin
        xyz_cid = coord2.xyz_to_coord_array(xyzi)
    return xyz_cid
def get_displacement_index(self):
    """
    Get index and transformation matricies for nodes with
    their output in coordinate systems other than the global.
    Used in combination with ``OP2.transform_displacements_to_global``

    Returns
    -------
    nids_all : (nnodes,) int ndarray
        the sorted point ids
    nids_transform : dict{int cd : list[int]}
        mapping from CD coordinate id to the node ids output in it
    icd_transform : dict{int cid : (n,) int ndarray}
        Dictionary from coordinate id to index of the nodes in
        ``self.point_ids`` that their output (`CD`) in that
        coordinate system.

    Examples
    --------
    # assume GRID 1 has a CD=10
    # assume GRID 2 has a CD=10
    # assume GRID 5 has a CD=50
    >>> model.point_ids
    [1, 2, 5]
    >>> nids_all, nids_transform, icd_transform = model.get_displacement_index()
    >>> icd_transform[10]
    [0, 1]
    >>> icd_transform[50]
    [2]
    """
    nids_transform = defaultdict(list)
    icd_transform = {}
    nids_all = np.array(sorted(self.point_ids))
    # bugfix: the early exit used to return only ``icd_transform`` while the
    # normal path returns a 3-tuple (the intended interface per the
    # deprecation message in get_displacement_index_transforms)
    if len(self.coords) == 1:  # was ncoords > 2; changed b/c seems dangerous
        return nids_all, nids_transform, icd_transform
    # group node ids by their output (CD) coordinate system
    for nid, node in sorted(self.nodes.items()):
        cid_d = node.Cd()
        if cid_d:
            nids_transform[cid_d].append(nid)
    for cid in sorted(nids_transform.keys()):
        nids = np.array(nids_transform[cid])
        icd_transform[cid] = np.where(np.in1d(nids_all, nids))[0]
    return nids_all, nids_transform, icd_transform
def get_displacement_index_transforms(self):
    """
    Get index and transformation matricies for nodes with
    their output in coordinate systems other than the global.
    Used in combination with ``OP2.transform_displacements_to_global``

    .. deprecated:: use ``get_displacement_index`` instead

    Returns
    -------
    icd_transform : dict{int cid : (n,) int ndarray}
        Dictionary from coordinate id to index of the nodes in
        ``self.point_ids`` that their output (`CD`) in that
        coordinate system.
    beta_transforms : dict{in:3x3 float ndarray}
        Dictionary from coordinate id to 3 x 3 transformation
        matrix for that coordinate system.

    Examples
    --------
    # assume GRID 1 has a CD=10
    # assume GRID 2 has a CD=10
    # assume GRID 5 has a CD=50
    >>> model.point_ids
    [1, 2, 5]
    >>> icd_transform, beta_transforms = model.get_displacement_index_transforms()
    >>> icd_transform[10]
    [0, 1]
    >>> beta_transforms[10]
    [1., 0., 0.]
    [0., 0., 1.]
    [0., 1., 0.]
    >>> icd_transform[50]
    [2]
    >>> beta_transforms[50]
    [1., 0., 0.]
    [0., 1., 0.]
    [0., 0., 1.]
    """
    self.deprecated('icd_transform, beta_transforms= model.get_displacement_index_transforms()',
                    'nids_all, nids_transform, icd_transform = model.get_displacement_index()', '1.0')
    nids_transform = defaultdict(list)
    icd_transform = {}
    beta_transforms = {}
    # with only the global coordinate system there is nothing to transform
    if len(self.coords) == 1:  # was ncoords > 2; changed b/c seems dangerous
        return icd_transform, beta_transforms
    # group node ids by their output (CD) coordinate system
    for nid, node in sorted(self.nodes.items()):
        cid_d = node.Cd()
        if cid_d:
            nids_transform[cid_d].append(nid)
    nids_all = np.array(sorted(self.point_ids))
    for cid in sorted(nids_transform.keys()):
        nids = np.array(nids_transform[cid])
        icd_transform[cid] = np.where(np.in1d(nids_all, nids))[0]
        beta_transforms[cid] = self.coords[cid].beta()
    return icd_transform, beta_transforms
def _get_card_name(self, lines):
    """
    Returns the name of the card defined by the provided lines

    Parameters
    ----------
    lines : list[str]
        the lines of the card

    Returns
    -------
    card_name : str / None
        the (uppercased) name of the card; None for a blank line
    """
    # the name lives in the first 8 characters (small-field format);
    # strip trailing separators, cut at the first comma/tab, and drop
    # the large-field marker '*'
    raw = lines[0][:8]
    card_name = raw.rstrip('\t, ').split(',')[0].split('\t')[0].strip('*\t ')
    if not card_name:
        return None
    if ' ' in card_name:
        msg = 'card_name=%r\nline=%r in filename=%r is invalid' \
            % (card_name, lines[0], self.active_filename)
        print(msg)
        raise CardParseSyntaxError(msg)
    return card_name.upper()
def increase_card_count(self, card_name, count_num=1):
    """
    Used for testing to check that the number of cards going in is the
    same as each time the model is read verifies proper writing of cards

    Parameters
    ----------
    card_name : str
        the card_name -> 'GRID'
    count_num : int, optional
        the amount to increment by (default=1)

    >>> bdf.read_bdf(bdf_filename)
    >>> bdf.card_count['GRID']
    50
    """
    # a missing entry starts at 0, so this covers both branches of the
    # original if/else in one expression
    counts = self.card_count
    counts[card_name] = counts.get(card_name, 0) + count_num
def _parse_spc1(self, card_name, cards):
    """adds SPC1s, grouping the cards by constraint id"""
    for comment, card_lines in cards:
        card_obj = self._cardlines_to_card_obj(card_lines, card_name)
        constraint_id, dofs, node_ids = get_spc1_constraint(card_obj)
        try:
            constraint = self.spc1[constraint_id]
        except KeyError:
            # first SPC1 with this id: create the container lazily so an
            # SPC1 object is never built just to be thrown away
            constraint = SPC1(self)
            self.spc1[constraint_id] = constraint
        constraint.add(constraint_id, dofs, node_ids, comment=comment)
    self.increase_card_count(card_name, len(cards))
def _parse_mpc(self, card_name, cards):
    """adds MPCs, grouping the cards by constraint id"""
    for comment, card_lines in cards:
        card_obj = self._cardlines_to_card_obj(card_lines, card_name)
        constraint_id, constraint_data = get_mpc_constraint(card_obj)
        try:
            mpc = self.mpc[constraint_id]
        except KeyError:
            mpc = MPC(self)
            self.mpc[constraint_id] = mpc
        mpc.add(constraint_id, constraint_data, comment=comment)
    # finalize every MPC container once all the cards are in
    for mpc in self.mpc.values():
        mpc.build()
    self.increase_card_count(card_name, len(cards))
def _parse_spcadd(self, card_name, cards):
    """adds SPCADDs, grouping the cards by constraint id"""
    for comment, card_lines in cards:
        card_obj = self._cardlines_to_card_obj(card_lines, card_name)
        constraint_id, node_ids = get_spcadd_constraint(card_obj)
        try:
            spcadd = self.spcadd[constraint_id]
        except KeyError:
            spcadd = SPCADD(self)
            self.spcadd[constraint_id] = spcadd
        spcadd.add(constraint_id, node_ids, comment=comment)
    self.increase_card_count(card_name, len(cards))
def _parse_mpcadd(self, card_name, cards):
    """adds MPCADDs, grouping the cards by constraint id"""
    for comment, card_lines in cards:
        card_obj = self._cardlines_to_card_obj(card_lines, card_name)
        # reuses the SPCADD parser; presumably the field layout is
        # identical -- confirm against the MPCADD card definition
        constraint_id, node_ids = get_spcadd_constraint(card_obj)
        try:
            mpcadd = self.mpcadd[constraint_id]
        except KeyError:
            mpcadd = MPCADD(self)
            self.mpcadd[constraint_id] = mpcadd
        mpcadd.add(constraint_id, node_ids, comment=comment)
    self.increase_card_count(card_name, len(cards))
def _parse_spc(self, card_name, cards):
    """SPC; delegated to the shared SPC/SPCD parser"""
    self._parse_spci(card_name, cards, SPC, self.spc)
def _parse_spcd(self, card_name, cards):
    """SPCD; delegated to the shared SPC/SPCD parser"""
    self._parse_spci(card_name, cards, SPCD, self.spcd)
def _parse_spci(self, card_name, cards, obj, slot):
    """SPC, SPCD; a single card can hold up to two constraints"""
    grouped = defaultdict(list)
    for comment, card_lines in cards:
        card_obj = self._cardlines_to_card_obj(card_lines, card_name)
        # a card defines up to two constraints (icard=0 and icard=1)
        for icard in (0, 1):
            data = get_spc_constraint(card_obj, icard)
            constraint_id, node_id = data[:2]
            if node_id is None:
                continue
            grouped[constraint_id].append((data, comment))
            # only the first constraint of a card keeps the comment
            comment = ''
    for constraint_id, entries in grouped.items():
        instance = obj(self)
        slot[constraint_id] = instance
        instance.allocate({card_name : len(entries)})
        for data, commenti in entries:
            constraint_id, node_id, dofs, enforced_motion = data
            instance.add(constraint_id, node_id, dofs, enforced_motion,
                         comment=commenti)
def _parse_ctetra(self, card_name, card):
    """adds ctetras; a 7-field card is a CTETRA4, anything longer a CTETRA10"""
    self._parse_solid(
        'CTETRA', card, 7,
        ('CTETRA4', self.ctetra4),
        ('CTETRA10', self.ctetra10),
    )
def _parse_cpenta(self, card_name, card):
    """adds cpentas; a 9-field card is a CPENTA6, anything longer a CPENTA15"""
    self._parse_solid(
        'CPENTA', card, 9,
        ('CPENTA6', self.cpenta6),
        ('CPENTA15', self.cpenta15),
    )
def _parse_cpyram(self, card_name, card):
    """adds cpyrams; an 8-field card is a CPYRAM5, anything longer a CPYRAM13"""
    # bugfix: this was a copy/paste of _parse_cpenta -- CPYRAM cards were
    # labeled CPENTA6/CPENTA15 and filed into the penta containers
    # (NOTE(review): confirm the cpyram5/cpyram13 attribute names against
    # the class constructor)
    self._parse_solid(
        'CPYRAM', card, 8,
        ('CPYRAM5', self.cpyram5),
        ('CPYRAM13', self.cpyram13),
    )
def _parse_chexa(self, card_name, card):
    """adds chexas; an 11-field card is a CHEXA8, anything longer a CHEXA20"""
    self._parse_solid(
        'CHEXA', card, 11,
        ('CHEXA8', self.chexa8),
        ('CHEXA20', self.chexa20),
    )
@staticmethod
def _cardlines_to_card_obj(card_lines, card_name):
    """makes a BDFCard object from raw card lines"""
    # parse the raw lines into fields, strip trailing empties, then wrap
    fields = wipe_empty_fields(to_fields(card_lines, card_name))
    return BDFCard(fields, has_none=False)
def _parse_darea(self, card_name, cards):
    """adds dareas; a second entry may start at field 5"""
    self._parse_multi(card_name, cards, self.darea, [5])
def _parse_dphase(self, card_name, cards):
    """adds dphases; a second entry may start at field 5"""
    self._parse_multi(card_name, cards, self.dphase, [5])
def _parse_cmass4(self, card_name, cards):
    """adds cmass4; a second element may start at field 5"""
    self._parse_multi(card_name, cards, self.cmass4, [5])
def _parse_cdamp4(self, card_name, cards):
    """adds cdamp4; a second element may start at field 5"""
    self._parse_multi(card_name, cards, self.cdamp4, [5])
def _parse_pvisc(self, card_name, cards):
    """adds pvisc; a second property may start at field 5"""
    self._parse_multi(card_name, cards, self.pvisc, [5])
def _parse_pdamp(self, card_name, cards):
    """adds pdamp; extra properties may start at fields 3, 5, and 7"""
    self._parse_multi(card_name, cards, self.pdamp, [3, 5, 7])
def _parse_pmass(self, card_name, cards):
    """adds pmass; extra properties may start at fields 3, 5, and 7"""
    self._parse_multi(card_name, cards, self.pmass, [3, 5, 7])
def _parse_multi(self, card_name: str, cards, card_cls, icard: List[int]):
    """parses a DAREA, DPHASE, CDAMP4, CMASS4, CVISC, PMASS, PDAMP, ???"""
    parsed = []
    for comment, card_lines in cards:
        card_obj = self._cardlines_to_card_obj(card_lines, card_name)
        # the first entry always exists and carries the comment
        parsed.append(card_cls.parse(card_obj, icard=0, comment=comment))
        # extra entries live at fixed field offsets and are optional
        for ifield in icard:
            if card_obj.field(ifield):
                parsed.append(card_cls.parse(card_obj, icard=ifield))
    self.increase_card_count(card_name, len(parsed))
    self.log.debug(' allocating %r' % card_cls.type)
    card_cls.allocate(self.card_count)
    for datai, commenti in parsed:
        card_cls.add_card(datai, commenti)
    self.log.debug(' building %r; n=%s' % (card_cls.type, card_cls.n))
    card_cls.build()
#def _prepare_darea(self, card, card_obj, comment=''):
#"""adds a DAREA"""
##def add_darea(self, darea, allow_overwrites=False):
##key = (darea.sid, darea.p, darea.c)
##if key in self.dareas and not allow_overwrites:
##if not darea == self.dareas[key]:
##assert key not in self.dareas, '\ndarea=\n%s oldDArea=\n%s' % (darea, self.dareas[key])
##else:
##assert darea.sid > 0
##self.dareas[key] = darea
##self._type_to_id_map[darea.type].append(key)
#class_instance = DAREA.add_card(card_obj, comment=comment)
#self.add_darea(class_instance)
#if card_obj.field(5):
#class_instance = DAREA.add_card(card_obj, icard=1, comment=comment)
#self.add_darea(class_instance)
def _parse_solid(self, card_name, cards, nsplit, pair1, pair2):
    """
    adds the cards to the object

    Parameters
    ----------
    card_name : str
        the card name
    cards : List[(comment, card_obj), ...]
        an series of comments and cards
    nsplit : int >= 0
        the location to identify for a card split (7 for CTETRA4/CTETRA10)
    pair1 : (card_name, slot)
        card_name : str
            the card_name; (e.g., CTETRA4)
        slot : obj
            the place to put the data (e.g., self.ctetra4)
    pair2 : (card_name, slot)
        card_name : str
            the card_name; (e.g., CTETRA10)
        slot : obj
            the place to put the data (e.g., self.ctetra10)
    """
    # split the cards by field count: exactly nsplit fields -> the
    # low-order element (pair1), anything else -> the high-order one (pair2)
    low_order = []
    high_order = []
    for comment, card_lines in cards:
        card_obj = self._cardlines_to_card_obj(card_lines, card_name)
        if card_obj.nfields == nsplit:
            low_order.append((comment, card_obj))
        else:
            high_order.append((comment, card_obj))
    # pair1 is processed completely before pair2, same as the original
    # duplicated if-blocks
    for (name, obj), parsed in ((pair1, low_order), (pair2, high_order)):
        if not parsed:
            continue
        self.increase_card_count(name, len(parsed))
        self.log.debug(' allocating %r' % obj.type)
        obj.allocate(self.card_count)
        for comment, card_obj in parsed:
            obj.add_card(card_obj, comment)
        self.log.debug(' building %r; n=%s' % (obj.type, obj.n))
        obj.build()
def _parse_cards(self, cards, card_count):
    """creates card objects and adds the parsed cards to the deck"""
    #print('card_count = %s' % card_count)
    if isinstance(cards, dict):
        # cards that need special handling (field-count dependent types,
        # grouping by id, or multiple entries per card) get dedicated parsers
        # TODO: many others...
        cards_to_get_lengths_of = {
            'CTETRA' : self._parse_ctetra,
            'CPENTA' : self._parse_cpenta,
            'CPYRAM' : self._parse_cpyram,
            'CHEXA' : self._parse_chexa,
            'SPC1' : self._parse_spc1,
            'SPCADD' : self._parse_spcadd,
            'SPC' : self._parse_spc,
            'SPCD' : self._parse_spcd,
            'MPC' : self._parse_mpc,
            'MPCADD' : self._parse_mpcadd,
            'DAREA' : self._parse_darea,
            'DPHASE' : self._parse_dphase,
            #'PELAS' : self._parse_pelas,
            'PVISC' : self._parse_pvisc,
            'PDAMP' : self._parse_pdamp,
            'CMASS4' : self._parse_cmass4,
            'PMASS' : self._parse_pmass,
            'CDAMP1' : self._parse_cdamp1,
            'CDAMP2' : self._parse_cdamp2,
            'CDAMP3' : self._parse_cdamp3,
            'CDAMP4' : self._parse_cdamp4,
        }
        # self._is_cards_dict = True
        # this is the loop that hits...
        card_names = sorted(list(cards.keys()))
        for card_name in card_names:
            if card_name in cards_to_get_lengths_of:
                card = cards[card_name]
                ncards = len(card)
                method = cards_to_get_lengths_of[card_name]
                self.log.info('dynamic vectorized parse of n%s = %s' % (card_name, ncards))
                method(card_name, card)
                del cards[card_name]
                continue
        card_name_to_obj_mapper = self.card_name_to_obj
        for card_name in card_names:
            # NOTE(review): names handled above were deleted from ``cards``
            # but are still in ``card_names``; this lookup raises KeyError
            # for them before the ``elif`` guard below can skip them --
            # confirm the intended behavior
            card = cards[card_name]
            ncards = len(card)
            if self.is_reject(card_name):# and card_name not in :
                self.log.warning('n%s = %s (rejecting)' % (card_name, ncards))
                # NOTE(review): ``asdf`` is an undefined name -- a debug
                # sentinel that makes rejected cards raise NameError here
                asdf
                #self.log.info(' rejecting card_name = %s' % card_name)
                for comment, card_lines in card:
                    self.rejects.append([_format_comment(comment)] + card_lines)
                self.increase_card_count(card_name, count_num=ncards)
            elif card_name in cards_to_get_lengths_of:
                #raise RuntimeError('this shouldnt happen because we deleted the cards above')
                continue
            else:
                ncards = len(card)
                self.log.info('n%s = %r' % (card_name, ncards))
                if card_name not in card_name_to_obj_mapper:
                    # no vectorized container -> fall back to the slow path
                    self.log.debug(' card_name=%r is not vectorized' % card_name)
                    for comment, card_lines in card:
                        self.add_card(card_lines, card_name, comment=comment,
                                      is_list=False, has_none=False)
                    del cards[card_name]
                    continue
                obj = card_name_to_obj_mapper[card_name]
                if obj is None:
                    # a container is planned but not implemented yet
                    self.log.debug('card_name=%r is not vectorized, but should be' % card_name)
                    for comment, card_lines in card:
                        self.add_card(card_lines, card_name, comment=comment,
                                      is_list=False, has_none=False)
                    del cards[card_name]
                    continue
                self.increase_card_count(card_name, ncards)
                obj.allocate(self.card_count)
                self.log.debug(' allocating %r' % card_name)
                for comment, card_lines in card:
                    #print('card_lines', card_lines)
                    fields = to_fields(card_lines, card_name)
                    # NOTE: rebinds the loop source name ``card``; safe only
                    # because the iterator was already created
                    card = wipe_empty_fields(fields)
                    card_obj = BDFCard(card, has_none=False)
                    obj.add_card(card_obj, comment=comment)
                obj.build()
                self.log.debug(' building %r; n=%s' % (obj.type, obj.n))
                del cards[card_name]
            #if self.is_reject(card_name):
                #self.log.info(' rejecting card_name = %s' % card_name)
                #for cardi in card:
                    #self.increase_card_count(card_name)
                    #self.rejects.append([cardi[0]] + cardi[1])
            #else:
                #for comment, card_lines in card:
                    #print('card_lines', card_lines)
                    #self.add_card(card_lines, card_name, comment=comment,
                                  #is_list=False, has_none=False)
    else:
        # list - this is the one that's used in the non-vectorized case
        raise NotImplementedError('dict...')
        #for card in cards:
            #card_name, comment, card_lines = card
            #if card_name is None:
                #msg = 'card_name = %r\n' % card_name
                #msg += 'card_lines = %s' % card_lines
                #raise RuntimeError(msg)
            #if self.is_reject(card_name):
                #self.reject_card_lines(card_name, card_lines, comment)
    # finalize the vectorized containers after all cards are added
    self.coords.build()
    self.elements.build()
    self.properties.build()
    self.materials.build()
def _parse_dynamic_syntax(self, key):
"""
Applies the dynamic syntax for %varName
Parameters
----------
key : str
the uppercased key
Returns
-------
value : int/float/str
the dynamic value defined by dict_of_vars
.. seealso:: :func: `set_dynamic_syntax`
"""
key = key.strip()[1:]
self.log.debug("dynamic key = %r" % key)
#self.dict_of_vars = {'P5':0.5,'ONEK':1000.}
if key not in self.dict_of_vars:
msg = "key=%r not found in keys=%s" % (key, self.dict_of_vars.keys())
raise KeyError(msg)
return self.dict_of_vars[key]
#def _is_case_control_deck(self, line):
#line_upper = line.upper().strip()
#if 'CEND' in line.upper():
#raise SyntaxError('invalid Case Control Deck card...CEND...')
#if '=' in line_upper or ' ' in line_upper:
#return True
#for card in self.uniqueBulkDataCards:
#lenCard = len(card)
#if card in line_upper[:lenCard]:
#return False
#return True
def _parse_primary_file_header(self, bdf_filename):
"""
Extract encoding, nastran_format, and punch from the primary BDF.
Parameters
----------
bdf_filename : str
the input filename
..code-block :: python
$ pyNastran: version=NX
$ pyNastran: encoding=latin-1
$ pyNastran: punch=True
$ pyNastran: dumplines=True
$ pyNastran: nnodes=10
$ pyNastran: nelements=100
$ pyNastran: skip_cards=PBEAM,CBEAM
$ pyNastran: units=in,lb,s
..warning :: pyNastran lines must be at the top of the file
"""
with open(bdf_filename, 'r') as bdf_file:
check_header = True
while check_header:
try:
line = bdf_file.readline()
except Exception:
break
if line.startswith('$'):
key, value = _parse_pynastran_header(line)
if key:
#print('pyNastran key=%s value=%s' % (key, value))
if key == 'version':
self.nastran_format = value
elif key == 'encoding':
self._encoding = value
elif key == 'punch':
self.punch = True if value == 'true' else False
elif key in ['nnodes', 'nelements']:
pass
elif key == 'dumplines':
self.dumplines = True if value == 'true' else False
elif key == 'skip_cards':
cards = {value.strip() for value in value.upper().split(',')}
self.cards_to_read = self.cards_to_read - cards
elif 'skip ' in key:
type_to_skip = key[5:].strip()
#values = [int(value) for value in value.upper().split(',')]
values = parse_patran_syntax(value)
if type_to_skip not in self.object_attributes():
raise RuntimeError('%r is an invalid key' % type_to_skip)
if type_to_skip not in self.values_to_skip:
self.values_to_skip[type_to_skip] = values
else:
self.values_to_skip[type_to_skip] = np.hstack([
self.values_to_skip[type_to_skip],
values
])
#elif key == 'skip_elements'
#elif key == 'skip_properties'
elif key == 'units':
self.units = [value.strip() for value in value.upper().split(',')]
else:
raise NotImplementedError(key)
else:
break
else:
break
def _verify_bdf(self, xref=None):
"""
Cross reference verification method.
"""
if xref is None:
xref = self._xref
#for key, card in sorted(self.params.items()):
#card._verify(xref)
for key, card in sorted(self.nodes.items()):
try:
card._verify(xref)
except Exception:
print(str(card))
raise
for key, card in sorted(self.coords.items()):
try:
card._verify(xref)
except Exception:
print(str(card))
raise
for key, card in sorted(self.elements.items()):
try:
card._verify(xref)
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
print(repr(traceback.format_exception(exc_type, exc_value,
exc_traceback)))
print(str(card))
#raise
for key, card in sorted(self.properties.items()):
try:
card._verify(xref)
except Exception:
print(str(card))
raise
for key, card in sorted(self.materials.items()):
try:
card._verify(xref)
except Exception:
print(str(card))
raise
for key, card in sorted(self.dresps.items()):
try:
card._verify(xref)
except Exception:
print(str(card))
raise
for key, card in sorted(self.dvcrels.items()):
try:
card._verify(xref)
except Exception:
print(str(card))
raise
for key, card in sorted(self.dvmrels.items()):
try:
card._verify(xref)
except Exception:
print(str(card))
raise
for key, card in sorted(self.dvprels.items()):
try:
card._verify(xref)
except Exception:
print(str(card))
raise
for key, cards in sorted(self.dvgrids.items()):
for card in cards:
try:
card._verify(xref)
except Exception:
print(str(card))
raise
# Comment headers that pyNastran itself emits when writing a deck; they are
# stripped by _clean_comment() on read so a read/write round trip does not
# duplicate them.
IGNORE_COMMENTS = (
    '$EXECUTIVE CONTROL DECK',
    '$CASE CONTROL DECK',
    'NODES', 'SPOINTS', 'EPOINTS', 'ELEMENTS',
    'PARAMS', 'PROPERTIES', 'ELEMENTS_WITH_PROPERTIES',
    'ELEMENTS_WITH_NO_PROPERTIES (PID=0 and unanalyzed properties)',
    'UNASSOCIATED_PROPERTIES',
    'MATERIALS', 'THERMAL MATERIALS',
    'CONSTRAINTS', 'SPCs', 'MPCs', 'RIGID ELEMENTS',
    'LOADS', 'AERO', 'STATIC AERO', 'AERO CONTROL SURFACES',
    'FLUTTER', 'GUST', 'DYNAMIC', 'OPTIMIZATION',
    'COORDS', 'THERMAL', 'TABLES', 'RANDOM TABLES',
    'SETS', 'CONTACT', 'REJECTS', 'REJECT_LINES',
    'PROPERTIES_MASS', 'MASSES')
def _prep_comment(comment):
return comment.rstrip()
#print('comment = %r' % comment)
#comment = ' this\n is\n a comment\n'
#print(comment.rstrip('\n').split('\n'))
#sline = [comment[1:] if len(comment) and comment[0] == ' ' else comment
#for comment in comment.rstrip().split('\n')]
#print('sline = ', sline)
#asdh
def _clean_comment(comment):
"""
Removes specific pyNastran comment lines so duplicate lines aren't
created.
Parameters
----------
comment : str
the comment to possibly remove
Returns
-------
updated_comment : str
the comment
"""
if comment == '':
pass
elif comment in IGNORE_COMMENTS:
comment = ''
elif 'pynastran' in comment.lower():
comment = ''
#if comment:
#print(comment)
return comment
def _lines_to_decks(lines, punch):
    """
    Splits the lines into their deck.

    Parameters
    ----------
    lines : List[str]
        all the lines of the file
    punch : bool
        True: every line is bulk data; False: the file has an Executive
        Control Deck, a Case Control Deck, and a Bulk Data Deck

    Returns
    -------
    executive_control_lines, case_control_lines, bulk_data_lines : List[str]
        the three decks (the first two are empty for punch files)
    """
    executive_control_lines = []
    case_control_lines = []
    bulk_data_lines = []
    if punch:
        bulk_data_lines = lines
    else:
        # flag: 1=in executive deck, 2=in case deck, 3=in bulk data
        flag = 1
        for i, line in enumerate(lines):
            if flag == 1:
                # CEND ends the executive deck (the CEND line itself stays in it)
                #line = line.upper()
                if line.upper().startswith('CEND'):
                    assert flag == 1
                    flag = 2
                executive_control_lines.append(line.rstrip())
            elif flag == 2:
                # 'BEGIN BULK' / 'BEGIN SUPER' ends the case deck
                uline = line.upper()
                if 'BEGIN' in uline and ('BULK' in uline or 'SUPER' in uline):
                    assert flag == 2
                    flag = 3
                case_control_lines.append(line.rstrip())
            else:
                break
        # `i` leaks from the loop: everything from the first bulk line on
        for line in lines[i:]:
            bulk_data_lines.append(line.rstrip())
        _check_valid_deck(flag)
    del lines
    #for line in bulk_data_lines:
        #print(line)

    # clean comments
    executive_control_lines = [_clean_comment(line) for line in executive_control_lines]
    case_control_lines = [_clean_comment(line) for line in case_control_lines]
    return executive_control_lines, case_control_lines, bulk_data_lines
def _check_valid_deck(flag):
"""Crashes if the flag is set wrong"""
if flag != 3:
if flag == 1:
found = ' - Executive Control Deck\n'
missing = ' - Case Control Deck\n'
missing += ' - Bulk Data Deck\n'
elif flag == 2:
found = ' - Executive Control Deck\n'
found += ' - Case Control Deck\n'
missing = ' - Bulk Data Deck\n'
else:
raise RuntimeError('flag=%r is not [1, 2, 3]' % flag)
msg = 'This is not a valid BDF (a BDF capable of running Nastran).\n\n'
msg += 'The following sections were found:\n%s\n' % found
msg += 'The following sections are missing:\n%s\n' % missing
msg += 'If you do not have an Executive Control Deck or a Case Control Deck:\n'
msg += ' 1. call read_bdf(...) with `punch=True`\n'
msg += " 2. Add '$ pyNastran : punch=True' to the top of the main file\n"
msg += ' 3. Name your file *.pch\n\n'
msg += 'You cannot read a deck that has an Executive Control Deck, but\n'
msg += 'not a Case Control Deck (or vice versa), even if you have a Bulk Data Deck.\n'
raise RuntimeError(msg)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,625
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/cards/aero/zona.py
|
# coding: utf-8
# pylint: disable=W0212,C0103
"""
All ZONA aero cards are defined in this file. This includes:
* TRIM
All cards are BaseCard objects.
"""
from __future__ import annotations
from itertools import count
from typing import List, Optional, Union, TYPE_CHECKING

import numpy as np

from pyNastran.utils import object_attributes, object_methods
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.bdf.cards.aero.dynamic_loads import Aero
from pyNastran.bdf.field_writer_8 import set_blank_if_default, print_card_8
from pyNastran.bdf.cards.base_card import BaseCard
from pyNastran.bdf.bdf_interface.assign_type import (
    integer, integer_or_blank, double, double_or_blank, string,
    filename_or_blank, string_or_blank, double_or_string, blank,
)
from pyNastran.bdf.cards.aero.aero import (Spline, CAERO1, CAERO2, PAERO2,  # PAERO1,
                                           SPLINE1, AESURF, AELIST,  # SPLINE2, SPLINE3,
                                           AELINK, AEFACT)
from pyNastran.bdf.cards.aero.static_loads import TRIM, AEROS
from pyNastran.bdf.cards.aero.dynamic_loads import AERO  # MKAERO1,
from pyNastran.bdf.cards.aero.utils import (
    elements_from_quad, points_elements_from_quad_points, create_ellipse)
from pyNastran.bdf.cards.coordinate_systems import Coord
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
import matplotlib
AxesSubplot = matplotlib.axes._subplots.AxesSubplot
class ZONA:
    """
    Container for the ZONA (ZAERO) specific aero cards attached to a BDF
    model (PANLST1/2/3, MKAEROZ, TRIMVAR, TRIMLNK, PAFOIL7/8), plus helpers
    that convert ZONA cards into standard Nastran aero cards.
    """
    def __init__(self, model):
        # back-reference to the owning BDF model
        self.model = model
        # maps CAERO label -> caero eid; filled in cross_reference()
        self.caero_to_name_map = {}
        #: store PANLST1,PANLST2,PANLST3
        self.panlsts = {}
        self.mkaeroz = {}
        self.trimvar = {}
        self.trimlnk = {}
        #: store PAFOIL7/PAFOIL8
        self.pafoil = {}

    @classmethod
    def _init_from_self(cls, model):
        """helper method for dict_to_h5py"""
        return cls(model)

    def clear(self):
        """clears out the ZONA object"""
        self.panlsts = {}
        self.mkaeroz = {}
        self.trimvar = {}
        self.trimlnk = {}
        self.pafoil = {}
        #self.aeroz = {}

    def object_attributes(self, mode:str='public', keys_to_skip: Optional[List[str]]=None,
                          filter_properties: bool=False):
        """
        List the names of attributes of a class as strings. Returns public
        attributes as default.

        Parameters
        ----------
        mode : str
            defines what kind of attributes will be listed
            * 'public' - names that do not begin with underscore
            * 'private' - names that begin with single underscore
            * 'both' - private and public
            * 'all' - all attributes that are defined for the object
        keys_to_skip : List[str]; default=None -> []
            names to not consider to avoid deprecation warnings

        Returns
        -------
        attribute_names : List[str]
            sorted list of the names of attributes of a given type or None
            if the mode is wrong
        """
        if keys_to_skip is None:
            keys_to_skip = []
        # 'log'/'model' would recurse or spam deprecation warnings
        my_keys_to_skip = [
            'log', 'model',
        ]
        return object_attributes(self, mode=mode, keys_to_skip=keys_to_skip+my_keys_to_skip,
                                 filter_properties=filter_properties)

    def object_methods(self, mode: str='public',
                       keys_to_skip: Optional[List[str]]=None) -> List[str]:
        """
        List the names of methods of a class as strings. Returns public methods
        as default.

        Parameters
        ----------
        obj : instance
            the object for checking
        mode : str
            defines what kind of methods will be listed
            * "public" - names that do not begin with underscore
            * "private" - names that begin with single underscore
            * "both" - private and public
            * "all" - all methods that are defined for the object
        keys_to_skip : List[str]; default=None -> []
            names to not consider to avoid deprecation warnings

        Returns
        -------
        method : List[str]
            sorted list of the names of methods of a given type
            or None if the mode is wrong
        """
        if keys_to_skip is None:
            keys_to_skip = []
        my_keys_to_skip = []  # type: List[str]
        my_keys_to_skip = ['log',]
        return object_methods(self, mode=mode, keys_to_skip=keys_to_skip+my_keys_to_skip)

    def verify(self, xref):
        """runs _verify on every stored ZONA card (no-op for non-zona models)"""
        if self.model.nastran_format != 'zona':
            return
        for panlst in self.panlsts.values():
            panlst._verify(xref)
        for mkaeroz in self.mkaeroz.values():
            mkaeroz._verify(xref)
        for trimvar in self.trimvar.values():
            trimvar._verify(xref)
        for trimlnk in self.trimlnk.values():
            trimlnk._verify(xref)
        for pafoil in self.pafoil.values():
            pafoil._verify(xref)

    def validate(self):
        """runs validate on every stored ZONA card (no-op for non-zona models)"""
        if self.model.nastran_format != 'zona':
            return
        for panlst in self.panlsts.values():
            panlst.validate()
        for mkaeroz in self.mkaeroz.values():
            mkaeroz.validate()
        for trimvar in self.trimvar.values():
            trimvar.validate()
        for trimlnk in self.trimlnk.values():
            trimlnk.validate()
        for pafoil in self.pafoil.values():
            pafoil.validate()

    def PAFOIL(self, pid, msg=''):
        """gets a pafoil profile (PAFOIL7/PAFOIL8)"""
        try:
            return self.pafoil[pid]
        except KeyError:
            pafoils = np.unique(list(self.pafoil.keys()))
            raise KeyError(f'pid={pid} not found{msg}. Allowed pafoils={pafoils}')

    def update_for_zona(self):
        """updates for zona"""
        # swap the model's card parsers so ZONA flavors of TRIM/CAERO/etc.
        # are built instead of the standard Nastran cards
        card_parser = self.model._card_parser
        add_methods = self.model._add_methods
        card_parser['TRIM'] = (TRIM_ZONA, add_methods._add_trim_object)
        card_parser['CAERO7'] = (CAERO7, add_methods._add_caero_object)
        card_parser['AEROZ'] = (AEROZ, add_methods._add_aeros_object)
        card_parser['AESURFZ'] = (AESURFZ, self._add_aesurfz_object)
        card_parser['FLUTTER'] = (FLUTTER_ZONA, add_methods._add_flutter_object)
        card_parser['SPLINE1'] = (SPLINE1_ZONA, add_methods._add_spline_object)
        card_parser['SPLINE2'] = (SPLINE2_ZONA, add_methods._add_spline_object)
        card_parser['SPLINE3'] = (SPLINE3_ZONA, add_methods._add_spline_object)
        card_parser['PANLST1'] = (PANLST1, self._add_panlst_object)
        card_parser['PANLST3'] = (PANLST3, self._add_panlst_object)
        card_parser['PAFOIL7'] = (PAFOIL7, self._add_pafoil_object)
        card_parser['MKAEROZ'] = (MKAEROZ, self._add_mkaeroz_object)
        card_parser['SEGMESH'] = (SEGMESH, add_methods._add_paero_object)
        card_parser['BODY7'] = (BODY7, add_methods._add_caero_object)
        card_parser['ACOORD'] = (ACOORD, add_methods._add_coord_object)
        card_parser['TRIMVAR'] = (TRIMVAR, self._add_trimvar_object)
        card_parser['TRIMLNK'] = (TRIMLNK, self._add_trimlnk_object)
        cards = [
            'CAERO7', 'AEROZ', 'AESURFZ', 'PANLST1', 'PANLST3', 'PAFOIL7',
            'SEGMESH', 'BODY7', 'ACOORD', 'MKAEROZ',
            'TRIMVAR', 'TRIMLNK', 'FLUTTER']
        self.model.cards_to_read.update(set(cards))

    def _add_panlst_object(self, panlst: Union[PANLST1, PANLST3]) -> None:
        """adds an PANLST1/PANLST2/PANLST3 object"""
        assert panlst.eid not in self.panlsts
        assert panlst.eid > 0
        key = panlst.eid
        self.panlsts[key] = panlst
        self.model._type_to_id_map[panlst.type].append(key)

    def _add_pafoil_object(self, pafoil: PAFOIL7) -> None:
        """adds an PAFOIL7/PAFOIL8 object"""
        assert pafoil.pid not in self.pafoil
        assert pafoil.pid > 0
        key = pafoil.pid
        self.pafoil[key] = pafoil
        self.model._type_to_id_map[pafoil.type].append(key)

    def _add_aesurfz_object(self, aesurf: AESURFZ) -> None:
        """adds an AESURFZ object"""
        # AESURFZ is keyed by label (see AESURFZ.aesid) into model.aesurf
        key = aesurf.aesid
        model = self.model
        assert key not in model.aesurf, '\naesurf=\n%s old=\n%s' % (
            aesurf, model.aesurf[key])
        model.aesurf[key] = aesurf
        model._type_to_id_map[aesurf.type].append(key)

    def _add_mkaeroz_object(self, mkaeroz: MKAEROZ) -> None:
        """adds an MKAEROZ object"""
        assert mkaeroz.sid not in self.mkaeroz
        assert mkaeroz.sid > 0
        key = mkaeroz.sid
        self.mkaeroz[key] = mkaeroz
        self.model._type_to_id_map[mkaeroz.type].append(key)

    def _add_trimvar_object(self, trimvar: TRIMVAR) -> None:
        """adds an TRIMVAR object"""
        assert trimvar.var_id not in self.trimvar
        assert trimvar.var_id > 0
        key = trimvar.var_id
        self.trimvar[key] = trimvar
        self.model._type_to_id_map[trimvar.type].append(key)

    def _add_trimlnk_object(self, trimlnk: TRIMLNK) -> None:
        """adds an TRIMLNK object"""
        assert trimlnk.link_id not in self.trimlnk
        assert trimlnk.link_id > 0
        key = trimlnk.link_id
        self.trimlnk[key] = trimlnk
        self.model._type_to_id_map[trimlnk.type].append(key)

    def cross_reference(self):
        """cross references the stored ZONA cards (no-op for non-zona models)"""
        if self.model.nastran_format != 'zona':
            return
        for mkaeroz in self.mkaeroz.values():
            mkaeroz.cross_reference(self.model)
        for trimvar in self.trimvar.values():
            trimvar.cross_reference(self.model)
        for trimlnk in self.trimlnk.values():
            trimlnk.cross_reference(self.model)
        for unused_id, pafoil in self.pafoil.items():
            pafoil.cross_reference(self.model)
        #for aeroz in self.aeroz.values():
            #aeroz.cross_reference(self.model)

        # build the label -> eid lookup used by the converters
        for caero in self.model.caeros.values():
            #print('%s uses CAERO eid=%s' % (caero.label, caero.eid))
            self.caero_to_name_map[caero.label] = caero.eid

    def safe_cross_reference(self):
        # no safe variant; falls back to the strict cross_reference
        self.cross_reference()

    def write_bdf(self, bdf_file, size=8, is_double=False):
        """writes all stored ZONA cards to an open file object"""
        #if self.model.nastran_format != 'zona':
            #return
        for unused_id, panlst in self.panlsts.items():
            bdf_file.write(panlst.write_card(size=size, is_double=is_double))
        for unused_id, mkaeroz in self.mkaeroz.items():
            bdf_file.write(mkaeroz.write_card(size=size, is_double=is_double))
        for unused_id, trimvar in self.trimvar.items():
            bdf_file.write(trimvar.write_card(size=size, is_double=is_double))
        for unused_id, trimlnk in self.trimlnk.items():
            bdf_file.write(trimlnk.write_card(size=size, is_double=is_double))
        for unused_id, pafoil in self.pafoil.items():
            bdf_file.write(pafoil.write_card(size=size, is_double=is_double))

    def convert_to_nastran(self, save=True):
        """Converts a ZONA model to Nastran"""
        if self.model.nastran_format != 'zona':
            # nothing to convert; return empty results
            caeros = {}
            caero2s = []
            make_paero1 = False
            return caeros, caero2s, make_paero1
        caeros, caero2s, make_paero1 = self._convert_caeros()
        splines = self._convert_splines()
        aesurf, aelists = self._convert_aesurf_aelist()
        trims = self._convert_trim()
        aeros, aero = self.model.aeros.convert_to_zona(self.model)
        aelinks = self._convert_trimlnk()
        if save:
            # replace the model's aero containers with the converted cards
            self.clear()
            self.model.splines = splines
            self.model.aesurf = aesurf
            self.model.aelists = aelists
            self.model.aelinks = aelinks
            self.model.trims = trims
            self.model.aeros = aeros
            self.model.aero = aero
        return caeros, caero2s, make_paero1

    def _convert_caeros(self):
        """Converts ZONA CAERO7/BODY7 to CAERO1/CAERO2"""
        model = self.model
        caeros = {}
        caero2s = []
        make_paero1 = False
        for caero_id, caero in sorted(model.caeros.items()):
            if caero.type == 'CAERO7':
                caero_new = caero.convert_to_nastran()
                make_paero1 = True
            elif caero.type == 'BODY7':
                # BODY7s are converted later in _add_caero2s
                caero2s.append(caero)
                continue
            else:
                raise NotImplementedError(caero)
            caeros[caero_id] = caero_new
        # add=False: exercises the BODY7 conversion without storing results
        self._add_caero2s(caero2s, add=False)
        return caeros, caero2s, make_paero1

    def _add_caero2s(self, caero2s, add=False):
        """Converts ZONA BODY7 to CAERO2/PAERO2/AEFACT"""
        model = self.model
        add_methods = model._add_methods
        caero_body_ids = []
        for caero2 in caero2s:
            caero_id = caero2.eid
            out = caero2.convert_to_nastran(model)
            caero_new, paero2, aefact_xs, aefact_width, aefact_theta1, aefact_theta2 = out
            caero_body_ids.append(caero_id)
            if add:
                add_methods._add_aefact_object(aefact_xs)
                add_methods._add_aefact_object(aefact_width)
                add_methods._add_aefact_object(aefact_theta1)
                add_methods._add_aefact_object(aefact_theta2)
                add_methods._add_paero_object(paero2)
                add_methods._add_caero_object(caero_new)
        return

    def _convert_splines(self):
        """Converts ZONA splines to splines"""
        splines = {}
        for unused_spline_id, spline in self.model.splines.items():
            #print(spline)
            if spline.type == 'SPLINE1_ZONA':
                splines_new = spline.convert_to_nastran(self.model)
            elif spline.type == 'SPLINE3_ZONA':
                splines_new = spline.convert_to_nastran(self.model)
            else:
                raise NotImplementedError(spline)
            # NOTE(review): every converted spline is stored under the same
            # original eid, so only the last one survives — confirm intended
            for spline_new in splines_new:
                splines[spline.eid] = spline_new
        return splines

    def _convert_aesurf_aelist(self):
        """
        Converts ZONA AESURFZ to AESURF/AELIST

        +---------+--------+-------+-------+-------+--------+--------+
        |    1    |   2    |   3   |   4   |   5   |   6    |   7    |
        +=========+========+=======+=======+=======+========+========+
        | AESURFZ | LABEL  | TYPE  | CID   | SETK  | SETG   | ACTID  |
        +---------+--------+-------+-------+-------+--------+--------+
        | AESURFZ | RUDDER | ASYM  |   1   |   10  |   20   |   0    |
        +---------+--------+-------+-------+-------+--------+--------+
        """
        model = self.model
        # new AELIST/AESURF ids start after the highest existing AELIST id
        aelist_id = max(model.aelists) + 1 if model.aelists else 1
        aesurf_id = aelist_id
        aesurf = {}
        aelists = {}
        for unused_aesurf_name, aesurfi in sorted(model.aesurf.items()):
            aelist, aesurfi2 = aesurfi.convert_to_nastran(model, aesurf_id, aelist_id)
            aelists[aelist.sid] = aelist
            aesurf[aesurfi2.aesid] = aesurfi2
            aesurf_id += 1
            aelist_id += 1
        return aesurf, aelists

    def _convert_trim(self):
        """Converts ZONA TRIM to TRIM"""
        trims = {}
        model = self.model
        for trim_id, trim in sorted(model.trims.items()):
            trim_new = trim.convert_to_nastran(model)
            trims[trim_id] = trim_new
        return trims

    def _convert_trimlnk(self):
        """Converts ZONA TRIMLNK to AELINK"""
        model = self.model
        assert isinstance(model.aelinks, dict), model.aelinks
        aelinks = {}
        for trim_id, trimlnk in sorted(self.trimlnk.items()):
            aelink = trimlnk.convert_to_nastran(model)
            aelinks[trim_id] = aelink
        return aelinks

    def __repr__(self):
        msg = '<ZONA>; nPANLSTs=%s nmkaeroz=%s' % (
            len(self.panlsts), len(self.mkaeroz),
        )
        return msg
class ACOORD(Coord):  # not done
    """
    Defines a ZONA aerodynamic coordinate system using an origin plus a
    pitch (DELTA) and roll (THETA) angle.

    +--------+-----+--------+--------+--------+-------+-------+--------+
    |    1   |  2  |    3   |    4   |    5   |   6   |   7   |    8   |
    +========+=====+========+========+========+=======+=======+========+
    | ACOORD |  ID | XORIGN | YORIGN | ZORIGN | DELTA | THETA |        |
    +--------+-----+--------+--------+--------+-------+-------+--------+
    | ACOORD | 10  | 250.0  |  52.5  |  15.0  |  0.0  |  0.0  |        |
    +--------+-----+--------+--------+--------+-------+-------+--------+
    """
    type = 'ACOORD'
    Type = 'R'

    @property
    def rid(self):
        # ACOORD has no reference coordinate system
        return None

    def __init__(self, cid, origin, delta, theta, comment=''):
        """
        Defines the ACOORD card

        Parameters
        ----------
        cid : int
            coordinate system id
        origin : List[float]
            the xyz origin
        delta : float
            pitch angle
        theta : float
            roll angle
        comment : str; default=''
            a comment for the card
        """
        Coord.__init__(self)
        if comment:
            self.comment = comment
        self.cid = cid
        self.origin = origin
        self.delta = delta
        self.theta = theta

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a ACOORD card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        cid = integer(card, 1, 'cid')
        origin_x = double(card, 2, 'origin_x')
        origin_y = double(card, 3, 'origin_y')
        origin_z = double(card, 4, 'origin_z')
        origin = [origin_x, origin_y, origin_z]
        delta = double(card, 5, 'delta')
        theta = double(card, 6, 'theta')
        assert len(card) <= 7, f'len(ACOORD card) = {len(card):d}\ncard={card}'
        return ACOORD(cid, origin, delta, theta, comment=comment)

    def setup(self):
        # axes are set to the identity; the delta/theta rotations are NOT
        # applied here (class is marked "not done")
        self.i = np.array([1., 0., 0.])
        self.j = np.array([0., 1., 0.])
        self.k = np.array([0., 0., 1.])

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        pass

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def coord_to_xyz(self, p):
        # transform is intentionally a no-op (see commented-out call below)
        return p
        #return self.acoord_transform_to_global(p)

    def acoord_transform_to_global(self, p):
        """
        Parameters
        ----------
        p : (3,) float ndarray
            the point to transform

        .. warning:: not done, just setting up how you'd do this
        .. note:: per http://en.wikipedia.org/wiki/Euler_angles
          "This means for example that a convention named (YXZ) is the result
          of performing first an intrinsic Z rotation, followed by X and
          Y rotations, in the moving axes (Note: the order of multiplication
          of matrices is the opposite of the order in which they're
          applied to a vector)."
        """
        ct = np.cos(np.radians(self.theta))
        st = np.sin(np.radians(self.theta))
        #if rotation == 1:
            #p = self.rotation_x(ct, st) @ p
        #elif rotation == 2:
        p = self.rotation_y(ct, st) @ p
        #elif rotation == 3:
            #p = self.rotation_z(ct, st) @ p
        #else:
            #raise RuntimeError('rotation=%s rotations=%s' % (rotation, rotations))
        return p

    def rotation_x(self, ct, st):
        # NOTE(review): this does not match a standard x-rotation
        # ([[1,0,0],[0,ct,-st],[0,st,ct]]); only ever called from
        # commented-out code above — confirm before enabling
        matrix = np.array([[1., 0., 0.],
                           [ct, 0., -st],
                           [-st, 0., ct]])
        return matrix

    def rotation_y(self, ct, st):
        matrix = np.array([[ct, 0., st],
                           [0., 1., 0.],
                           [-st, 0., ct]])
        return matrix

    def rotation_z(self, ct, st):
        matrix = np.array([[ct, st, 0.],
                           [-st, ct, 0.],
                           [0., 0., 1.]])
        return matrix

    def raw_fields(self):
        list_fields = ['ACOORD', self.cid] + self.origin + [self.delta, self.theta]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class AESURFZ(BaseCard):
"""
Specifies an aerodynamic control surface for aeroservoelastic, static
aeroelastic/trim analysis, or the transient response analysis.
+---------+--------+-------+-------+-------+--------+--------+--------+--------+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+=========+========+=======+=======+=======+========+========+========+========+
| AESURFZ | LABEL | TYPE | CID | SETK | SETG | ACTID | | |
+---------+--------+-------+-------+-------+--------+--------+--------+--------+
| AESURFZ | RUDDER | ASYM | 1 | 10 | 20 | 0 | | |
+---------+--------+-------+-------+-------+--------+--------+--------+--------+
"""
    type = 'AESURFZ'

    @property
    def aesid(self):
        # AESURFZ is identified by its label (a string), unlike AESURF's
        # integer id
        return self.label

    @property
    def alid1_ref(self):
        # AESURFZ has no AELIST reference; kept for AESURF API compatibility
        return None
    def __init__(self, label, surface_type, cid, panlst, setg, actuator_tf,
                 comment=''):
        """
        Creates an AESURFZ card, which defines a control surface

        Parameters
        ----------
        label : str
            controller name
        surface_type : str
            defines the control surface type {SYM, ASYM}
        cid : int
            coordinate system id to define the hinge axis
        panlst : int
            aero panels defined by PANLST
        setg : int
            id of a grid-point set (SET1/SETADD per add_card) — TODO confirm
        actuator_tf : int
            id of an ACTU card (per add_card) — TODO confirm
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        #: Controller name.
        self.label = label
        self.surface_type = surface_type
        self.panlst = panlst
        self.setg = setg
        self.actuator_tf = actuator_tf
        #: Identification number of a rectangular coordinate system with a
        #: y-axis that defines the hinge line of the control surface
        #: component.
        self.cid = cid
        # cross-reference slots; filled by (safe_)cross_reference()
        self.cid_ref = None
        self.panlst_ref = None
        self.aero_element_ids = None
    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds an AESURFZ card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        label = string(card, 1, 'label')
        surface_type = string(card, 2, 'TYPE')
        cid = integer(card, 3, 'CID')
        panlst = integer(card, 4, 'PANLST/SETK')  # PANLST1, PANLST2, PANLST3
        setg = integer(card, 5, 'SETG')  # SET1, SETADD
        actuator_tf = integer_or_blank(card, 6, 'ACTID')  # ACTU card
        assert len(card) <= 7, f'len(AESURFZ card) = {len(card):d}\ncard={card}'
        assert surface_type in ['SYM', 'ANTISYM', 'ASYM']
        return AESURFZ(label, surface_type, cid, panlst, setg, actuator_tf, comment=comment)
def Cid(self):
if self.cid_ref is not None:
return self.cid_ref.cid
return self.cid
def SetK(self):
if self.panlst_ref is not None:
return self.panlst_ref.eid
return self.panlst
#def aelist_id1(self):
#if self.alid1_ref is not None:
#return self.alid1_ref.sid
#return self.alid1
#def aelist_id2(self):
#if self.alid2_ref is not None:
#return self.alid2_ref.sid
#return self.alid2
    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        self.cid_ref = model.Coord(self.cid)
        #self.alid1_ref = model.AELIST(self.alid1)
        #if self.alid2:
            #self.alid2_ref = model.AELIST(self.alid2)
        #if self.tqllim is not None:
            #self.tqllim_ref = model.TableD(self.tqllim)
        #if self.tqulim is not None:
            #self.tqulim_ref = model.TableD(self.tqulim)
        # resolve the PANLST and cache its aero element ids
        self.panlst_ref = model.zona.panlsts[self.panlst]
        self.panlst_ref.cross_reference(model)
        self.aero_element_ids = self.panlst_ref.aero_element_ids
    def safe_cross_reference(self, model: BDF, xref_errors):
        """
        Cross references the card without crashing on a missing coord;
        note the PANLST lookup itself is NOT guarded and can still raise.
        """
        msg = ', which is required by AESURF aesid=%s' % self.aesid
        self.cid_ref = model.safe_coord(self.cid, self.aesid, xref_errors, msg=msg)
        #if self.cid2 is not None:
            #self.cid2_ref = model.safe_coord(self.cid2, self.aesid, xref_errors, msg=msg)

        #try:
            #self.alid1_ref = model.AELIST(self.alid1)
        #except KeyError:
            #pass

        #if self.alid2:
            #try:
                #self.alid2_ref = model.AELIST(self.alid2)
            #except KeyError:
                #pass

        #if self.tqllim is not None:
            #try:
                #self.tqllim_ref = model.TableD(self.tqllim)
            #except KeyError:
                #pass

        #if self.tqulim is not None:
            #try:
                #self.tqulim_ref = model.TableD(self.tqulim)
            #except KeyError:
                #pass
        self.panlst_ref = model.zona.panlsts[self.panlst]
        self.panlst_ref.cross_reference(model)
        self.aero_element_ids = self.panlst_ref.aero_element_ids
def uncross_reference(self) -> None:
"""Removes cross-reference links"""
self.cid = self.Cid()
self.cid_ref = None
self.panlst = self.SetK()
self.panlst_ref = None
    def convert_to_nastran(self, model, aesurf_id, aelist_id):
        """
        Converts the AESURFZ into a Nastran AESURF + AELIST pair.

        +--------+--------+-------+-------+-------+--------+--------+--------+--------+
        |    1   |   2    |   3   |   4   |   5   |   6    |    7   |   8    |   9    |
        +========+========+=======+=======+=======+========+========+========+========+
        | AESURF |   ID   | LABEL | CID1  | ALID1 |  CID2  | ALID2  |  EFF   |  LDW   |
        +--------+--------+-------+-------+-------+--------+--------+--------+--------+
        |        |  CREFC | CREFS | PLLIM | PULIM | HMLLIM | HMULIM | TQLLIM | TQULIM |
        +--------+--------+-------+-------+-------+--------+--------+--------+--------+

        +---------+--------+-------+-------+-------+--------+--------+
        |    1    |   2    |   3   |   4   |   5   |   6    |   7    |
        +=========+========+=======+=======+=======+========+========+
        | AESURFZ | LABEL  | TYPE  | CID   | SETK  | SETG   | ACTID  |
        +---------+--------+-------+-------+-------+--------+--------+
        | AESURFZ | RUDDER | ASYM  |   1   |   10  |   20   |   0    |
        +---------+--------+-------+-------+-------+--------+--------+
        """
        # only the asymmetric form maps cleanly onto a single AESURF
        assert self.surface_type == 'ASYM', str(self)
        # the AELIST holds the box ids resolved by cross_reference()
        aelist = AELIST(aelist_id, self.aero_element_ids)
        # deflection limits default to +/- 90 degrees
        aesurf = AESURF(aesurf_id, self.label, self.cid, aelist_id, cid2=None, alid2=None,
                        eff=1.0, ldw='LDW', crefc=1.0,
                        crefs=1.0, pllim=-np.pi/2.,
                        pulim=np.pi/2., hmllim=None,
                        hmulim=None, tqllim=None,
                        tqulim=None, comment=self.comment)
        aesurf.validate()
        aelist.validate()
        return aelist, aesurf
    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : List[int/float/str]
            the fields that define the card
        """
        list_fields = ['AESURFZ', self.label, self.surface_type, self.cid,
                       self.panlst, self.setg, self.actuator_tf]
        return list_fields
def repr_fields(self):
"""
Gets the fields in their simplified form
Returns
-------
fields : List[int/float/str]
the fields that define the card
"""
return self.raw_fields()
def write_card(self, size: int=8, is_double: bool=False) -> str:
"""
Writes the card with the specified width and precision
Parameters
----------
size : int (default=8)
size of the field; {8, 16}
is_double : bool (default=False)
is this card double precision
Returns
-------
msg : str
the string representation of the card
"""
card = self.repr_fields()
return self.comment + print_card_8(card)
class AEROZ(Aero):
    """
    Gives basic aerodynamic parameters for unsteady aerodynamics
    (the ZONA analog of the Nastran AERO/AEROS cards).

    +-------+-------+-------+------+---------+---------+-------+-------+------+
    |   1   |   2   |   3   |  4   |    5    |    6    |   7   |   8   |  9   |
    +=======+=======+=======+======+=========+=========+=======+=======+======+
    | AEROZ | ACSID | XZSYM | FLIP | FMMUNIT | FMLUNIT | REFC  | REFB  | REFS |
    +-------+-------+-------+------+---------+---------+-------+-------+------+
    |       | REFX  | REFY  | REFZ |         |         |       |       |      |
    +-------+-------+-------+------+---------+---------+-------+-------+------+
    | AEROZ |   0   |  YES  |  NO  |  SLIN   |   IN    | 22.73 | 59.39 | 1176 |
    +-------+-------+-------+------+---------+---------+-------+-------+------+
    |       | 59.53 |  0.0  | 0.0  |         |         |       |       |      |
    +-------+-------+-------+------+---------+---------+-------+-------+------+
    """
    type = 'AEROZ'

    def __init__(self, fm_mass_unit, fm_length_unit,
                 cref, bref, sref,
                 flip='NO', acsid=0, rcsid=0, sym_xz=0, xyz_ref=None,
                 comment='', sym_xy=0):
        """
        Creates an AEROZ card

        Parameters
        ----------
        fm_mass_unit : str
            the mass unit for force/moment output (e.g., 'SLIN', 'LBM')
        fm_length_unit : str
            the length unit for force/moment output (e.g., 'IN')
        cref : float
            the aerodynamic chord
        bref : float
            the wing span
            for a half model, this should be the full span
            for a full model, this should be the full span
        sref : float
            the wing area
            for a half model, this should be the half area
            for a full model, this should be the full area
        flip : str; default='NO'
            'YES' : structure=left, aero=right
            'NO'  : no mirroring
        acsid : int; default=0
            aerodyanmic coordinate system
            defines the direction of the wind
        rcsid : int; default=0
            coordinate system for rigid body motions
        sym_xz : int/str; default=0
            xz symmetry flag (+1=symmetry; -1=antisymmetric);
            ZONA decks pass 'YES'/'NO'/'H2F'
        xyz_ref : List[float]; default=None
            the [x, y, z] moment reference point
        comment : str; default=''
            a comment for the card
        sym_xy : int; default=0
            xy symmetry flag; appended last to preserve the positional
            interface (previously this attribute was never assigned,
            which raised AttributeError in __init__)
        """
        Aero.__init__(self)
        if comment:
            self.comment = comment
        self.fm_mass_unit = fm_mass_unit
        self.fm_length_unit = fm_length_unit
        self.flip = flip

        #: Aerodynamic coordinate system identification.
        self.acsid = acsid
        #: Reference coordinate system identification for rigid body motions.
        self.rcsid = rcsid
        #: Reference chord length
        self.cref = cref
        #: Reference span
        self.bref = bref
        #: Reference wing area
        self.sref = sref
        #: Symmetry key for the aero coordinate x-z plane. See Remark 6.
        #: (Integer = +1 for symmetry, 0 for no symmetry, and -1 for antisymmetry;
        #: Default = 0)
        self.sym_xz = sym_xz
        #: Symmetry key for the aero coordinate x-y plane.
        #: BUGFIX: this was previously never assigned, so the None-check
        #: below crashed every instantiation.
        self.sym_xy = sym_xy
        self.xyz_ref = xyz_ref

        # normalize blank fields to their defaults
        if self.acsid is None:
            self.acsid = 0
        if self.rcsid is None:
            self.rcsid = 0
        if self.sym_xz is None:
            self.sym_xz = 0
        if self.sym_xy is None:
            self.sym_xy = 0
        self.acsid_ref = None
        self.rcsid_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds an AEROZ card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card

        $       ACSID XZSYM FLIP FMMUNIT FMLUNIT REFC   REFB   REFS
        $+ABC   REFX  REFY  REFZ
        AEROZ   0     YES   NO   SLIN    IN       22.73 59.394 1175.8
                59.53 0.0   0.0
        """
        acsid = integer_or_blank(card, 1, 'acsid', 0)
        sym_xz = string(card, 2, 'sym_xz')
        flip = string(card, 3, 'flip')
        fm_mass_unit = string(card, 4, 'fm_mass_unit')
        fm_length_unit = string(card, 5, 'fm_length_unit')

        # YES-aero=half,structure=half
        # NO-aero=full; structure=full
        # H2F-aero=full; structure=half
        assert sym_xz in ['YES', 'NO', 'H2F'], 'sym_xz=%r' % sym_xz

        # YES-structure=left,aero=right
        assert flip in ['YES', 'NO'], 'flip=%r' % flip
        assert fm_mass_unit in ['SLIN', 'LBM'], 'fm_mass_unit=%r' % fm_mass_unit
        assert fm_length_unit in ['IN'], 'fm_length_unit=%r' % fm_length_unit

        #rcsid = integer_or_blank(card, 2, 'rcsid', 0)
        cref = double_or_blank(card, 6, 'cRef', 1.)
        bref = double_or_blank(card, 7, 'bRef', 1.)
        sref = double_or_blank(card, 8, 'Sref', 1.)
        xref = double_or_blank(card, 9, 'xRef', 0.)
        yref = double_or_blank(card, 10, 'yRef', 0.)
        zref = double_or_blank(card, 11, 'zref', 0.)
        xyz_ref = [xref, yref, zref]
        assert len(card) <= 12, f'len(AEROZ card) = {len(card):d}\ncard={card}'

        # faking data to not change gui
        rcsid = 0
        return AEROZ(fm_mass_unit, fm_length_unit,
                     cref, bref, sref, acsid=acsid, rcsid=rcsid,
                     sym_xz=sym_xz, flip=flip, xyz_ref=xyz_ref,
                     comment=comment)

    def Acsid(self):
        """Returns the aero coordinate system id, preferring the cross-referenced coord."""
        try:
            return self.acsid_ref.cid
        except AttributeError:
            return self.acsid

    def Rcsid(self):
        """Returns the rigid-body coordinate system id, preferring the cross-referenced coord."""
        try:
            return self.rcsid_ref.cid
        except AttributeError:
            return self.rcsid

    def cross_reference(self, model: BDF) -> None:
        """
        Cross refernece aerodynamic coordinate system.

        Parameters
        ----------
        model : BDF
            The BDF object.
        """
        msg = ', which is required by AEROZ'
        self.acsid_ref = model.Coord(self.acsid, msg=msg)
        self.rcsid_ref = model.Coord(self.rcsid, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors):
        """
        Safe cross refernece aerodynamic coordinate system.

        Parameters
        ----------
        model : BDF
            The BDF object.
        """
        msg = ', which is required by AEROZ'
        self.acsid_ref = model.safe_coord(self.acsid, None, xref_errors, msg=msg)
        self.rcsid_ref = model.safe_coord(self.rcsid, None, xref_errors, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.acsid_ref = None
        self.rcsid_ref = None

    def convert_to_zona(self, unused_model):
        """
        Converts the AEROZ into a Nastran AEROS + AERO pair.

        NOTE(review): despite the name, this converts *to Nastran*, matching
        the convert_to_nastran() naming used elsewhere; kept for compatibility.

        $       ACSID XZSYM FLIP FMMUNIT FMLUNIT REFC   REFB   REFS
        $+ABC   REFX  REFY  REFZ
        AEROZ   0     YES   NO   SLIN    IN       22.73 59.394 1175.8
                59.53 0.0   0.0
        """
        cref = self.cref
        bref = self.bref
        sref = self.sref
        acsid = self.acsid
        rho_ref = 1.0  # Nastran AERO needs a density ref; ZONA has none
        if self.sym_xz == 'NO':
            sym_xz = 0
        elif self.sym_xz == 'YES':
            sym_xz = 1
        else:
            # 'H2F' has no single-card Nastran analog
            raise NotImplementedError(self.sym_xz)
        assert sym_xz in [0, 1], sym_xz
        aeros = AEROS(cref, bref, sref, acsid=acsid, rcsid=0, sym_xz=sym_xz, sym_xy=0,
                      comment=str(self))
        velocity = 1.
        aero = AERO(velocity, cref, rho_ref, acsid=acsid, sym_xz=sym_xz, sym_xy=0,
                    comment='')
        return aeros, aero

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        raise NotImplementedError()

    def repr_fields(self):
        """
        Gets the fields in their simplified form

        Returns
        -------
        fields : List[varies]
            the fields that define the card
        """
        #$       ACSID XZSYM FLIP FMMUNIT FMLUNIT REFC   REFB   REFS
        #$+ABC   REFX  REFY  REFZ
        #AEROZ   0     YES   NO   SLIN    IN       22.73 59.394 1175.8
        #        59.53 0.0   0.0
        list_fields = ['AEROZ', self.Acsid(), self.sym_xz, self.flip,
                       self.fm_mass_unit, self.fm_length_unit,
                       self.cref, self.bref, self.sref] + list(self.xyz_ref)
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """Writes the card in small-field (8-character) format."""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class MKAEROZ(BaseCard):
    """
    Defines a Mach number and a set of reduced frequencies at which the
    unsteady aerodynamics (the AIC database) are generated/saved.
    """
    type = 'MKAEROZ'

    def __init__(self, sid, mach, flt_id, filename, print_flag, freqs,
                 method=0, save=None, comment=''):
        """
        Creates an MKAEROZ card

        Parameters
        ----------
        sid : int
            the MKAEROZ id
        mach : float
            the mach number for the TRIM solution
        flt_id : int
            the IDFLT entry; ???
        filename : str
            the AIC database filename;
            the length of the file must be at most 56 characters
        print_flag : int
            ???
        freqs : List[float]
            the reduced frequencies; ???
        method : int; default=0
            ???
        save : str; default=None
            AIC database operation:
            SAVE    save the AICs
            ACQUIRE load an AIC database
            ADD     append the new acids to the existing AIC database
            RESTART continue an analysis
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.sid = sid
        self.mach = mach
        self.method = method
        self.flt_id = flt_id
        self.save = save
        self.freqs = freqs
        self.filename = filename
        self.print_flag = print_flag

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds an MKAEROZ card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'IDMK')
        mach = double(card, 2, 'MACH')
        method = integer(card, 3, 'METHOD')
        flt_id = integer(card, 4, 'IDFLT')
        save = string_or_blank(card, 5, 'SAVE')

        # the filename is split across two 8-character fields
        filename_a = filename_or_blank(card, 6, 'FILENAMEA', '')
        filename_b = filename_or_blank(card, 7, 'FILENAMEB', '')
        filename = (filename_a + filename_b).rstrip()

        print_flag = integer_or_blank(card, 8, 'PRINT_FLAG', 0)
        # FREQ1, FREQ2, ... fill the remainder of the card
        freqs = [double(card, ifield, 'FREQ%i' % (ifield - 8))
                 for ifield in range(9, len(card))]
        return MKAEROZ(sid, mach, flt_id, filename, print_flag, freqs,
                       method=method, save=save, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """the MKAEROZ references no other cards"""
        return

    def repr_fields(self):
        """
        Gets the fields in their simplified form

        Returns
        -------
        fields : List[varies]
            the fields that define the card
        """
        # split the filename back into its two 8-character fields
        filename_a = self.filename[:8]
        filename_b = self.filename[8:]
        list_fields = ['MKAEROZ', self.sid, self.mach, self.method, self.flt_id,
                       self.save, filename_a, filename_b, self.print_flag] + self.freqs
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """
        Writes the card with the specified width and precision

        Parameters
        ----------
        size : int (default=8)
            size of the field; {8, 16}
        is_double : bool (default=False)
            is this card double precision

        Returns
        -------
        msg : str
            the string representation of the card
        """
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class PANLST1(Spline):
    """
    Defines a set of aerodynamic boxes by the LABEL entry in CAERO7 or BODY7
    bulk data cards.

    +---------+------+-------+-------+------+------+----+-----+-------+
    |    1    |  2   |   3   |   4   |  5   |  6   | 7  |  8  |   9   |
    +=========+======+=======+=======+======+======+====+=====+=======+
    | SPLINE1 | EID  | MODEL |  CP   | SETK | SETG | DZ | EPS |       |
    +---------+------+-------+-------+------+------+----+-----+-------+
    | SPLINE1 | 100  |       |       |  1   |  10  | 0. |     |       |
    +---------+------+-------+-------+------+------+----+-----+-------+

    +---------+-------+---------+------+------+------+----+-----+-------+
    |    1    |   2   |    3    |  4   |  5   |  6   | 7  |  8  |   9   |
    +=========+=======+=========+======+======+======+====+=====+=======+
    | PANLST1 | SETID | MACROID | BOX1 | BOX2 |      |    |     |       |
    +---------+-------+---------+------+------+------+----+-----+-------+
    | PANLST1 |  100  |   111   | 111  | 118  |      |    |     |       |
    +---------+-------+---------+------+------+------+----+-----+-------+

    PANLST1 is referred to by SPLINEi, ATTACH, LOADMOD, CPFACT, JETFRC, and/or
    AESURFZ bulk data card.
    """
    type = 'PANLST1'

    def __init__(self, eid, macro_id, box1, box2, comment=''):
        """
        Creates a PANLST1 card

        Parameters
        ----------
        eid : int
            the set id (SETID), referenced by SPLINEi/ATTACH/AESURFZ/...
        macro_id : int
            the id of the CAERO7/BODY7 macroelement the boxes belong to
        box1 / box2 : int
            the first/last aerodynamic box id (inclusive range)
        comment : str; default=''
            a comment for the card
        """
        # https://www.zonatech.com/Documentation/ZAERO_9.2_Users_3rd_Ed.pdf
        Spline.__init__(self)
        if comment:
            self.comment = comment
        self.eid = eid
        self.macro_id = macro_id  # points to CAERO7 / BODY7
        self.box1 = box1
        self.box2 = box2
        self.aero_element_ids = []
        self.caero_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a PANLST1 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        eid = integer(card, 1, 'eid')
        macro_id = integer(card, 2, 'macro_id')
        box1 = integer(card, 3, 'box1')
        box2 = integer(card, 4, 'box2')
        assert len(card) == 5, f'len(PANLST1 card) = {len(card):d}\ncard={card}'
        return PANLST1(eid, macro_id, box1, box2, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """cross-references the CAERO7/BODY7 and expands the box id range"""
        msg = ', which is required by PANLST1 eid=%s' % self.eid
        self.caero_ref = model.CAero(self.macro_id, msg=msg)
        # BOX1/BOX2 define an *inclusive* range of box ids (e.g., 111-118
        # is 8 boxes), so the half-open np.arange needs box2 + 1
        self.aero_element_ids = np.arange(self.box1, self.box2 + 1)

    def safe_cross_reference(self, model: BDF, xref_errors):
        self.cross_reference(model)

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : List[int/str]
            the fields that define the card
        """
        list_fields = ['PANLST1', self.eid, self.macro_id, self.box1, self.box2]
        return list_fields

    def repr_fields(self):
        """
        Gets the fields in their simplified form

        Returns
        -------
        fields : List[int/str]
            the fields that define the card
        """
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """Writes the card in small-field (8-character) format."""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class PANLST3(Spline):
    """
    Defines a set of aerodynamic boxes by the LABEL entry in CAERO7 or BODY7
    bulk data cards.

    +---------+------+-------+-------+------+------+----+-----+-------+
    |    1    |  2   |   3   |   4   |  5   |  6   | 7  |  8  |   9   |
    +=========+======+=======+=======+======+======+====+=====+=======+
    | SPLINE1 | EID  | MODEL |  CP   | SETK | SETG | DZ | EPS |       |
    +---------+------+-------+-------+------+------+----+-----+-------+
    | SPLINE1 | 100  |       |       |  1   |  10  | 0. |     |       |
    +---------+------+-------+-------+------+------+----+-----+-------+

    +---------+-------+--------+--------+--------+-----+----+-----+-------+
    |    1    |   2   |   3    |   4    |   5    |  6  | 7  |  8  |   9   |
    +=========+=======+========+========+========+=====+====+=====+=======+
    | PANLST3 | SETID | LABEL1 | LABEL2 | LABEL3 | etc |    |     |       |
    +---------+-------+--------+--------+--------+-----+----+-----+-------+
    | PANLST3 |  100  |  WING  | HTAIL  |        |     |    |     |       |
    +---------+-------+--------+--------+--------+-----+----+-----+-------+

    PANLST3 is referred to by SPLINEi, ATTACH, LOADMOD, CPFACT, JETFRC, and/or
    AESURFZ bulk data card.
    """
    type = 'PANLST3'

    def __init__(self, eid, panel_groups, comment=''):
        """
        Creates a PANLST3 card

        Parameters
        ----------
        eid : int
            the set id (SETID), referenced by SPLINEi/ATTACH/AESURFZ/...
        panel_groups : List[str]
            the LABELs of the CAERO7/BODY7 cards whose boxes form the set
        comment : str; default=''
            a comment for the card
        """
        # https://www.zonatech.com/Documentation/ZAERO_9.2_Users_3rd_Ed.pdf
        Spline.__init__(self)
        if comment:
            self.comment = comment
        self.eid = eid
        self.panel_groups = panel_groups  # points to CAERO7 / BODY7
        self.aero_element_ids = []
        self.caero_refs = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a PANLST3 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        eid = integer(card, 1, 'eid')
        # label the fields group_1, group_2, ... (the old counter was
        # never incremented, so every field was reported as group_1)
        panel_groups = [string(card, ifield, 'group_%i' % (ifield - 1))
                        for ifield in range(2, len(card))]
        assert len(card) > 2, 'len(PANLST3 card) = %i; no panel_groups were defined\ncard=%s' % (len(card), card)
        return PANLST3(eid, panel_groups, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """looks up each CAERO7/BODY7 by label and builds the box id list"""
        msg = ', which is required by PANLST3 eid=%s' % self.eid
        caero_refs = []
        aero_element_ids = []
        for caero_label in self.panel_groups:
            # ZONA macroelements are looked up by label, then by id
            caero_eid = model.zona.caero_to_name_map[caero_label]
            caero_ref = model.CAero(caero_eid, msg=msg)
            caero_refs.append(caero_ref)
            eid = caero_ref.eid
            npanels = caero_ref.npanels
            if npanels == 0:
                model.log.warning('skipping PANLST3 because there are 0 panels in:\n%r' % caero_ref)
                continue
            # box ids are sequential starting at the macroelement id
            aero_element_ids2 = range(eid, eid + npanels)
            assert len(aero_element_ids2) == npanels, npanels
            aero_element_ids += aero_element_ids2
        self.caero_refs = caero_refs
        self.aero_element_ids = aero_element_ids

    def safe_cross_reference(self, model: BDF, xref_errors):
        self.cross_reference(model)

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : List[int/str]
            the fields that define the card
        """
        list_fields = ['PANLST3', self.eid] + self.panel_groups
        return list_fields

    def repr_fields(self):
        """
        Gets the fields in their simplified form

        Returns
        -------
        fields : List[int/str]
            the fields that define the card
        """
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """Writes the card in small-field (8-character) format."""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class PAFOIL7(BaseCard):
    """
    Defines the airfoil cross-section (thickness/camber distributions and
    leading-edge radii) at the root and tip of a CAERO7 wing macroelement.

    +---------+----+------+------+-------+------+------+-------+------+
    |    1    |  2 |   3  |   4  |   5   |   6  |   7  |   8   |   9  |
    +=========+====+======+======+=======+======+======+=======+======+
    | PAFOIL7 | ID | ITAX | ITHR | ICAMR | RADR | ITHT | ICAMT | RADT |
    +---------+----+------+------+-------+------+------+-------+------+
    | PAFOIL7 |  1 | -201 |  202 |  203  |  0.1 |  211 |  212  |  0.1 |
    +---------+----+------+------+-------+------+------+-------+------+
    """
    type = 'PAFOIL7'

    def __init__(self, pid, i_axial,
                 i_thickness_root, i_camber_root, le_radius_root,
                 i_thickness_tip, i_camber_tip, le_radius_tip,
                 comment=''):
        """
        Creates a PAFOIL7 card, which defines an airfoil section.

        Parameters
        ----------
        pid : int
            PAFOIL7 identification number.
        i_axial : int
            Identification number of an AEFACT bulk data card used to
            specify the x-coordinate locations, in percentage of the
            chord length, where the thickness and camber are specified.
            ITAX can be a negative number (where ABS(ITAX) = AEFACT
            bulk data card identification number) to request linear
            interpolation.
        i_thickness_root / i_thickness_tip : int
            Identification number of an AEFACT bulk data card used to
            specify the half thickness of the airfoil at the wing
            root/tip.
        i_camber_root / i_camber_tip : int
            Identification number of an AEFACT bulk data card used to
            specify the camber of the airfoil at the wing root/tip.
        le_radius_root / le_radius_tip : float
            Leading edge radius at the root/tip normalized by the
            root/tip chord.
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.pid = pid
        self.i_axial = i_axial
        self.i_thickness_root = i_thickness_root
        self.i_camber_root = i_camber_root
        self.le_radius_root = le_radius_root
        self.i_camber_tip = i_camber_tip
        self.le_radius_tip = le_radius_tip
        self.i_thickness_tip = i_thickness_tip
        # AEFACT cross-references; populated by cross_reference()
        self.i_thickness_root_ref = None
        self.i_camber_root_ref = None
        self.i_thickness_tip_ref = None
        self.i_camber_tip_ref = None
        self.i_axial_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a PAFOIL7 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        pid = integer(card, 1, 'pid')
        i_axial = integer(card, 2, 'i_axial')
        i_thickness_root = integer(card, 3, 'i_thickness_root')
        i_camber_root = integer(card, 4, 'i_camber_root')
        le_radius_root = double_or_blank(card, 5, 'le_radius_root')
        i_thickness_tip = integer(card, 6, 'i_thickness_tip')
        i_camber_tip = integer(card, 7, 'i_camber_tip')
        le_radius_tip = double_or_blank(card, 8, 'le_radius_tip')
        assert len(card) <= 9, f'len(PAFOIL7 card) = {len(card):d}\ncard={card}'
        return PAFOIL7(pid, i_axial,
                       i_thickness_root, i_camber_root, le_radius_root,
                       i_thickness_tip, i_camber_tip, le_radius_tip,
                       comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by PAFOIL7 pid=%s' % self.pid
        # i_axial may be negative (negative requests linear interpolation),
        # so the AEFACT is looked up by its absolute value
        self.i_axial_ref = model.AEFact(abs(self.i_axial), msg=msg)
        self.i_thickness_root_ref = model.AEFact(self.i_thickness_root, msg=msg)
        self.i_camber_root_ref = model.AEFact(self.i_camber_root, msg=msg)
        self.i_thickness_tip_ref = model.AEFact(self.i_thickness_tip, msg=msg)
        self.i_camber_tip_ref = model.AEFact(self.i_camber_tip, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors):
        self.cross_reference(model)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # i_axial_ref was previously not cleared even though
        # cross_reference() sets it
        self.i_axial_ref = None
        self.i_thickness_root_ref = None
        self.i_camber_root_ref = None
        self.i_thickness_tip_ref = None
        self.i_camber_tip_ref = None

    def convert_to_nastran(self, model):
        """
        Nastran has no PAFOIL7 analog.  Should this be converted to a DMIG?

        +---------+----+------+------+-------+------+------+-------+------+
        |    1    |  2 |   3  |   4  |   5   |   6  |   7  |   8   |   9  |
        +=========+====+======+======+=======+======+======+=======+======+
        | PAFOIL7 | ID | ITAX | ITHR | ICAMR | RADR | ITHT | ICAMT | RADT |
        +---------+----+------+------+-------+------+------+-------+------+
        | PAFOIL7 |  1 | -201 |  202 |  203  |  0.1 |  211 |  212  |  0.1 |
        +---------+----+------+------+-------+------+------+-------+------+
        """
        raise NotImplementedError('PAFOIL7: convert_to_nastran')

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list
            The fields that define the card
        """
        list_fields = [
            'PAFOIL7', self.pid, self.i_axial,
            self.i_thickness_root, self.i_camber_root, self.le_radius_root,
            self.i_thickness_tip, self.i_camber_tip, self.le_radius_tip,
        ]
        return list_fields

    def repr_fields(self):
        """
        Gets the fields in their simplified form

        Returns
        -------
        fields : list
            The fields that define the card
        """
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """
        Writes the card with the specified width and precision

        Parameters
        ----------
        size : int (default=8)
            size of the field; {8, 16}
        is_double : bool (default=False)
            is this card double precision

        Returns
        -------
        msg : str
            the string representation of the card
        """
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class BODY7(BaseCard):
"""
Defines an aerodynamic body macroelement of a body-like component.
Similar to Nastran's CAERO2.
+--------+-----+-----+----+-----+------+-----+------+------+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+========+=====+=====+====+=====+======+=====+======+======+
| CAERO2 | EID | PID | CP | NSB | NINT | LSB | LINT | IGID |
+--------+-----+-----+----+-----+------+-----+------+------+
| | X1 | Y1 | Z1 | X12 | | | | |
+--------+-----+-----+----+-----+------+-----+------+------+
+-------+---------+-------+---------+--------+------+---------+---------+---------+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+=======+=========+=======+=========+========+======+=========+=========+=========+
| BODY7 | BID | LABEL | IPBODY7 | ACOORD | NSEG | IDMESH1 | IDMESH2 | IDMESH3 |
+-------+---------+-------+---------+--------+------+---------+---------+---------+
| | IDMESH4 | etc | | | | | | |
+-------+---------+-------+---------+--------+------+---------+---------+---------+
| BODY7 | 4 | BODY | 2 | 8 | 4 | 20 | 21 | 22 |
+-------+---------+-------+---------+--------+------+---------+---------+---------+
| | 23 | | | | | | | |
+-------+---------+-------+---------+--------+------+---------+---------+---------+
"""
type = 'BODY7'
def __init__(self, eid, label, pid, nseg, idmeshes, acoord=0, comment=''):
"""
Defines a BODY7 card, which defines a slender body
(e.g., fuselage/wingtip tank).
Parameters
----------
eid : int
body id
label : str
An arbitrary character string used to define the body.
pid : int; default=0
Identification number of PBODY7 bulk data card
(specifying body wake and/or inlet aerodynamic boxes)
acoord : int; default=0
Identification number of ACOORD bulk data card
(specifying body center line location and orientation)
nseg : int
Number of body segments
idmeshes : List[int]
Identification number of SEGMESH bulk data card (specifying body segments).
comment : str; default=''
a comment for the card
"""
BaseCard.__init__(self)
if comment:
self.comment = comment
#: Element identification number
self.eid = eid
self.label = label
#: Property identification number of a PAERO7 entry.
self.pid = pid
self.nseg = nseg
self.idmeshes = idmeshes
self.acoord = acoord
self.pid_ref = None
self.acoord_ref = None
self.ascid_ref = None
self.segmesh_refs = None
#@property
#def cp(self):
#return self.acoord
#@property
#def cp_ref(self):
#return self.acoord_ref
@classmethod
def add_card(cls, card, comment=''):
"""
Adds a BODY7 card from ``BDF.add_card(...)``
Parameters
----------
card : BDFCard()
a BDFCard object
comment : str; default=''
a comment for the card
"""
eid = integer(card, 1, 'eid')
label = string(card, 2, 'label')
assert len(card) >= 3, f'len(BODY7 card) = {len(card):d}\ncard={card}'
pid = integer_or_blank(card, 3, 'pid')
acoord = integer_or_blank(card, 4, 'acoord', 0)
nseg = integer_or_blank(card, 5, 'nseg')
idmeshes = []
for i, ifield in enumerate(range(6, len(card))):
segmesh = integer(card, ifield, 'idmesh_%i' % (i+1))
idmeshes.append(segmesh)
assert len(card) <= 13, f'len(BODY7 card) = {len(card):d}\ncard={card}'
return BODY7(eid, label, pid, nseg, idmeshes, acoord=acoord, comment=comment)
def ACoord(self):
if self.acoord_ref is not None:
return self.acoord_ref.cid
return self.acoord
def Pid(self):
if self.pid_ref is not None:
return self.pid_ref.pid
return self.pid
    @property
    def nboxes(self):
        # NOTE(review): neither self.nsb nor self.lsb_ref is assigned in
        # BODY7.__init__ as shown (they are CAERO2-style attributes), so this
        # property looks unusable without external setup -- confirm intent
        if self.nsb > 0:
            return self.nsb
        return len(self.lsb_ref.fractions) # AEFACT
    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by BODY7 eid=%s' % self.eid
        # each IDMESH points at a SEGMESH, which is stored with the properties
        self.segmesh_refs = []
        for segmesh_id in self.idmeshes:
            segmesh_ref = model.PAero(segmesh_id, msg=msg)  # links to SEGMESH/PAERO7
            self.segmesh_refs.append(segmesh_ref)
        #if self.pid is not None:
        #self.pid_ref = model.PAero(self.pid, msg=msg)  # links to PAERO7
        #self.acoord_ref = model.Coord(self.acoord, msg=msg)
        #if self.nsb == 0:
        #self.lsb_ref = model.AEFact(self.lsb, msg=msg)
        #if self.nint == 0:
        #self.lint_ref = model.AEFact(self.lint, msg=msg)
        if self.acoord is not None:
            self.acoord_ref = model.Coord(self.acoord, msg=msg)
        #self.ascid_ref = model.Acsid(msg=msg)
        # the aero coordinate system is hard-coded to coord 0 here
        self.ascid_ref = model.Coord(0, msg=msg)
    def safe_cross_reference(self, model: BDF, xref_errors):
        # NOTE(review): not actually "safe" -- this delegates to
        # cross_reference(), which raises if a SEGMESH/coord is missing
        self.cross_reference(model)
def uncross_reference(self) -> None:
"""Removes cross-reference links"""
self.pid = self.Pid()
self.acoord = self.ACoord()
self.pid_ref = None
self.acoord_ref = None
    def convert_to_nastran(self, model):
        """
        Converts the BODY7 (+SEGMESHes) into a Nastran CAERO2/PAERO2 and
        the AEFACTs they reference.

        +--------+-----+-----+----+-----+------+-----+------+------+
        |   1    |  2  |  3  | 4  |  5  |  6   |  7  |  8   |  9   |
        +========+=====+=====+====+=====+======+=====+======+======+
        | CAERO2 | EID | PID | CP | NSB | NINT | LSB | LINT | IGID |
        +--------+-----+-----+----+-----+------+-----+------+------+
        |        | X1  | Y1  | Z1 | X12 |      |     |      |      |
        +--------+-----+-----+----+-----+------+-----+------+------+

        +-------+---------+-------+---------+--------+------+---------+---------+---------+
        |   1   |    2    |   3   |    4    |   5    |  6   |    7    |    8    |    9    |
        +=======+=========+=======+=========+========+======+=========+=========+=========+
        | BODY7 |   BID   | LABEL | IPBODY7 | ACOORD | NSEG | IDMESH1 | IDMESH2 | IDMESH3 |
        +-------+---------+-------+---------+--------+------+---------+---------+---------+
        |       | IDMESH4 |  etc  |         |        |      |         |         |         |
        +-------+---------+-------+---------+--------+------+---------+---------+---------+
        | BODY7 |    4    | BODY  |    2    |   8    |  4   |   20    |   21    |   22    |
        +-------+---------+-------+---------+--------+------+---------+---------+---------+
        |       |   23    |       |         |        |      |         |         |         |
        +-------+---------+-------+---------+--------+------+---------+---------+---------+
        """
        # pick ids that won't collide with the existing properties/AEFACTs
        pid = max(model.paeros) + 1000
        igroup = 1
        orient = 'ZY'
        cp = 0
        #width = 1
        #nsb : AEFACT id for defining the location of the slender body elements
        #lsb : AEFACT id for defining the location of interference elements
        #nint : Number of slender body elements
        #lint : Number of interference elements
        aefact_id = len(model.aefacts) + 1
        xs_id = aefact_id
        half_width_id = aefact_id + 1
        theta1_id = aefact_id + 2
        theta2_id = aefact_id + 3
        lsb = xs_id
        lint = xs_id
        #+---------+--------+-------+------+---------+-------+------+------+-----+
        #|    1    |   2    |   3   |  4   |    5    |   6   |  7   |  8   |  9  |
        #+=========+========+=======+======+=========+=======+======+======+=====+
        #| SEGMESH | IDMESH | NAXIS | NRAD | NOSERAD | IAXIS |      |      |     |
        #|         | ITYPE1 |  X1   | CAM1 |   YR1   |  ZR1  | IDY1 | IDZ1 |     |
        #|         | ITYPE2 |  X2   | CAM2 |   YR2   |  ZR2  | IDY2 | IDZ2 |     |
        #|         | ITYPE3 |  X3   | CAM3 |   YR3   |  ZR3  | IDY3 | IDZ3 |     |
        #+---------+--------+-------+------+---------+-------+------+------+-----+
        xpoints = []
        half_widths = []
        try:
            origin_x, origin_y, origin_z = self.acoord_ref.origin
        except AttributeError:  # pragma: no cover
            print(self.get_stats())
            raise
        #x_offset = origin_x + x
        #y_offset = origin_y + y
        #z_offset = origin_z + z
        nsegmesh = len(self.segmesh_refs)
        if nsegmesh == 0:
            raise RuntimeError('Number of SEGMESH references on BODY7=0\n%s' % str(self))
        for isegmesh, segmesh in enumerate(self.segmesh_refs):
            itypes = segmesh.itypes
            #xs = segmesh.xs
            #idys_ref = segmesh.idys_ref
            #idzs_ref = segmesh.idzs_ref
            # pad missing AEFACT refs with None so the zip below lines up
            nitypes = len(itypes)
            idys_ref = [None] * nitypes if segmesh.idys_ref is None else segmesh.idys_ref
            idzs_ref = [None] * nitypes if segmesh.idzs_ref is None else segmesh.idzs_ref
            cambers = segmesh.cambers
            yrads = segmesh.ys
            zrads = segmesh.zs

            # interior segments share their first station with the previous
            # segment's last station, so drop the duplicate
            # what????
            if isegmesh in [0, nsegmesh - 1]:
                xs2 = segmesh.xs
                idys_ref2 = idys_ref
                idzs_ref2 = idzs_ref
            else:
                xs2 = segmesh.xs[1:]
                idys_ref2 = idys_ref[1:]
                idzs_ref2 = idzs_ref[1:]
            xpoints += xs2

            yz_mean = []
            thetas = self._get_thetas()
            for itype, camber, yrad, zrad, idy_ref, idz_ref in zip(
                    itypes, cambers, yrads, zrads, idys_ref2, idzs_ref2):
                out = self._get_body7_width_height_radius(
                    thetas, itype, camber, yrad, zrad, idy_ref, idz_ref)
                width, height, average_radius, ymeani, zmeani = out
                half_widths.append(average_radius)
                yz_mean.append([ymeani, zmeani])

        # I think you could area weight this and get a better mean...
        ymean, zmean = np.mean(yz_mean, axis=0)

        xpoints_local = [xi for xi in xpoints]
        assert len(half_widths) == len(xpoints_local)
        half_width = max(half_widths)
        AR = 1.0
        p1 = [origin_x, origin_y + ymean, origin_z + zmean]
        x12 = max(xpoints) - min(xpoints)
        dash = '-' * 80 + '\n'
        comment = dash
        comment += self.comment
        caero2 = CAERO2(self.eid, pid, igroup, p1, x12,
                        cp=cp,
                        nsb=0, lsb=lsb,
                        nint=0, lint=lint, comment=comment)
        #
        lrsb = half_width_id  # slender body
        lrib = half_width_id  # interference
        # theta arrays (AEFACTs)
        #lth1 = theta1_id
        #lth2 = theta2_id
        # 0-57 is excluded
        angles_body = [20.0, 40.0, 60.0, 80.0, 100.0, 120.0, 140.0, 160.0,
                       200.0, 220.0, 240.0, 260.0, 280.0, 300.0, 320.0, 340.0]
        angles_fin = [20.0, 40.0, 60.0, 80.0, 100.0, 120.0, 140.0, 160.0,
                      200.0, 220.0, 240.0, 260.0, 280.0, 300.0, 320.0, 340.0]
        aefact_xs = AEFACT(xs_id, xpoints_local, comment=dash+'Xs')
        aefact_width = AEFACT(half_width_id, half_widths, comment='half_widths')
        aefact_theta1 = AEFACT(theta1_id, angles_body, comment='angles_body')
        aefact_theta2 = AEFACT(theta2_id, angles_fin, comment='angles_fin')

        # which segments use theta1 array
        lth = [1, 10] #nsegments]  # t
        thi = [1]
        thn = [1]
        paero2 = PAERO2(pid, orient, half_width, AR, thi, thn,
                        lrsb=lrsb, lrib=lrib,
                        lth=lth, comment='')
        caero2.validate()
        paero2.validate()
        return caero2, paero2, aefact_xs, aefact_width, aefact_theta1, aefact_theta2
def _get_body7_width_height_radius(self, thetas: np.ndarray,
itype: int, camber: float,
yrad: float, zrad: float,
idy_ref, idz_ref) -> Tuple[float, float, float, float, float]:
if itype == 1:
# Body of Revolution
# Xi, CAMi, YRi
radius = yrad
aspect_ratio = 1.
yz = create_ellipse(aspect_ratio, radius, thetas=thetas)
ypoints = yz[:, 0]
zpoints = camber + yz[:, 1]
elif itype == 2:
# Elliptical body
height = zrad
width = yrad
aspect_ratio = height / width
radius = height
yz = create_ellipse(aspect_ratio, radius, thetas=thetas)
ypoints = yz[:, 0]
zpoints = yz[:, 1]
elif itype == 3:
# Arbitrary body using AEFACTss
try:
ypoints = idy_ref.fractions
zpoints = idz_ref.fractions
except AttributeError: # pragma: no cover
print('idy_ref = %s' % idy_ref)
print('idz_ref = %s' % idz_ref)
print(self.get_stats())
raise
else: # pramga: no cover
msg = f'Unsupported itype={itype} (must be 1/2/3)\n{str(self)}'
raise NotImplementedError(msg)
width = ypoints.max() - ypoints.min()
height = zpoints.max() - zpoints.min()
average_radius = (width + height) / 4.
#elliptical_area = pi * width * height
ymeani = ypoints.mean()
zmeani = zpoints.mean()
return width, height, average_radius, ymeani, zmeani
def _get_nthetas(self) -> int:
"""gets the number of thetas for the body"""
return self.segmesh_refs[0].nradial # npoints
#nthetas = 17
#for itype, idy_ref, unused_idz_ref in zip(itypes, idys_ref2, idzs_ref2):
#if itype == 3:
#fractions = idy_ref.fractions
#nthetas = len(fractions)
#break
#return nthetas
def _get_thetas(self) -> np.ndarray:
"""gets the thetas for the body"""
nthetas = self._get_nthetas()
thetas = np.radians(np.linspace(0., 360., nthetas))
return thetas
def get_points(self) -> List[np.ndarray, np.ndarray]:
"""creates a 1D representation of the BODY7"""
p1 = self.cp_ref.transform_node_to_global(self.p1)
p2 = p1 + self.ascid_ref.transform_vector_to_global(np.array([self.x12, 0., 0.]))
#print("x12 = %s" % self.x12)
#print("pcaero[%s] = %s" % (self.eid, [p1,p2]))
return [p1, p2]
@property
def npanels(self) -> int:
"""gets the number of panels for the body"""
nz = len(self.segmesh_refs)
unused_segmesh = self.segmesh_refs[0]
nthetas = self._get_nthetas()
npanels = nz * (nthetas - 1)
return npanels
def get_points_elements_3d(self):
"""
Gets the points/elements in 3d space as CQUAD4s
The idea is that this is used by the GUI to display CAERO panels.
TODO: doesn't support the aero coordinate system
"""
#paero2 = self.pid_ref
xyz = []
element = []
npoints = 0
for segmesh in self.segmesh_refs:
#print(segmesh)
xyzi, elementi = self._get_points_elements_3di(segmesh)
xyz.append(xyzi)
element.append(elementi + npoints)
npoints += xyzi.shape[0]
xyzs = np.vstack(xyz)
elements = np.vstack(element)
assert xyzs is not None, str(self)
assert elements is not None, str(self)
return xyzs, elements
def _get_points_elements_3di(self, segmesh: SEGMESH) -> Tuple[np.ndarray, np.ndarray]:
"""
points (nchord, nspan) float ndarray; might be backwards???
the points
elements (nquads, 4) int ndarray
series of quad elements
nquads = (nchord-1) * (nspan-1)
"""
#lengths_y = []
#lengths_z = []
nx = segmesh.naxial
ny = segmesh.nradial
xs = []
ys = []
zs = []
origin_x, origin_y, origin_z = self.acoord_ref.origin
nthetas = segmesh.nradial
thetas = np.radians(np.linspace(0., 360., nthetas))
for itype, x, yrad, zrad, camber, idy_ref, idz_ref in zip(
segmesh.itypes,
segmesh.xs, segmesh.ys, segmesh.zs,
segmesh.cambers,
segmesh.idys_ref, segmesh.idzs_ref):
xsi, ysi, zsi = self._get_xyzs_offset(
origin_x, origin_y, origin_z, thetas,
itype, x, yrad, zrad, camber, idy_ref, idz_ref)
xs.append(xsi)
ys.append(ysi)
zs.append(zsi)
xyz = np.vstack([
np.hstack(xs),
np.hstack(ys),
np.hstack(zs),
]).T
elements = elements_from_quad(nx, ny, dtype='int32') # nx,ny are points
return xyz, elements
def _get_xyzs_offset(self, origin_x, origin_y, origin_z, thetas,
itype: int, x: float, yrad: float, zrad: float, camber: float,
idy_ref, idz_ref) -> Tuple[List[float], np.ndarray, np.ndarray]:
y = 0.
z = 0.
if itype == 1:
# Body of Revolution
# Xi, CAMi, YRi
## TODO: doesn't consider camber
radius = yrad
aspect_ratio = 1.
yz = create_ellipse(aspect_ratio, radius, thetas=thetas)
ypoints = yz[:, 0]
zpoints = camber + yz[:, 1]
elif itype == 2:
# Elliptical body
# Xi, YRi, ZRi
height = zrad
width = yrad
aspect_ratio = height / width
radius = height
yz = create_ellipse(aspect_ratio, radius, thetas=thetas)
ypoints = yz[:, 0]
zpoints = yz[:, 1]
elif itype == 3:
# Arbitrary body using AEFACTss
# Xi, IDYi, IDZi
ypoints = idy_ref.fractions
zpoints = idz_ref.fractions
y = yrad
z = zrad
else: # pramga: no cover
msg = 'Unsupported itype=%s (must be 1/2/3)\n%s' % (itype, str(self))
raise NotImplementedError(msg)
assert len(ypoints) == len(zpoints), 'len(ypoints)=%s len(zpoints)=%s' % (len(ypoints), len(zpoints))
nnodes = len(ypoints)
x_offset = origin_x + x
y_offset = origin_y + y
z_offset = origin_z + z
xsi = [x_offset] * nnodes
ysi = y_offset + ypoints
zsi = z_offset + zpoints
return xsi, ysi, zsi
#def set_points(self, points):
#self.p1 = np.asarray(points[0])
#p2 = np.asarray(points[1])
#x12 = p2 - self.p1
#self.x12 = x12[0]
#def shift(self, dxyz):
#"""shifts the aero panel"""
#self.p1 += dxyz
def raw_fields(self) -> List[Any]:
"""
Gets the fields in their unmodified form
Returns
-------
fields : list
The fields that define the card
"""
list_fields = ['BODY7', self.eid, self.label, self.Pid(), self.ACoord(),
self.nseg] + self.idmeshes
return list_fields
def repr_fields(self) -> List[Any]:
"""
Gets the fields in their simplified form
Returns
-------
fields : list
The fields that define the card
"""
return self.raw_fields()
def write_card(self, size: int=8, is_double: bool=False) -> str:
card = self.repr_fields()
return self.comment + print_card_8(card)
class SEGMESH(BaseCard):
    """
    Defines a grid system for a body segment; referenced by the BODY7
    bulk data card.

    +---------+--------+-------+------+---------+-------+------+------+-----+
    |    1    |   2    |   3   |  4   |    5    |   6   |  7   |  8   |  9  |
    +=========+========+=======+======+=========+=======+======+======+=====+
    | SEGMESH | IDMESH | NAXIS | NRAD | NOSERAD | IAXIS |      |      |     |
    +---------+--------+-------+------+---------+-------+------+------+-----+
    |         | ITYPE1 |  X1   | CAM1 |   YR1   |  ZR1  | IDY1 | IDZ1 |     |
    +---------+--------+-------+------+---------+-------+------+------+-----+
    |         | ITYPE2 |  X2   | CAM2 |   YR2   |  ZR2  | IDY2 | IDZ2 |     |
    +---------+--------+-------+------+---------+-------+------+------+-----+
    |         | ITYPE3 |  X3   | CAM3 |   YR3   |  ZR3  | IDY3 | IDZ3 |     |
    +---------+--------+-------+------+---------+-------+------+------+-----+
    | SEGMESH |   2    |   3   |  6   |         |       |      |      |     |
    +---------+--------+-------+------+---------+-------+------+------+-----+
    |         |   1    |  0.0  | 0.0  |   0.0   |       |      |      |     |
    +---------+--------+-------+------+---------+-------+------+------+-----+
    |         |   1    |  1.0  | 0.0  |   0.5   |       |      |      |     |
    +---------+--------+-------+------+---------+-------+------+------+-----+
    |         |   3    |  2.0  |      |         |       | 103  | 104  |     |
    +---------+--------+-------+------+---------+-------+------+------+-----+
    """
    type = 'SEGMESH'

    @property
    def pid(self) -> int:
        """the segmesh id doubles as this card's 'property' id"""
        return self.segmesh_id

    @pid.setter
    def pid(self, segmesh_id: int) -> None:
        self.segmesh_id = segmesh_id

    def __init__(self, segmesh_id, naxial, nradial, nose_radius, iaxis,
                 itypes, xs, cambers, ys, zs, idys, idzs, comment=''):
        """
        Defines a SEGMESH card, which defines a cross-section for a PBODY7.

        Parameters
        ----------
        segmesh_id : int
            Body segment mesh identification number.
        naxial : int
            Number of axial stations (i.e., divisions) of the segment. (min=2).
        nradial : int
            Number of circumferential points of the segment (min=3).
        nose_radius : float
            Nose radius of blunt body.
            NOSERAD is active only if ZONA7U (Hypersonic Aerodynamic Method)
            is used (the METHOD entry of the MKAEROZ Bulk Data equals 2 or -2).
            Furthermore, NOSERAD is used only if the SEGMESH bulk data card is
            the first segment defined in the BODY7 bulk data card.
        iaxis : int
            The index of the axial station where the blunt nose ends.
            IAXIS is active only if ZONA7U (Hypersonic Aerodynamic
            Method) is used.
        itypes : List[int]
            Type of input used to define the circumferential box cuts
            - 1 body of revolution
            - 2 elliptical body
            - 3 arbitrary body
        xs : List[float]
            X-location of the axial station; Xi must be in ascending
            order. (i.e., Xi+1 > Xi)
        cambers : List[float]
            Body camber at the Xi axial station. (Real)
        ys : List[float]
            Body cross-sectional radius if ITYPEi = 1 or the semi-axis length
            of the elliptical body parallel to the Y-axis if ITYPEi=2.
        zs : List[float]
            The semi-axis length of the elliptical body parallel to the Z-axis.
            Used only if ITYPEi=2. (Real)
        idys : List[int]
            Identification number of AEFACT bulk data card that specifies
            NRAD number of the Y-coordinate locations of the circumferential
            points at the Xi axial station. Use only if ITYPEi=3.
        idzs : List[int]
            Identification number of AEFACT bulk data card that specifies
            NRAD number of the Z-coordinate locations of the circumferential
            points at the Xi axial station. Use only if ITYPEi=3.
        comment : str; default=''
            a comment for the card
        """
        if comment:
            self.comment = comment
        BaseCard.__init__(self)
        self.segmesh_id = segmesh_id
        self.naxial = naxial
        self.nradial = nradial
        self.nose_radius = nose_radius
        self.iaxis = iaxis
        self.itypes = itypes
        self.cambers = cambers
        self.xs = xs
        self.ys = ys
        self.zs = zs
        self.idys = idys
        self.idzs = idzs
        # AEFACT object lists; populated by cross_reference()
        self.idys_ref = None
        self.idzs_ref = None
        self.pid_ref = None

    def validate(self):
        """checks that the ITYPEs are 1/2/3 and the Xi are strictly ascending"""
        for i, itype in enumerate(self.itypes):
            assert itype in [1, 2, 3], 'itypes[%i]=%s is invalid; itypes=%s' % (i, itype, self.itypes)

        # bug fix: xi_old was never advanced, so every station was compared
        # against x1 only; now each Xi is checked against its predecessor,
        # which enforces the documented Xi+1 > Xi requirement
        xi_old = self.xs[0]
        for i, xi in enumerate(self.xs[1:]):
            if xi <= xi_old:
                raise RuntimeError('xs=%s must be in ascending order\nx%i=%s x%i=%s (old)\n%s' % (
                    self.xs, i+2, xi, i+1, xi_old, str(self)))
            xi_old = xi

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SEGMESH card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        segmesh_id = integer(card, 1, 'segmesh_id')
        naxial = integer(card, 2, 'naxial')
        nradial = integer(card, 3, 'nradial')
        nose_radius = double_or_blank(card, 4, 'nose_radius')
        iaxis = integer_or_blank(card, 5, 'iaxis')
        itypes = []
        xs = []
        ys = []
        zs = []
        cambers = []
        idys = []
        idzs = []
        assert len(card) >= 9, f'len(SEGMESH card) = {len(card):d}\ncard={card}'
        # each axial station occupies one continuation line of 8 fields
        for counter, ifield in enumerate(range(9, len(card), 8)):
            itype = integer(card, ifield, 'itype%i' % (counter+1))
            x = double_or_blank(card, ifield+1, 'itype%i' % (counter+1), 0.)
            camber = double_or_blank(card, ifield+2, 'camber%i' % (counter+1), 0.)
            y = double_or_blank(card, ifield+3, 'y%i' % (counter+1), 0.)
            z = double_or_blank(card, ifield+4, 'z%i' % (counter+1), 0.)
            idy = integer_or_blank(card, ifield+5, 'idy%i' % (counter+1))
            idz = integer_or_blank(card, ifield+6, 'idz%i' % (counter+1))
            itypes.append(itype)
            xs.append(x)
            ys.append(y)
            zs.append(z)
            cambers.append(camber)
            idys.append(idy)
            idzs.append(idz)
        assert len(itypes) == naxial, 'naxial=%s nradial=%s len(itypes)=%s' % (naxial, nradial, len(itypes))
        return SEGMESH(segmesh_id, naxial, nradial, nose_radius, iaxis,
                       itypes, xs, cambers, ys, zs, idys, idzs, comment=comment)

    def Cp(self):
        # NOTE(review): neither cp nor cp_ref is ever assigned on SEGMESH
        # (see __init__); calling this raises AttributeError.  It appears to
        # have been copied from a CAERO-style card -- confirm before use.
        if self.cp_ref is not None:
            return self.cp_ref.cid
        return self.cp

    def Pid(self):
        """returns the segmesh/property id, preferring the xref'd object"""
        if self.pid_ref is not None:
            return self.pid_ref.pid
        return self.pid

    def cross_reference(self, model: BDF) -> None:
        """resolves the IDYi/IDZi AEFACT ids into objects"""
        msg = ', which is required by SEGMESH eid=%s' % self.pid
        idys_ref = []
        idzs_ref = []
        for idy in self.idys:
            idy_ref = None
            if idy is not None and isinstance(idy, integer_types):
                idy_ref = model.AEFact(idy, msg=msg)
                assert len(idy_ref.fractions) > 2, 'idy_ref=%s' % idy_ref
            idys_ref.append(idy_ref)

        for idz in self.idzs:
            idz_ref = None
            if idz is not None and isinstance(idz, integer_types):
                idz_ref = model.AEFact(idz, msg=msg)
                assert len(idz_ref.fractions) > 2, 'idz_ref=%s' % idz_ref
            idzs_ref.append(idz_ref)
        self.idys_ref = idys_ref
        self.idzs_ref = idzs_ref
        #print(self.idys_ref)

    def safe_cross_reference(self, model: BDF, xref_errors):
        # no safe variant is implemented; falls through to the strict xref
        return self.cross_reference(model)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.pid = self.Pid()
        #self.cp = self.Cp()
        #self.idys = idys_ref
        #self.idzs = idzs_ref
        self.pid_ref = None
        #self.cp_ref = None
        self.idys_ref = None
        self.idzs_ref = None

    #def set_points(self, points):
        #self.p1 = np.asarray(points[0])
        #p2 = np.asarray(points[1])
        #x12 = p2 - self.p1
        #self.x12 = x12[0]

    def shift(self, dxyz):
        """shifts the aero panel"""
        # NOTE(review): SEGMESH has no p1 attribute, so this raises
        # AttributeError; looks copied from a panel card -- confirm intent
        self.p1 += dxyz

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list
            The fields that define the card
        """
        list_fields = [
            'SEGMESH', self.segmesh_id, self.naxial, self.nradial, self.nose_radius,
            self.iaxis, None, None, None]
        for itype, x, camber, y, z, idy, idz in zip(self.itypes, self.xs, self.cambers,
                                                    self.ys, self.zs, self.idys, self.idzs):
            list_fields += [itype, x, camber, y, z, idy, idz, None]
        return list_fields

    def repr_fields(self):
        """
        Gets the fields in their simplified form

        Returns
        -------
        fields : list
            The fields that define the card
        """
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card; size/is_double are accepted but not used"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class CAERO7(BaseCard):
    """
    Totally wrong...

    Defines an aerodynamic macro element (panel) in terms of two leading edge
    locations and side chords. This is used for Doublet-Lattice theory for
    subsonic aerodynamics and the ZONA51 theory for supersonic aerodynamics.

    +--------+-----+-------+--------+-------+--------+--------+--------+---------+
    |   1    |  2  |   3   |   4    |   5   |   6    |   7    |   8    |    9    |
    +========+=====+=======+========+=======+========+========+========+=========+
    | CAERO7 | WID | LABEL | ACOORD | NSPAN | NCHORD | LSPAN  | ZTAIC  | PAFOIL7 |
    +--------+-----+-------+--------+-------+--------+--------+--------+---------+
    |        | XRL |  YRL  |  ZRL   |  RCH  | LRCHD  | ATTCHR | ACORDR |         |
    +--------+-----+-------+--------+-------+--------+--------+--------+---------+
    |        | XTL |  YTL  |  ZTL   |  TCH  | LTCHD  | ATTCHT | ACORDT |         |
    +--------+-----+-------+--------+-------+--------+--------+--------+---------+

    ::

      1
      | \
      |   \
      |     \
      |      4
      |      |
      |      |
      2------3

    Attributes
    ----------
    eid : int
        element id
    label : str
        the panel name
    cp : int, CORDx
        int : coordinate system
        CORDx : Coordinate object (xref)
    nspan : int
        int > 0 : N spanwise boxes distributed evenly
        int = 0 : use lspan
    nchord : int
        int > 0 : N chordwise boxes distributed evenly
    lspan : int, AEFACT
        int > 0 : AEFACT reference for non-uniform nspan
        int = 0 : use nspan
        AEFACT : AEFACT object (xref)
    p1 : (1, 3) ndarray float
        xyz location of point 1 (leading edge; inboard)
    p4 : (1, 3) ndarray float
        xyz location of point 4 (leading edge; outboard)
    x12 : float
        distance along the flow direction from node 1 to node 2; (typically x, root chord)
    x43 : float
        distance along the flow direction from node 4 to node 3; (typically x, tip chord)
    comment : str; default=''
        a comment for the card
    """
    type = 'CAERO7'
    _field_map = {
        1: 'sid', 2:'pid', 3:'cp', 4:'nspan', 5:'nchord',
        6:'lspan', 7:'lchord', 8:'igid', 12:'x12', 16:'x43',
    }

    def __init__(self, eid, label, p1, x12, p4, x43,
                 cp=0, nspan=0, nchord=0, lspan=0, p_airfoil=None, ztaic=None, comment=''):
        """
        Defines a CAERO7 card, which defines a simplified lifting surface
        (e.g., wing/tail).

        Parameters
        ----------
        eid : int
            element id
        label : str
            the panel name
        p1 : (1, 3) ndarray float
            xyz location of point 1 (leading edge; inboard)
        p4 : (1, 3) ndarray float
            xyz location of point 4 (leading edge; outboard)
        x12 : float
            distance along the flow direction from node 1 to node 2; (typically x, root chord)
        x43 : float
            distance along the flow direction from node 4 to node 3; (typically x, tip chord)
        cp : int, CORDx; default=0
            int : coordinate system
            CORDx : Coordinate object (xref)
        nspan : int; default=0
            int > 0 : N spanwise boxes distributed evenly
            int = 0 : use lspan
        nchord : int; default=0
            int > 0 : N chordwise boxes distributed evenly
        lspan : int, AEFACT; default=0
            int > 0 : AEFACT reference for non-uniform nspan
            int = 0 : use nspan
            AEFACT : AEFACT object (xref)
        p_airfoil : int; default=None
            PAFOIL7 id
        ztaic : int; default=None
            ZTAIC id
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if cp is None:
            cp = 0
        if nspan is None:
            nspan = 0
        if nchord is None:
            nchord = 0
        p1 = np.asarray(p1)
        p4 = np.asarray(p4)

        if comment:
            self.comment = comment
        #: Element identification number
        self.eid = eid

        #: Property identification number of a PAERO2 entry.
        self.label = label

        #: Coordinate system for locating point 1.
        self.cp = cp
        self.nspan = nspan
        self.nchord = nchord
        self.lspan = lspan
        self.p_airfoil = p_airfoil
        self.p1 = p1
        self.x12 = x12
        self.p4 = p4
        self.x43 = x43
        self.ztaic = ztaic
        # xref'd objects; populated by (safe_)cross_reference()
        self.pid_ref = None
        self.cp_ref = None
        self.lchord_ref = None
        self.lspan_ref = None
        self.ascid_ref = None
        self.box_ids = None
        self.pafoil_ref = None
        #self._init_ids() #TODO: make this work here?

    def validate(self):
        """checks that nspan/nchord define a usable mesh"""
        msg = ''
        is_failed = False
        if self.nspan == 0 and self.lspan == 0:
            msg += 'NSPAN or LSPAN must be greater than 0; nspan=%r nlspan=%s\n' % (
                self.nspan, self.lspan)
            is_failed = True
        if self.nspan <= 0:
            msg += 'NSPAN must be greater than 0; nspan=%r\n' % (
                self.nspan)
            is_failed = True

        if self.nchord <= 0:
            msg += 'NCHORD must be greater than 0; nchord=%r\n' % (
                self.nchord)
            is_failed = True
        if is_failed:
            msg += str(self)
            raise ValueError(msg)
        assert len(self.p1) == 3, 'p1=%s' % self.p1
        assert len(self.p4) == 3, 'p4=%s' % self.p4
        assert self.nspan < 100, 'nspan=%s\n%s' % (self.nspan, str(self))
        assert self.nchord < 100, 'nchord=%s\n%s' % (self.nchord, str(self))

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a CAERO7 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card

        CAERO7 100101 RIBW1          2     25
               998.904  39.821 230.687 1298.159  310001
               1121.821 61.134 236.560 1175.243

        CAERO7 100201 RIBW2          2     25
               1121.821 61.134 236.560 1175.243
               1244.258 84.704 243.625 1052.805
        """
        eid = integer(card, 1, 'eid')
        name = string(card, 2, 'name')
        cp = integer_or_blank(card, 3, 'cp', 0)
        nspan = integer_or_blank(card, 4, 'nspan', 0)
        nchord = integer_or_blank(card, 5, 'nchord', 0)
        lspan = integer_or_blank(card, 6, 'aefact_lchord', 0)
        # NOTE(review): a non-zero LSPAN id is deliberately(?) discarded here,
        # so only the evenly-spaced NSPAN path survives -- confirm
        if lspan:
            lspan = 0
        ztaic = integer_or_blank(card, 7, 'ztaic')
        p_airfoil = integer_or_blank(card, 8, 'aefact')
        #assert cp == 0
        #igroup = integer(card, 8, 'igid')

        x1 = double_or_blank(card, 9, 'x1', 0.0)
        y1 = double_or_blank(card, 10, 'y1', 0.0)
        z1 = double_or_blank(card, 11, 'z1', 0.0)
        p1 = np.array([x1, y1, z1])
        x12 = double_or_blank(card, 12, 'x12', 0.)
        unused_lchord_root = integer_or_blank(card, 13, 'lchord_root')
        unused_attach_root = integer_or_blank(card, 14, 'attach_root')
        unused_achord_root = integer_or_blank(card, 15, 'achord_root')

        x4 = double_or_blank(card, 17, 'x4', 0.0)
        y4 = double_or_blank(card, 18, 'y4', 0.0)
        z4 = double_or_blank(card, 19, 'z4', 0.0)
        p4 = np.array([x4, y4, z4])
        x43 = double_or_blank(card, 20, 'x43', 0.)
        unused_lchord_tip = integer_or_blank(card, 21, 'lchord_tip')
        unused_attach_tip = integer_or_blank(card, 22, 'attach_tip')
        unused_achord_tip = integer_or_blank(card, 23, 'achord_tip')
        assert len(card) <= 23, f'len(CAERO7 card) = {len(card):d}\ncard={card}'
        return CAERO7(eid, name, p1, x12, p4, x43,
                      cp=cp, nspan=nspan, nchord=nchord, lspan=lspan,
                      p_airfoil=p_airfoil, ztaic=ztaic,
                      comment=comment)

    def flip_normal(self):
        """swaps the inboard/outboard edges, flipping the panel normal"""
        self.p1, self.p4 = self.p4, self.p1
        self.x12, self.x43 = self.x43, self.x12

    def _init_ids(self, dtype='int32'):
        """
        Fill `self.box_ids` with the sub-box ids. Shape is (nchord, nspan)

        NOTE(review): the zeros array is (nchord, nspan), but the arange is
        reshaped to (nspan, nchord) -- the orientations disagree; confirm
        which one downstream code expects.
        """
        nchord, nspan = self.shape
        assert nchord >= 1, 'nchord=%s' % nchord
        assert nspan >= 1, 'nspan=%s' % nspan
        self.box_ids = np.zeros((nchord, nspan), dtype=dtype)
        npanels = nchord * nspan
        try:
            self.box_ids = np.arange(self.eid, self.eid + npanels,
                                     dtype=dtype).reshape(nspan, nchord)  # .T
        except OverflowError:
            if dtype == 'int64':
                # we already tried int64
                msg = 'eid=%s nspan=%s nchord=%s' % (
                    self.eid, nspan, nchord)
                raise OverflowError(msg)
            self._init_ids(dtype='int64')

    def Cp(self):
        """returns the coordinate system id, preferring the xref'd object"""
        if self.cp_ref is not None:
            return self.cp_ref.cid
        return self.cp

    #def Pid(self):
        #if self.pid_ref is not None:
            #return self.pid_ref.pid
        #return self.pid

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by CAERO7 eid=%s' % self.eid
        #self.pid_ref = model.PAero(self.pid, msg=msg)
        self.cp_ref = model.Coord(self.cp, msg=msg)
        self.ascid_ref = model.Acsid(msg=msg)

        #if self.nchord == 0:
            #assert isinstance(self.lchord, integer_types), self.lchord
            #self.lchord_ref = model.AEFact(self.lchord, msg)
        if self.nspan == 0:
            assert isinstance(self.lspan, integer_types), self.lspan
            self.lspan_ref = model.AEFact(self.lspan, msg)
        if self.p_airfoil:
            self.pafoil_ref = model.zona.PAFOIL(self.p_airfoil, msg)
        self._init_ids()

    def safe_cross_reference(self, model: BDF, xref_errors):
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by CAERO1 eid=%s' % self.eid
        #try:
            #self.pid_ref = model.PAero(self.pid, msg=msg)
        #except KeyError:
            #pass

        self.cp_ref = model.safe_coord(self.cp, self.eid, xref_errors, msg=msg)
        self.ascid_ref = model.safe_acsid(msg=msg)

        #if self.nchord == 0:
            #assert isinstance(self.lchord, integer_types), self.lchord
            #self.lchord_ref = model.safe_aefact(self.lchord, self.eid, xref_errors, msg)
        if self.nspan == 0:
            assert isinstance(self.lspan, integer_types), self.lspan
            self.lspan_ref = model.safe_aefact(self.lspan, self.eid, xref_errors, msg)
        self._init_ids()

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        #self.pid = self.Pid()
        self.cp = self.Cp()
        self.lspan = self.get_LSpan()
        #self.pid_ref = None
        self.cp_ref = None
        self.lspan_ref = None
        self.ascid_ref = None

    def convert_to_nastran(self):
        """
        Converts the ZONA CAERO7 into an MSC CAERO1.

        +--------+-----+-----+----+-------+--------+--------+--------+------+
        |   1    |  2  |  3  | 4  |   5   |   6    |    7   |   8    |   9  |
        +========+=====+=====+====+=======+========+========+========+======+
        | CAERO1 | EID | PID | CP | NSPAN | NCHORD |  LSPAN | LCHORD | IGID |
        +--------+-----+-----+----+-------+--------+--------+--------+------+
        |        |  X1 | Y1  | Z1 | X12   | X4     | Y4     | Z4     | X43  |
        +--------+-----+-----+----+-------+--------+--------+--------+------+
        """
        pid = 1
        igroup = 1
        caero = CAERO1(self.eid, pid, igroup, self.p1, self.x12,
                       self.p4, self.x43, cp=self.cp,
                       nspan=self.nspan, lspan=self.lspan,
                       nchord=self.nchord, lchord=0,
                       comment=self.comment)
        caero.validate()
        return caero

    @property
    def min_max_eid(self):
        """
        Gets the min and max element ids of the CAERO card

        Returns
        -------
        min_max_eid : (2, ) list
            The [min_eid, max_eid]
        """
        nchord, nspan = self.shape
        return [self.eid, self.eid + nchord * nspan]

    def get_points(self):
        """
        Get the 4 corner points for the CAERO card

        Returns
        -------
        p1234 : (4, 3) list
             List of 4 corner points in the global frame
        """
        if self.cp_ref is None and self.cp == 0:
            p1 = self.p1
            p4 = self.p4
        else:
            p1 = self.cp_ref.transform_node_to_global(self.p1)
            p4 = self.cp_ref.transform_node_to_global(self.p4)

        if self.ascid_ref is None:
            # yes, this really does list + array addition
            p2 = p1 + np.array([self.x12, 0., 0.])
            p3 = p4 + np.array([self.x43, 0., 0.])
        else:
            p2 = p1 + self.ascid_ref.transform_vector_to_global(np.array([self.x12, 0., 0.]))
            p3 = p4 + self.ascid_ref.transform_vector_to_global(np.array([self.x43, 0., 0.]))
        return [p1, p2, p3, p4]

    def get_box_index(self, box_id):
        """
        Get the index of ``self.box_ids`` that coresponds to the given box id.

        Parameters
        -----------
        box_id : int
            Box id to ge tthe index of.

        Returns
        --------
        index : tuple
            Index of ``self.box_ids`` that coresponds to the given box id.
        """
        if box_id not in self.box_ids:
            self._box_id_error(box_id)
        index = np.where(self.box_ids == box_id)
        index = (index[0][0], index[1][0])
        return index

    def get_box_quarter_chord_center(self, box_id):
        """
        The the location of the quarter chord of the box along the centerline.

        Parameters
        -----------
        box_id : int
            Box id.

        Returns
        --------
        xyz_quarter_chord : ndarray
            Location of box quater chord in global.
        """
        return self._get_box_x_chord_center(box_id, 0.25)

    def get_box_mid_chord_center(self, box_id):
        """
        The the location of the mid chord of the box along the centerline.

        Parameters
        -----------
        box_id : int
            Box id.

        Returns
        --------
        xyz_mid_chord : ndarray
            Location of box mid chord in global.
        """
        return self._get_box_x_chord_center(box_id, 0.5)

    def _get_box_x_chord_center(self, box_id, x_chord):
        """The the location of the x_chord of the box along the centerline."""
        raise NotImplementedError('CAERO7: _get_box_x_chord_center')
        #if self.lchord != 0 or self.lspan != 0:
            #raise NotImplementedError()
        #ichord, ispan = self.get_box_index(box_id)

        #le_vector = self.p4 - self.p1
        #delta_xyz = le_vector * ((ispan + 0.5)/self.nspan)
        #yz = delta_xyz[1:3] + self.p1[1:3]
        #chord = ((ispan + 0.5)/self.nspan) * (self.x43 - self.x12) + self.x12
        #x = (ichord + x_chord)/self.nchord * chord + self.p1[0] + delta_xyz[0]
        #return np.array([x, yz[0], yz[1]])

    def _box_id_error(self, box_id):
        """
        Raise box_id IndexError.
        """
        msg = '%i not in range of aero box ids\nRange: %i to %i' % (box_id, self.box_ids[0, 0],
                                                                    self.box_ids[-1, -1])
        raise IndexError(msg)

    @property
    def npanels(self):
        """total number of sub-panels (boxes) on this macro element"""
        nchord, nspan = self.shape
        return nchord * nspan

    @property
    def shape(self):
        """returns (nelements_nchord, nelements_span)"""
        if self.nchord == 0:
            x = self.lchord_ref.fractions
            nchord = len(x) - 1
        else:
            nchord = self.nchord

        if self.nspan == 0:
            y = self.lspan_ref.fractions
            nspan = len(y) - 1
        else:
            nspan = self.nspan
        if nchord < 1 or nspan < 1:
            # bug fix: the message referenced self.lchord, which CAERO7 never
            # sets, so an AttributeError masked this RuntimeError
            msg = 'CAERO7 eid=%s nchord=%s nspan=%s lspan=%s' % (
                self.eid, self.nchord, self.nspan, self.lspan)
            raise RuntimeError(msg)
        return nchord, nspan

    def get_npanel_points_elements(self):
        """
        Gets the number of sub-points and sub-elements for the CAERO card

        Returns
        -------
        npoints : int
            The number of nodes for the CAERO
        nelmements : int
            The number of elements for the CAERO
        """
        nchord, nspan = self.shape
        nelements = nchord * nspan
        npoints = (nchord + 1) * (nspan + 1)
        return npoints, nelements

    @property
    def xy(self):
        """
        Returns
        -------
        x : (nchord,) ndarray
            The percentage x location in the chord-wise direction of each panel
        y : (nspan,) ndarray
            The percentage y location in the span-wise direction of each panel
        """
        if self.nchord == 0:
            x = self.lchord_ref.fractions
            nchord = len(x) - 1
        else:
            nchord = self.nchord
            x = np.linspace(0., 1., nchord + 1)

        if self.nspan == 0:
            y = self.lspan_ref.fractions
            nspan = len(y) - 1
        else:
            nspan = self.nspan
            y = np.linspace(0., 1., nspan + 1)

        if nchord < 1 or nspan < 1:
            # bug fix: same self.lchord AttributeError as in `shape`
            msg = 'CAERO7 eid=%s nchord=%s nspan=%s lspan=%s' % (
                self.eid, self.nchord, self.nspan, self.lspan)
            raise RuntimeError(msg)
        return x, y

    def panel_points_elements(self):
        """
        Gets the sub-points and sub-elements for the CAERO card

        Returns
        -------
        points : (nnodes,3) ndarray of floats
            the array of points
        elements : (nelements,4) ndarray of integers
            the array of point ids
        """
        p1, p2, p3, p4 = self.get_points()
        x, y = self.xy
        # We're reordering the points so we get the node ids and element ids
        # to be consistent with Nastran.  This is only useful if you're plotting
        # aero panel forces
        #
        # this gives us chordwise panels and chordwise nodes
        return points_elements_from_quad_points(p1, p4, p3, p2, y, x, dtype='int32')

    def set_points(self, points):
        """sets the 4 corner points (p2/p3 are converted to x12/x43)"""
        self.p1 = points[0]
        p2 = points[1]
        p3 = points[2]
        self.p4 = points[3]
        self.x12 = p2[0] - self.p1[0]
        self.x43 = p3[0] - self.p4[0]
        assert self.x12 >= 0., 'p1=%s p2=%s' % (self.p1, p2)
        assert self.x43 >= 0., 'p4=%s p3=%s' % (self.p4, p3)
        assert self.x12 > 0. or self.x43 > 0., 'points=%s' % (points)

    def shift(self, dxyz):
        """shifts the aero panel"""
        self.p1 += dxyz
        self.p4 += dxyz

    def plot(self, ax: AxesSubplot) -> None:
        """plots the panels"""
        points, elements = self.panel_points_elements()
        for eid, elem in enumerate(elements[:, [0, 1, 2, 3, 0]]):
            pointsi = points[elem][:, [0, 1]]
            x = pointsi[:, 0]
            y = pointsi[:, 1]
            ax.plot(x, y, color='b')
            box_id = self.eid + eid
            centroid = (x[:-1].sum() / 4, y[:-1].sum() / 4)
            elem_name = f'e{box_id}'
            ax.annotate(elem_name, centroid, ha='center')

            for pid, point in zip(elem, pointsi):
                point_name = f'p{pid}'
                ax.annotate(point_name, point, ha='center')

    def get_LSpan(self):
        """returns the LSPAN AEFACT id, preferring the xref'd object"""
        if self.lspan_ref is not None:
            return self.lspan_ref.sid
        return self.lspan

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list
            the fields that define the card
        """
        cp = self.Cp()
        nspan = self.nspan
        nchord = self.nchord
        lspan = self.get_LSpan()
        list_fields = (
            ['CAERO7', self.eid, self.label, cp, nspan, nchord, lspan, self.ztaic,
             self.p_airfoil,] +
            list(self.p1) + [self.x12, None, None, None, None] +
            list(self.p4) + [self.x43, None, None, None, None])
        return list_fields

    def repr_fields(self):
        """
        Gets the fields in their simplified form

        Returns
        -------
        fields : LIST
            The fields that define the card
        """
        cp = set_blank_if_default(self.Cp(), 0)
        nspan = set_blank_if_default(self.nspan, 0)
        nchord = set_blank_if_default(self.nchord, 0)
        #lchord = set_blank_if_default(self.get_LChord(), 0)
        lspan = set_blank_if_default(self.get_LSpan(), 0)
        # NOTE(review): lspan is unconditionally overwritten with 0 here,
        # matching the discard in add_card -- confirm
        #lchord = 0
        lspan = 0
        list_fields = (
            ['CAERO7', self.eid, self.label, cp, nspan, nchord, lspan, self.ztaic,
             self.p_airfoil,] +
            list(self.p1) + [self.x12, None, None, None, None] +
            list(self.p4) + [self.x43, None, None, None, None])
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card; size/is_double are accepted but not used"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class TRIM_ZONA(BaseCard):
    """
    Specifies constraints for aeroelastic trim variables.

    This is the ZONA/ZAERO flavor of the TRIM card; ``convert_to_nastran``
    maps it onto a Nastran TRIM card.
    """
    type = 'TRIM_ZONA'
    # maps card field index -> attribute name (for field-level updates)
    _field_map = {
        1: 'sid', 2:'mach', 3:'q', 8:'aeqr',
    }
    def __init__(self, sid, mkaeroz, q, cg, true_g, nxyz, pqr, loadset,
                 labels, uxs, comment=''):
        """
        Creates a TRIM card for a static aero (144) analysis.

        Parameters
        ----------
        sid : int
            the trim id; referenced by the Case Control TRIM field
        mkaeroz : int
            id of the MKAEROZ card that supplies the Mach number
        q : float
            dynamic pressure
        cg : List[float]
            center-of-gravity location [x, y, z]
        true_g : str
            'TRUE' or 'G' (see validate())
        nxyz : List[float/str]
            [nx, ny, nz]; each entry is a float or 'FREE'/'NONE'
        pqr : List[float/str]
            [roll_rate, pitch_rate, yaw_rate]; each entry is a float
            or 'FREE'/'NONE'
        loadset : int
            Identification number of a SET1 or SETADD bulk data card that
            specifies a set of identification numbers of TRIMFNC or
            TRIMADD bulk data card. All values of the trim functions
            defined by the TRIMFNC or TRIMADD bulk data card are computed
            and printed out.
        labels : List[int]
            ids of the TRIMVAR cards being constrained
            (looked up in model.zona.trimvar during conversion)
        uxs : List[float/str]
            values corresponding to labels; 'FREE' for free variables
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        #: Trim set identification number. (Integer > 0)
        self.sid = sid
        self.mkaeroz = mkaeroz
        #: Dynamic pressure. (Real > 0.0)
        self.q = q
        self.cg = cg
        self.nxyz = nxyz
        self.true_g = true_g
        self.pqr = pqr
        self.loadset = loadset
        #: The label identifying aerodynamic trim variables defined on an
        #: AESTAT or AESURF entry.
        self.labels = labels
        #: The magnitude of the aerodynamic extra point degree-of-freedom.
        #: (Real)
        self.uxs = uxs
    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a TRIM card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        mkaeroz = integer(card, 2, 'mkaeroz')
        qinf = double(card, 3, 'dynamic_pressure')
        # 5
        # 6
        # NOTE(review): field 9 is read twice below (cg-z here, wtmass next);
        # the cg fields are probably meant to be 6-8 (or wtmass 10+) -- confirm
        # against the ZAERO TRIM card spec
        cg = [
            double(card, 7, 'cg-x'),
            double(card, 8, 'cg-y'),
            double(card, 9, 'cg-z'),
        ]
        # mass properties are parsed for validation but not stored
        unused_wtmass = double(card, 9, 'wtmass')
        unused_weight = double(card, 10, 'weight')
        unused_inertia = [
            double(card, 11, 'Ixx'),
            double(card, 12, 'Ixy'),
            double(card, 13, 'Iyy'),
            double(card, 14, 'Ixz'),
            double(card, 15, 'Iyz'),
            double(card, 16, 'Izz'),
        ]
        # TRUE/G  NX  NY  NZ  PDOT  QDOT  RDOT  LOADSET
        true_g = string(card, 17, 'TRUE/G')
        nx = double_or_string(card, 18, 'NX')
        ny = double_or_string(card, 19, 'NY')
        nz = double_or_string(card, 20, 'NZ')
        nxyz = [nx, ny, nz]
        p = double_or_string(card, 21, 'P')
        q = double_or_string(card, 22, 'Q')
        r = double_or_string(card, 23, 'R')
        pqr = [p, q, r]
        loadset = integer_or_blank(card, 24, 'loadset')
        # remaining fields are (trimvar_id, value) pairs; value may be 'FREE'
        labels = []
        uxs = []
        i = 25
        n = 1
        while i < len(card):
            label = integer(card, i, 'label%i' % n)
            ux = double_or_string(card, i + 1, 'ux%i' % n)
            if isinstance(ux, str):
                assert ux == 'FREE', 'ux=%r' % ux
            #print('  label=%s ux=%s' % (label, ux))
            labels.append(label)
            uxs.append(ux)
            i += 2
            n += 1
        assert len(card) >= 25, f'len(TRIM card) = {len(card):d}\ncard={card}'
        return TRIM_ZONA(sid, mkaeroz, qinf, cg, true_g, nxyz, pqr, loadset,
                         labels, uxs, comment=comment)
    def validate(self):
        """sanity-checks the card after construction/parsing"""
        assert self.true_g in ['TRUE', 'G'], 'true_g=%r' % self.true_g
        # each load factor / rate is either a float or a FREE/NONE flag
        assert isinstance(self.nxyz[0], float) or self.nxyz[0] in ['FREE', 'NONE'], 'nx=%r' % self.nxyz[0]
        assert isinstance(self.nxyz[1], float) or self.nxyz[1] in ['FREE', 'NONE'], 'ny=%r' % self.nxyz[1]
        assert isinstance(self.nxyz[2], float) or self.nxyz[2] in ['FREE', 'NONE'], 'nz=%r' % self.nxyz[2]
        assert isinstance(self.pqr[0], float) or self.pqr[0] in ['FREE', 'NONE'], 'p=%r' % self.pqr[0]
        assert isinstance(self.pqr[1], float) or self.pqr[1] in ['FREE', 'NONE'], 'q=%r' % self.pqr[1]
        assert isinstance(self.pqr[2], float) or self.pqr[2] in ['FREE', 'NONE'], 'r=%r' % self.pqr[2]
        assert self.q > 0.0, 'q=%s\n%s' % (self.q, str(self))
        if len(set(self.labels)) != len(self.labels):
            msg = 'not all labels are unique; labels=%s' % str(self.labels)
            raise RuntimeError(msg)
        if len(self.labels) != len(self.uxs):
            msg = 'nlabels=%s != nux=%s; labels=%s uxs=%s' % (
                len(self.labels), len(self.uxs), str(self.labels), str(self.uxs))
            raise RuntimeError(msg)
    def cross_reference(self, model: BDF) -> None:
        # nothing to cross-reference for the ZONA TRIM card
        pass
        #self.suport = model.suport
        #self.suport1 = model.suport1
        #self.aestats = model.aestats
        #self.aelinks = model.aelinks
        #self.aesurf = model.aesurf
    def safe_cross_reference(self, model):
        pass
    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass
    def convert_to_nastran(self, model):
        """builds an equivalent Nastran TRIM card"""
        mkaeroz_id = self.mkaeroz
        # the Mach number comes from the referenced MKAEROZ card
        mkaeroz = model.zona.mkaeroz[mkaeroz_id]
        #print(mkaeroz)
        mach = mkaeroz.mach
        labels = []
        uxs = []
        comment = str(self)
        for label_id, ux in zip(self.labels, self.uxs):
            # only fixed (non-FREE) variables carry over to the TRIM card;
            # label_id is a TRIMVAR id, but Nastran TRIM wants its name
            if ux != 'FREE':
                trimvar = model.zona.trimvar[label_id]
                label = trimvar.label
                assert isinstance(label, str), 'label=%r' % label
                comment += str(trimvar)
                labels.append(label)
                uxs.append(ux)
        assert self.q is not None
        # NOTE(review): mutates self.q as a side effect of the conversion
        if self.q == 'NONE':
            self.q = 1.
        assert isinstance(self.q, float), str(self)
        trim = TRIM(self.sid, mach, self.q, labels, uxs,
                    aeqr=1.0, comment=comment)
        trim.validate()
        return trim
    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card

        Note: emits a Nastran-style 'TRIM' card image, with mach/aeqr
        hard-coded placeholders (1.0 / 0.0).
        """
        mach = 1.0
        aeqr = 0.0
        list_fields = ['TRIM', self.sid, mach, self.q]
        nlabels = len(self.labels)
        assert nlabels > 0, self.labels
        for (i, label, ux) in zip(count(), self.labels, self.uxs):
            list_fields += [label, ux]
            if i == 1:
                # aeqr occupies field 9, after the second (label, ux) pair
                list_fields += [aeqr]
        if nlabels == 1:
            # pad out to field 9 so aeqr lands in the right column
            list_fields += [None, None, aeqr]
        return list_fields
    def repr_fields(self):
        # fixes a Nastran bug
        #aeqr = set_blank_if_default(self.aeqr, 1.0)
        aeqr = 0.
        list_fields = self.raw_fields()
        # force field 9 (aeqr) to 0.
        list_fields[8] = aeqr
        return list_fields
    def write_card(self, size: int=8, is_double: bool=False) -> str:
        # ZONA TRIM cards are intentionally not written out
        return ''
        #card = self.repr_fields()
        #return self.comment + print_card_8(card)
class TRIMLNK(BaseCard):
    """
    Defines a set of coefficient and trim variable identification
    number pairs for trim variable linking.

    +---------+--------+--------+--------+--------+--------+--------+--------+--------+
    |    1    |    2   |    3   |    4   |    5   |    6   |    7   |    8   |    9   |
    +=========+========+========+========+========+========+========+========+========+
    | TRIMLNK | IDLINK |  SYM   | COEFF1 | IDVAR1 | COEFF2 | IDVAR2 | COEFF3 | IDVAR3 |
    +---------+--------+--------+--------+--------+--------+--------+--------+--------+
    |         | COEFF4 | IDVAR4 |  etc.  |        |        |        |        |        |
    +---------+--------+--------+--------+--------+--------+--------+--------+--------+
    """
    type = 'TRIMLNK'
    def __init__(self, link_id, sym, coeffs, var_ids, comment=''):
        """
        Creates a TRIMLNK card

        Parameters
        ----------
        link_id : int
            the TRIMLNK id
        sym : str
            'SYM', 'ASYM' or 'ANTI' (enforced by the assert below)
        coeffs : List[float]
            linking coefficients, paired elementwise with var_ids
        var_ids : List[int]
            ids of the TRIMVAR cards being linked
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.link_id = link_id
        self.sym = sym
        self.coeffs = coeffs
        self.var_ids = var_ids
        # NOTE(review): add_card reads sym with string_or_blank, so a blank
        # SYM field reaches here as None and trips this assert -- confirm
        # whether a blank SYM is legal per the ZAERO spec
        assert sym in ['SYM', 'ASYM', 'ANTI'], sym
    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a TRIMLNK card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        link_id = integer(card, 1, 'var_id')
        sym = string_or_blank(card, 2, 'sym')
        # fields 3+ come in (coeff, var_id) pairs
        nfields = len(card) - 3
        assert nfields % 2 == 0, card
        icoeff = 1
        coeffs = []
        var_ids = []
        for ifield in range(3, len(card), 2):
            coeff = double(card, ifield, 'coeff_%i' % icoeff)
            var_id = integer(card, ifield + 1, 'var_%i' % icoeff)
            coeffs.append(coeff)
            var_ids.append(var_id)
            icoeff += 1
        assert len(card) >= 5, f'len(TRIMLNK card) = {len(card):d}\ncard={card}'
        return TRIMLNK(link_id, sym, coeffs, var_ids, comment=comment)
    def cross_reference(self, model: BDF) -> None:
        # nothing to cross-reference for the ZONA TRIMLNK card
        pass
        #self.suport = model.suport
        #self.suport1 = model.suport1
        #self.aestats = model.aestats
        #self.aelinks = model.aelinks
        #self.aesurf = model.aesurf
    def safe_cross_reference(self, model):
        pass
    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass
    def convert_to_nastran(self, model):
        """builds an equivalent Nastran AELINK card"""
        label = 'LNK_%s' % self.link_id
        trimvars = model.zona.trimvar
        comment = str(self)
        independent_labels = []
        for var_id in self.var_ids:
            trimvar = trimvars[var_id]
            # NOTE(review): this rebinding shadows the 'LNK_<id>' label above,
            # so the AELINK's dependent label ends up being the LAST linked
            # TRIMVAR's label (which also appears in independent_labels);
            # looks like a bug -- confirm the intended dependent label
            label = trimvar.label
            comment += str(trimvar)
            independent_labels.append(label)
        Cis = self.coeffs
        aelink = AELINK(self.link_id, label, independent_labels, Cis, comment=comment)
        aelink.validate()
        return aelink
    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        list_fields = ['TRIMLNK', self.link_id, self.sym]
        # interleave the (coeff, var_id) pairs
        for coeff, var in zip(self.coeffs, self.var_ids):
            list_fields.append(coeff)
            list_fields.append(var)
        return list_fields
    def repr_fields(self):
        return self.raw_fields()
    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class TRIMVAR(BaseCard):
    """
    Specifies a trim variable for static aeroelastic trim variables.
    """
    type = 'TRIMVAR'

    def __init__(self, var_id, label, lower, upper,
                 trimlnk, dmi, sym, initial,
                 dcd, dcy, dcl, dcr, dcm, dcn, comment=''):
        """
        Creates a TRIMVAR card for a static aero (144) analysis.

        Parameters
        ----------
        var_id : int
            the trim variable id; referenced by TRIM/TRIMLNK cards
        label : str
            the name of the trim variable
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.var_id = var_id
        self.label = label
        # variable bounds
        self.lower = lower
        self.upper = upper
        self.trimlnk = trimlnk
        self.dmi = dmi
        self.sym = sym
        self.initial = initial
        # DCD/DCY/DCL/DCR/DCM/DCN -- presumably trim derivatives per the
        # ZAERO spec; stored verbatim
        self.dcd = dcd
        self.dcy = dcy
        self.dcl = dcl
        self.dcr = dcr
        self.dcm = dcm
        self.dcn = dcn

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a TRIMVAR card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        var_id = integer(card, 1, 'var_id')
        label = string(card, 2, 'label')
        lower = double_or_blank(card, 3, 'lower')
        upper = double_or_blank(card, 4, 'upper')
        trimlnk = integer_or_blank(card, 5, 'TRIMLNK')
        dmi = blank(card, 6, 'DMI')
        sym = string_or_blank(card, 7, 'sym')
        initial = blank(card, 8, 'initial')
        # fields 9-14 hold the six derivatives
        names = ('DCD', 'DCY', 'DCL', 'DCR', 'DCM', 'DCN')
        dcd, dcy, dcl, dcr, dcm, dcn = [
            double_or_blank(card, 9 + ioffset, name)
            for ioffset, name in enumerate(names)]
        return TRIMVAR(var_id, label, lower, upper, trimlnk, dmi, sym,
                       initial, dcd, dcy, dcl, dcr, dcm,
                       dcn, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        # nothing to cross-reference
        pass

    def safe_cross_reference(self, model):
        pass

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def convert_to_nastran(self, model):
        raise NotImplementedError()

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        derivs = [self.dcd, self.dcy, self.dcl, self.dcr, self.dcm, self.dcn]
        return (['TRIMVAR', self.var_id, self.label, self.lower, self.upper,
                 self.trimlnk, self.dmi, self.sym, self.initial] + derivs)

    def repr_fields(self):
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        fields = self.repr_fields()
        return self.comment + print_card_8(fields)
class FLUTTER_ZONA(Spline):
    """
    Defines data needed to perform flutter, ASE, or a transient response analysis.

    +---------+-------+-------+-------+-------+--------+-------+---------+--------+
    |    1    |   2   |   3   |   4   |   5   |   6    |   7   |    8    |   9    |
    +=========+=======+=======+=======+=======+========+=======+=========+========+
    | FLUTTER | SETID |  SYM  |  FIX  | NMODE | TABDMP | MLIST | CONMLST | NKSTEP |
    +---------+-------+-------+-------+-------+--------+-------+---------+--------+
    | FLUTTER |  100  | SYMM3 |   1   |   0   |   30   |  100  |    0    |   50   |
    +---------+-------+-------+-------+-------+--------+-------+---------+--------+

    NOTE(review): this card subclasses ``Spline`` even though it is not a
    spline; presumably only for shared plumbing -- confirm.
    """
    type = 'FLUTTER_ZONA'

    def __init__(self, sid, sym, fix, nmode, tabdmp, mlist, conmlst, nkstep=25, comment=''):
        """
        Creates a FLUTTER card, which is required for a flutter (SOL 145)
        analysis.

        Parameters
        ----------
        sid : int
            Unique set identification number. (Integer > 0)
        sym : str
            boundary-condition flag: first 4 characters are 'SYMM'/'SYM',
            'ANTI' or 'ASYM'; an optional suffix selects the interpolation
            scheme for the generalized aero matrices (blank=cubic spline,
            L=linear, P=2nd-order polynomial, integer=hybrid);
            default='SYMML'
        fix : int
            id of a FIXHATM, FIXMATM, FIXMACH, or FIXMDEN card. (Integer > 0)
        nmode : int
            number of structural modes used in the flutter analysis. (Integer >= 0)
        tabdmp : int
            id of a TABDMP1 card giving modal damping vs. natural
            frequency. (Integer >= 0)
        mlist : int
            id of a SET1 or SETADD card listing normal modes to omit from
            the flutter analysis. (Integer >= 0)
        conmlst : int
            id of a CONMLST card listing CONM1 ids for mass perturbation.
            (Integer >= 0)
        nkstep : int; default=25
            number of reduced-frequency steps for the
            reduced-frequency-sweep technique of the g-Method flutter
            solution. (Integer >= 0)
        comment : str; default=''
            a comment for the card
        """
        # https://www.zonatech.com/Documentation/ZAERO_9.2_Users_3rd_Ed.pdf
        Spline.__init__(self)
        if comment:
            self.comment = comment
        self.sid = sid
        self.sym = sym
        self.fix = fix
        self.nmode = nmode
        self.tabdmp = tabdmp
        self.mlist = mlist
        self.conmlst = conmlst
        self.nkstep = nkstep

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a FLUTTER card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        sym = string(card, 2, 'sym')
        fix = integer(card, 3, 'fix')
        nmode = integer(card, 4, 'nmode')
        tabdmp = integer(card, 5, 'tabdmp')
        mlist = integer(card, 6, 'mlist')
        conmlst = integer(card, 7, 'conmlst')
        nkstep = integer_or_blank(card, 8, 'nkstep', 25)
        assert len(card) <= 9, f'len(FLUTTER card) = {len(card):d}\ncard={card}'
        return FLUTTER_ZONA(sid, sym, fix, nmode, tabdmp, mlist, conmlst, nkstep,
                            comment=comment)

    def cross_reference(self, model: BDF) -> None:
        # nothing to cross-reference
        return

    def safe_cross_reference(self, model: BDF, xref_errors=None):
        return

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        return

    def convert_to_nastran(self, model):
        raise NotImplementedError()

    def raw_fields(self):
        raise NotImplementedError('FLUTTER - raw_fields')

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        # NOTE(review): repr_fields falls back to raw_fields, which raises,
        # so writing this card is currently unsupported
        fields = self.repr_fields()
        return self.comment + print_card_8(fields)
class SPLINE1_ZONA(Spline):
    """
    Defines an infinite plate spline method for displacements and loads
    transferal between CAERO7 macroelement and structural grid points.

    Nastran form:

    +---------+-------+-------+------+------+------+----+------+-------+
    |    1    |   2   |   3   |  4   |  5   |  6   | 7  |  8   |   9   |
    +=========+=======+=======+======+======+======+====+======+=======+
    | SPLINE1 |  EID  | CAERO | BOX1 | BOX2 | SETG | DZ | METH | USAGE |
    +---------+-------+-------+------+------+------+----+------+-------+
    |         | NELEM | MELEM |      |      |      |    |      |       |
    +---------+-------+-------+------+------+------+----+------+-------+
    | SPLINE1 |   3   |  111  | 115  | 122  |  14  | 0. |      |       |
    +---------+-------+-------+------+------+------+----+------+-------+

    ZONA form:

    +---------+------+-------+-------+------+------+----+-----+-------+
    |    1    |  2   |   3   |   4   |  5   |  6   | 7  |  8  |   9   |
    +=========+======+=======+=======+======+======+====+=====+=======+
    | SPLINE1 | EID  | MODEL |  CP   | SETK | SETG | DZ | EPS |       |
    +---------+------+-------+-------+------+------+----+-----+-------+
    | SPLINE1 | 100  |       |       |  1   |  10  | 0. |     |       |
    +---------+------+-------+-------+------+------+----+-----+-------+
    """
    type = 'SPLINE1_ZONA'
    def __init__(self, eid, panlst, setg, model=None, cp=None,
                 dz=None, eps=0.01, comment=''):
        """
        Creates a SPLINE1 card, which is useful for control surface
        constraints.

        Parameters
        ----------
        eid : int
            spline id
        panlst : int
            id of the PANLST group of aero boxes (looked up in
            model.zona.panlsts)
        setg : int
            id of the SET of structural nodes
        comment : str; default=''
            a comment for the card
        """
        # https://www.zonatech.com/Documentation/ZAERO_9.2_Users_3rd_Ed.pdf
        Spline.__init__(self)
        if comment:
            self.comment = comment
        self.eid = eid
        self.model = model
        self.cp = cp
        self.panlst = panlst
        self.setg = setg
        self.dz = dz
        self.eps = eps
        # filled in by (safe_)cross_reference()
        self.panlst_ref = None
        self.setg_ref = None
        self.aero_element_ids = []
    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SPLINE1 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        eid = integer(card, 1, 'eid')
        model = string_or_blank(card, 2, 'model')
        cp = blank(card, 3, 'cp')
        panlst = integer(card, 4, 'panlst/setk')
        setg = integer(card, 5, 'setg')
        dz = blank(card, 6, 'dz')
        # NOTE(review): eps reads field 6, the same field dz just asserted
        # blank, so eps is always its 0.01 default; the class table puts
        # EPS in field 7 -- probable off-by-one, confirm vs the ZAERO spec
        eps = double_or_blank(card, 6, 'eps', 0.01)
        return SPLINE1_ZONA(eid, panlst, setg, model=model, cp=cp, dz=dz, eps=eps,
                            comment=comment)
    def cross_reference(self, model: BDF) -> None:
        msg = ', which is required by SPLINE1 eid=%s' % self.eid
        self.setg_ref = model.Set(self.setg, msg=msg)
        self.setg_ref.cross_reference_set(model, 'Node', msg=msg)
        self.panlst_ref = model.zona.panlsts[self.panlst]
        self.panlst_ref.cross_reference(model)
        self.aero_element_ids = self.panlst_ref.aero_element_ids
    def safe_cross_reference(self, model: BDF, xref_errors):
        msg = ', which is required by SPLINE1 eid=%s' % self.eid
        try:
            self.setg_ref = model.Set(self.setg, msg=msg)
            self.setg_ref.safe_cross_reference(model, 'Node', msg=msg)
        except KeyError:
            model.log.warning('failed to find SETx set_id=%s%s; allowed_sets=%s' % (
                self.setg, msg, np.unique(list(model.sets.keys()))))
        try:
            self.panlst_ref = model.zona.panlsts[self.panlst]
            self.panlst_ref.safe_cross_reference(model, xref_errors)
            self.aero_element_ids = self.panlst_ref.aero_element_ids
        except KeyError:
            pass
    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.panlst_ref = None
        self.setg_ref = None
    def convert_to_nastran(self, model):
        """
        Builds one Nastran SPLINE1 per CAERO7 group referenced by the PANLST.

        +---------+-------+-------+------+------+------+----+------+-------+
        |    1    |   2   |   3   |  4   |  5   |  6   | 7  |  8   |   9   |
        +=========+=======+=======+======+======+======+====+======+=======+
        | SPLINE1 |  EID  | CAERO | BOX1 | BOX2 | SETG | DZ | METH | USAGE |
        +---------+-------+-------+------+------+------+----+------+-------+
        |         | NELEM | MELEM |      |      |      |    |      |       |
        +---------+-------+-------+------+------+------+----+------+-------+
        """
        #panlst = 100 # set_aero
        #return SPLINE1(self.eid, panlst, self.setg, model=None, cp=self.cp, dz=self.dz,
                       #eps=0.01, comment=self.comment)
        splines = []
        if not hasattr(self, '_comment'):
            self._comment = ''
        # a banner comment is attached to the first generated spline only
        comment = '-' * 72 + '\n' #+ self._comment
        #self._comment = ''
        comment += str(self)
        for panel_groups in self.panlst_ref.panel_groups:
            eid = model.zona.caero_to_name_map[panel_groups]
            caero = model.caeros[eid]
            caero_id = eid
            # NOTE(review): the caero eid is reused as the SPLINE1 eid AND as
            # box1 (so boxes are assumed numbered from the caero id); confirm
            # this matches the ZONA box numbering convention
            box1 = caero.eid
            box2 = box1 + caero.npanels - 1
            assert caero.npanels > 0, caero
            #assert box1 > 0 and box2 > 0, 'box1=%s box2=%s' % (box1, box2)
            spline = SPLINE1(eid, caero_id, box1, box2, self.setg, dz=self.dz,
                             method='IPS', usage='BOTH',
                             nelements=10, melements=10, comment=comment)
            spline.validate()
            splines.append(spline)
            comment = ''
        return splines
    def raw_fields(self):
        # ZONA-form field image (see the class docstring)
        list_fields = ['SPLINE1', self.eid, self.model, self.cp, self.panlst, self.setg,
                       self.dz, self.eps]
        return list_fields
    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class SPLINE2_ZONA(Spline):
    """
    Defines an infinite plate spline method for displacements and loads
    transferal between CAERO7 macroelement and structural grid points.

    +---------+------+-------+------+------+----+-----+-------+-------+
    |    1    |  2   |   3   |  5   |  6   | 6  |  7  |   8   |   9   |
    +=========+======+=======+======+======+====+=====+=======+=======+
    | SPLINE2 | EID  | MODEL | SETK | SETG | DZ | EPS |  CP   | CURV  |
    +---------+------+-------+------+------+----+-----+-------+-------+
    | SPLINE2 | 100  |       |  1   |  10  | 0. |     |       |       |
    +---------+------+-------+------+------+----+-----+-------+-------+

    NOTE(review): unlike SPLINE1_ZONA/SPLINE3_ZONA, this class defines no
    convert_to_nastran -- confirm whether SPLINE2 cards should be converted.
    """
    type = 'SPLINE2_ZONA'

    def __init__(self, eid, panlst, setg, model=None, dz=None, eps=0.01,
                 cp=None, curvature=None, comment=''):
        """
        Creates a SPLINE2 card, which is useful for control surface
        constraints.

        Parameters
        ----------
        eid : int
            spline id
        panlst : int
            id of the PANLST group of aero boxes (looked up in
            model.zona.panlsts)
        setg : int
            id of the SET of structural nodes
        comment : str; default=''
            a comment for the card
        """
        # https://www.zonatech.com/Documentation/ZAERO_9.2_Users_3rd_Ed.pdf
        Spline.__init__(self)
        if comment:
            self.comment = comment
        self.eid = eid
        self.model = model
        self.cp = cp
        self.panlst = panlst
        self.setg = setg
        self.dz = dz
        self.eps = eps
        self.curvature = curvature
        # filled in by (safe_)cross_reference()
        self.panlst_ref = None
        self.setg_ref = None
        self.aero_element_ids = []

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SPLINE2 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        eid = integer(card, 1, 'eid')
        model = string_or_blank(card, 2, 'model')
        panlst = integer(card, 3, 'panlst/setk')
        setg = integer(card, 4, 'setg')
        dz = blank(card, 5, 'dz')
        eps = double_or_blank(card, 6, 'eps', 0.01)
        cp = integer_or_blank(card, 7, 'cp', 0)
        curvature = double_or_blank(card, 8, 'curvature', 1.0)
        return SPLINE2_ZONA(eid, panlst, setg, model=model, cp=cp, dz=dz, eps=eps,
                            curvature=curvature, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        # bug fix: the message previously said SPLINE1, which pointed a
        # missing-SET error at the wrong card type
        msg = ', which is required by SPLINE2 eid=%s' % self.eid
        self.setg_ref = model.Set(self.setg, msg=msg)
        self.setg_ref.cross_reference_set(model, 'Node', msg=msg)
        #self.nodes_ref = model.Nodes(self.nodes, msg=msg)
        #self.caero_ref = model.CAero(self.caero, msg=msg)
        self.panlst_ref = model.zona.panlsts[self.panlst]
        self.panlst_ref.cross_reference(model)
        self.aero_element_ids = self.panlst_ref.aero_element_ids

    def safe_cross_reference(self, model: BDF, xref_errors):
        try:
            # bug fix: message previously said SPLINE1
            msg = ', which is required by SPLINE2 eid=%s' % self.eid
            self.setg_ref = model.Set(self.setg, msg=msg)
            self.setg_ref.cross_reference_set(model, 'Node', msg=msg)
        except Exception:
            # best-effort xref; a missing SET is tolerated here
            pass
        self.panlst_ref = model.zona.panlsts[self.panlst]
        self.panlst_ref.safe_cross_reference(model, xref_errors)
        self.aero_element_ids = self.panlst_ref.aero_element_ids

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.panlst_ref = None
        self.setg_ref = None

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        list_fields = ['SPLINE2', self.eid, self.model, self.panlst, self.setg,
                       self.dz, self.eps, self.cp, self.curvature]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class SPLINE3_ZONA(Spline):
    """
    Defines a 3-D spline for the BODY7 and CAERO7 macroelement.

    +---------+------+-------+-------+------+------+----+-----+-------+
    |    1    |  2   |   3   |   4   |  5   |  6   | 7  |  8  |   9   |
    +=========+======+=======+=======+======+======+====+=====+=======+
    | SPLINE3 | EID  | MODEL |  CP   | SETK | SETG | DZ | EPS |       |
    +---------+------+-------+-------+------+------+----+-----+-------+
    | SPLINE3 | 100  |       |  N/A  |  1   |  10  | 0. |     |       |
    +---------+------+-------+-------+------+------+----+-----+-------+
    """
    type = 'SPLINE3_ZONA'

    def __init__(self, eid, panlst, setg, model=None, cp=None,
                 dz=None, eps=0.01, comment=''):
        """
        Creates a SPLINE3 card, which is useful for control surface
        constraints.

        Parameters
        ----------
        eid : int
            spline id
        panlst : int
            id of the PANLST group of aero boxes (looked up in
            model.zona.panlsts)
        setg : int
            id of the SET of structural nodes
        comment : str; default=''
            a comment for the card
        """
        # https://www.zonatech.com/Documentation/ZAERO_9.2_Users_3rd_Ed.pdf
        Spline.__init__(self)
        if comment:
            self.comment = comment
        self.eid = eid
        self.model = model
        self.cp = cp
        self.panlst = panlst
        self.setg = setg
        self.dz = dz
        self.eps = eps
        # filled in by (safe_)cross_reference()
        self.panlst_ref = None
        self.setg_ref = None
        self.aero_element_ids = []

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SPLINE3 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        eid = integer(card, 1, 'eid')
        model = blank(card, 2, 'model')
        cp = blank(card, 3, 'cp')
        panlst = integer(card, 4, 'panlst/setk')
        setg = integer(card, 5, 'setg')
        dz = blank(card, 6, 'dz')
        # NOTE(review): eps reads field 6, the same field dz just asserted
        # blank, so eps is always its 0.01 default; the table puts EPS in
        # field 7 -- probable off-by-one, confirm vs the ZAERO spec
        eps = double_or_blank(card, 6, 'eps', 0.01)
        return SPLINE3_ZONA(eid, panlst, setg, model=model, cp=cp, dz=dz, eps=eps,
                            comment=comment)

    def cross_reference(self, model: BDF) -> None:
        msg = ', which is required by SPLINE3 eid=%s' % self.eid
        self.setg_ref = model.Set(self.setg, msg=msg)
        self.setg_ref.cross_reference_set(model, 'Node', msg=msg)
        self.panlst_ref = model.zona.panlsts[self.panlst]
        self.panlst_ref.cross_reference(model)
        self.aero_element_ids = self.panlst_ref.aero_element_ids

    def safe_cross_reference(self, model: BDF, xref_errors):
        msg = ', which is required by SPLINE3 eid=%s' % self.eid
        try:
            self.setg_ref = model.Set(self.setg, msg=msg)
            self.setg_ref.cross_reference_set(model, 'Node', msg=msg)
        except Exception:
            # best-effort xref; a missing SET is tolerated here
            pass
        self.panlst_ref = model.zona.panlsts[self.panlst]
        self.panlst_ref.safe_cross_reference(model, xref_errors)
        self.aero_element_ids = self.panlst_ref.aero_element_ids

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.panlst_ref = None
        self.setg_ref = None

    def convert_to_nastran(self, model):
        """the ZONA SPLINE3 produces no Nastran cards"""
        return []

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        return ['SPLINE3', self.eid, self.model, self.cp, self.panlst, self.setg,
                self.dz, self.eps]

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        fields = self.repr_fields()
        return self.comment + print_card_8(fields)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,626
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/gui/utils/version.py
|
"""
defines:
- version_latest, version_current, is_newer = check_for_newer_version(version_current=None)
"""
import urllib.request
import urllib.error
from typing import List, Tuple, Optional
import pyNastran
def get_data_from_website(target_url: str) -> Tuple[List[str], bool]:
    """Downloads a utf-8 text resource and splits it into lines.

    Parameters
    ----------
    target_url : str
        the url of the text file to fetch (e.g. latest.txt on github)

    Returns
    -------
    lines : List[str]
        the lines of the downloaded file; [] if the download failed
    is_failed : bool
        True if the download failed (forbidden, page not found,
        no network, ...)
    """
    is_failed = True
    lines = []  # type: List[str]
    try:
        # context manager closes the connection; the original leaked the
        # response object
        with urllib.request.urlopen(target_url) as data_bytes:
            data = data_bytes.read().decode('utf-8')
        is_failed = False
        lines = data.split('\n')  # basically data.readlines()
    except (urllib.error.HTTPError, urllib.error.URLError):
        # forbidden / page not found / unreachable: report failure quietly
        pass
    return lines, is_failed
def split_version(version: str, msg: str) -> Tuple[int, int, int]:
    """Splits a version string such as '1.3.2' or '1.4.0+dev.abc' into (1, 3, 2).

    Prints a diagnostic (tagged with *msg*) and re-raises if the string
    does not have exactly three dot-separated fields before any '+'.
    """
    base = version.split('+')[0]
    try:
        major_str, minor_str, rev_str = base.split('.')
    except ValueError:
        print(f'version_{msg} = {version}')
        raise
    return int(major_str), int(minor_str), int(rev_str)
def get_latest_version_from_data(data: List[str]) -> Optional[str]:
    """finds the latest released version

    Parameters
    ----------
    data : List[str]
        the lines of latest.txt; only the first line is used

    Returns
    -------
    version_latest : Optional[str]
        the lowercased first line, or None when *data* is empty

    The original carried a wrong ``Tuple`` return annotation (it always
    returned a single value) and a redundant ``if None: return`` branch.
    """
    for line in data:
        return line.lower()
    return None
def check_for_newer_version(version_current: Optional[str]=None,
                            version_latest: Optional[str]=None,
                            quiet: bool=False) -> Tuple[Optional[str], Optional[str], bool]:
    """
    Checks to see if a newer version of pyNastran has been released.
    Only checks this for the GUI.

    Looks in latest.txt for:
        0.7.2
    and compares that to the current version.

    Returns
    -------
    version_latest : str
        the latest version found
        '1.3.2'
    version_current : str
        the current version (may be a dev version)
        '1.3.2'
        '1.4.0+dev.5378fd363'
    is_newer : bool
        is there a newer version
    """
    if version_current is None:
        version_current = pyNastran.__version__
    assert version_current is not None, version_current

    is_failed, version_latest = _get_latest_version(version_latest)
    if is_failed:
        # could not download/parse latest.txt; nothing to compare against
        return None, None, False

    is_newer = _check_if_version_is_newer(version_latest, version_current, quiet)
    return version_latest, version_current, is_newer
def _get_latest_version(version_latest: Optional[str]) -> Tuple[bool, str]:
    """Downloads latest.txt from github unless a version was already supplied.

    Returns (is_failed, version_latest); raises RuntimeError when the
    downloaded file cannot be parsed.
    """
    if version_latest is not None:
        # caller already knows the latest version; nothing to download
        return False, version_latest

    target_url = 'https://raw.githubusercontent.com/SteveDoyle2/pyNastran/master/latest.txt'
    data, is_failed = get_data_from_website(target_url)
    if is_failed:
        return is_failed, version_latest
    version_latest = get_latest_version_from_data(data)  # type: ignore
    if version_latest is None:
        raise RuntimeError("can't parse website")
    return is_failed, version_latest
def _check_if_version_is_newer(version_latest: str, version_current: str,
                               quiet: bool) -> bool:
    """
    Determines whether the latest release should be downloaded.

    Not 100% on this list, but this is the general idea...

    Current    Release    Dev? -> is_newer  Result          Description
    =========  =========  ====    ========  ========        =================
    (1, 0, 0)  (1, 3, 2)  True ->  True     download        typical user
    (1, 4, 0)  (1, 3, 2)  True ->  False    don't download  typical dev
    (1, 4, 0)  (1, 4, 0)  False -> True     download        intermediate gui release
    (1, 4, 0)  (1, 3, 2)  False -> False    dont download   release candidate
    """
    is_newer = False
    is_dev = 'dev' in version_current
    tuple_current_version = split_version(version_current, 'current')
    tuple_latest_version = split_version(version_latest, 'latest')

    is_self_newer = tuple_current_version > tuple_latest_version
    is_newer_release_version = tuple_current_version < tuple_latest_version
    # a non-dev build at (or behind) the latest release should still update
    is_newer_dev_version = (not is_dev) and (tuple_current_version <= tuple_latest_version)
    if is_self_newer:
        # local build is ahead of the released version; nothing to do
        pass
    elif is_newer_release_version or is_newer_dev_version:
        # fixed typo in the user-facing message: 'availible' -> 'available'
        msg = f'pyNastran {version_latest} is now available; current={version_current}'
        if not quiet:  # pragma: no cover
            print(msg)
        is_newer = True
    return is_newer
if __name__ == '__main__':  # pragma: no cover
    # manual smoke test: prints a message when a newer release exists
    check_for_newer_version()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,627
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/op2/tables/oes_stressStrain/oes.py
|
#pylint: disable=C0301,W0201,R0911,R0915,R0914,C0103,W0212
"""
Defines the Real/Complex Stresses/Strains created by:
STRESS = ALL
STRAIN = ALL
NX Case Control Block Description
=============== ========== ===========
NLSTRESS OESNLXR Nonlinear static stresses
BOUTPUT OESNLBR Slideline stresses
STRESS OESNLXD Nonlinear Transient Stresses
STRESS OES1C/OSTR1C Ply stresses/strains
STRESS OES1X Element stresses with intermediate (CBAR and CBEAM)
station stresses and stresses on nonlinear elements
STRESS OES/OESVM Element stresses (linear elements only)
STRAIN OSTR1 Element strains
STRESS/STRAIN DOES1/DOSTR1 Scaled Response Spectra
MODCON OSTRMC Modal contributions
"""
from __future__ import annotations
from struct import Struct
from typing import Tuple, Any, TYPE_CHECKING
from numpy import fromstring, frombuffer, radians, sin, cos, vstack, repeat, array
import numpy as np
from pyNastran.op2.op2_interface.op2_codes import SORT1_TABLES_BYTES, TABLES_BYTES
from pyNastran.op2.op2_interface.op2_reader import mapfmt
from pyNastran.op2.op2_interface.utils import apply_mag_phase, build_obj
from pyNastran.op2.op2_helper import polar_to_real_imag
from pyNastran.op2.op2_interface.function_codes import func1, func7
from pyNastran.op2.tables.utils import get_eid_dt_from_eid_device
from pyNastran.op2.tables.oes_stressStrain.real.oes_bars import RealBarStressArray, RealBarStrainArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_bars100 import RealBar10NodesStressArray, RealBar10NodesStrainArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_beams import (RealBeamStressArray, RealBeamStrainArray,
RealNonlinearBeamStressArray)
from pyNastran.op2.tables.oes_stressStrain.real.oes_bush import RealBushStressArray, RealBushStrainArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_bush1d import RealBush1DStressArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_composite_plates import RealCompositePlateStressArray, RealCompositePlateStrainArray
#RealCompositePlateStressStrengthRatioArray, RealCompositePlateStrainStrengthRatioArray = None, None
#RealCompositePlateStrainStrengthRatioArray = None
from pyNastran.op2.tables.oes_stressStrain.real.oes_composite_plates_strength_ratio import RealCompositePlateStressStrengthRatioArray # , RealCompositePlateStrainStrengthRatioArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_gap import NonlinearGapStressArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_plates import RealPlateStressArray, RealPlateStrainArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_plate_strain import RealCPLSTRNPlateStressArray, RealCPLSTRNPlateStrainArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_rods import RealRodStressArray, RealRodStrainArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_shear import RealShearStrainArray, RealShearStressArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_solids import RealSolidStrainArray, RealSolidStressArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_solids_nx import RealSolidStressArrayNx, RealSolidStrainArrayNx
from pyNastran.op2.tables.oes_stressStrain.real.oes_solids_composite_nx import RealSolidCompositeStressArray, RealSolidCompositeStrainArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_springs import (RealSpringStressArray, RealSpringStrainArray,
RealNonlinearSpringStressArray)
from pyNastran.op2.tables.oes_stressStrain.real.oes_triax import RealTriaxStressArray, RealTriaxStrainArray
from pyNastran.op2.tables.oes_stressStrain.real.oes_bend import RealBendStressArray, RealBendStrainArray
from pyNastran.op2.tables.oes_stressStrain.complex.oes_bars import ComplexBarStressArray, ComplexBarStrainArray
from pyNastran.op2.tables.oes_stressStrain.complex.oes_beams import ComplexBeamStressArray, ComplexBeamStrainArray
from pyNastran.op2.tables.oes_stressStrain.complex.oes_bush import (ComplexCBushStressArray, ComplexCBushStrainArray)
from pyNastran.op2.tables.oes_stressStrain.complex.oes_bush1d import ComplexCBush1DStressArray
from pyNastran.op2.tables.oes_stressStrain.complex.oes_plates import (
ComplexPlateStressArray, ComplexPlateStrainArray, ComplexLayeredCompositesArray)
from pyNastran.op2.tables.oes_stressStrain.complex.oes_plates_vm import (
ComplexPlateVMStressArray, ComplexPlateVMStrainArray)
from pyNastran.op2.tables.oes_stressStrain.complex.oes_triax import ComplexTriaxStressArray
from pyNastran.op2.tables.oes_stressStrain.complex.oes_rods import ComplexRodStressArray, ComplexRodStrainArray
from pyNastran.op2.tables.oes_stressStrain.complex.oes_shear import ComplexShearStressArray, ComplexShearStrainArray
from pyNastran.op2.tables.oes_stressStrain.complex.oes_solids import ComplexSolidStressArray, ComplexSolidStrainArray
from pyNastran.op2.tables.oes_stressStrain.complex.oes_springs import ComplexSpringStressArray, ComplexSpringStrainArray
from pyNastran.op2.tables.oes_stressStrain.complex.oes_bend import ComplexBendStressArray, ComplexBendStrainArray
from pyNastran.op2.tables.oes_stressStrain.random.oes_rods import RandomRodStressArray, RandomRodStrainArray
from pyNastran.op2.tables.oes_stressStrain.random.oes_bars import RandomBarStressArray, RandomBarStrainArray
from pyNastran.op2.tables.oes_stressStrain.random.oes_beams import RandomBeamStressArray, RandomBeamStrainArray
from pyNastran.op2.tables.oes_stressStrain.random.oes_bend import RandomBendStressArray, RandomBendStrainArray
from pyNastran.op2.tables.oes_stressStrain.random.oes_plates import RandomPlateStressArray, RandomPlateStrainArray
from pyNastran.op2.tables.oes_stressStrain.random.oes_plates_vm import RandomPlateVMStressArray, RandomPlateVMStrainArray
from pyNastran.op2.tables.oes_stressStrain.random.oes_solids import RandomSolidStressArray, RandomSolidStrainArray
from pyNastran.op2.tables.oes_stressStrain.random.oes_shear import RandomShearStressArray, RandomShearStrainArray
from pyNastran.op2.tables.oes_stressStrain.random.oes_composite_plates import RandomCompositePlateStressArray, RandomCompositePlateStrainArray
from pyNastran.op2.tables.oes_stressStrain.oes_nonlinear_rod import RealNonlinearRodArray
from pyNastran.op2.tables.oes_stressStrain.oes_nonlinear_bush import RealNonlinearBushArray
from pyNastran.op2.tables.oes_stressStrain.oes_hyperelastic import (
HyperelasticQuadArray)
from pyNastran.op2.tables.oes_stressStrain.oes_nonlinear import RealNonlinearPlateArray, RealNonlinearSolidArray
from pyNastran.op2.tables.oes_stressStrain.utils import (
oes_cbar_complex_19,
oes_cbar100_real_10,
oesrt_comp_shell_real_9, oesrt_cquad4_95,
oes_cbar_real_16, oes_cbar_random_10,
oes_cbeam_complex_111, oes_cbeam_random_67, oes_cbeam_real_111,
oes_cbend_real_21,
oes_cbush_complex_13, oes_cbush_real_7,
oes_celas_complex_3, oes_celas_real_2,
oes_cshear_random_3,
oes_comp_shell_real_11,
oes_cquad4_33_complex_15, oes_cquad4_33_random_9,
oes_cquad4_33_complex_vm_17, oes_cquad4_33_random_vm_11, oes_cquad4_random_vm_57,
oes_cquad4_144_complex_77, oes_cquad4_144_random, oes_cquad4_144_real,
oes_cquad4_complex_vm_87, oes_crod_real_5, oes_crod_complex_5, oes_crod_random_3,
oes_csolid_real, oes_csolid_complex, oes_csolid_random, _oes_csolid2_real,
oes_csolid_composite_real,
oes_csolid_linear_hyperelastic_cosine_real, oes_csolid_linear_hyperelastic_real,
oes_ctria3_real_17, oes_ctria3_complex_vm_17, oes_ctria3_random_9,
oes_ctriax6_real_33, oes_ctriax_complex_37,
oes_ctria3_random_vm_11,
oes_quad4_33_real_17,
oes_shell_composite_complex_11, oes_shell_composite_complex_13,
)
if TYPE_CHECKING: # pragma: no cover
from pyNastran.op2.op2 import OP2
NX_TABLES_BYTES = [b'OESVM1', b'OESVM2']
NASA_TABLES_BYTES = [b'OESC1']
class OP2Common2:
    """Thin adapter that holds the parent OP2 object and forwards
    its ``size``/``factor`` attributes to subclasses."""

    def __init__(self, op2: OP2):
        # reference to the parent OP2 reader; subclasses go through this
        self.op2 = op2

    @property
    def size(self) -> int:
        """pass-through to ``self.op2.size``"""
        return self.op2.size

    @property
    def factor(self) -> int:
        """pass-through to ``self.op2.factor``"""
        return self.op2.factor
class OES(OP2Common2):
"""
Defines the OES class that is used to read stress/strain data
"""
def __init__(self, op2: OP2):
    """Initializes the OES reader; *op2* is the parent OP2 object."""
    super().__init__(op2)
    # parsing accumulator; presumably a byte/entry count -- TODO confirm
    self.ntotal = 0
def _read_oes1_3(self, data, unused_ndata):
    """
    reads OES1 subtable 3 (the header/metadata record of a stress table)

    Parameters
    ----------
    data : bytes
        the record payload
    unused_ndata : int
        the length of data (unused)

    Raises
    ------
    RuntimeError
        if the analysis_code is not supported
    """
    op2 = self.op2
    op2._analysis_code_fmt = b'i'
    op2._data_factor = 1
    # fixed: the original list was missing a comma after 'load_set',
    # which silently fused 'load_set' and 'format_code' into one entry
    op2.words = [
        'aCode', 'tCode', 'element_type', 'isubcase',
        '???', '???', '???', 'load_set',
        'format_code', 'num_wide', 's_code', '???',
        '???', '???', '???', '???',
        '???', '???', '???', '???',
        '???', '???', '???', '???',
        '???', 'Title', 'subtitle', 'label', '???']
    op2.parse_approach_code(data)  # 3

    ## element type
    op2.element_type = op2.add_data_parameter(data, 'element_type', b'i', 3, False)
    ## load set ID
    op2.load_set = op2.add_data_parameter(data, 'load_set', b'i', 8, False)
    ## format code
    op2.format_code = op2.add_data_parameter(data, 'format_code', b'i', 9, False)
    ## number of words per entry in record
    ## .. note:: is this needed for this table ???
    op2.num_wide = op2.add_data_parameter(data, 'num_wide', b'i', 10, False)
    ## stress/strain codes
    op2.s_code = op2.add_data_parameter(data, 's_code', b'i', 11, False)
    ## thermal flag; 1 for heat transfer, 0 otherwise
    op2.thermal = op2.add_data_parameter(data, 'thermal', b'i', 23, False)

    ## assuming tCode=1
    analysis_code = op2.analysis_code
    if analysis_code == 1:  # statics / displacement / heat flux
        ## load set number
        op2.lsdvmn = op2.add_data_parameter(data, 'lsdvmn', b'i', 5, False)
        op2.data_names = op2.apply_data_code_value('data_names', ['lsdvmn'])
        op2.setNullNonlinearFactor()
    elif analysis_code == 2:  # real eigenvalues
        ## mode number
        op2.mode = op2.add_data_parameter(data, 'mode', b'i', 5)
        ## eigenvalue
        op2.eign = op2.add_data_parameter(data, 'eign', b'f', 6, False)
        ## mode or cycle; word 7 is read as both int and float
        ## NOTE(review): unclear which interpretation applies -- confirm
        op2.mode2 = op2.add_data_parameter(data, 'mode2', b'i', 7, False)
        op2.cycle = op2.add_data_parameter(data, 'cycle', b'f', 7, False)
        op2.reader_oug.update_mode_cycle('cycle')
        op2.data_names = op2.apply_data_code_value(
            'data_names', ['mode', 'eign', 'mode2', 'cycle'])
    # analysis_code 3/4 (differential stiffness) are not supported here
    elif analysis_code == 5:  # frequency
        ## frequency
        op2.freq = op2.add_data_parameter(data, 'freq', b'f', 5)
        op2.data_names = op2.apply_data_code_value('data_names', ['freq'])
    elif analysis_code == 6:  # transient
        ## time step
        op2.dt = op2.add_data_parameter(data, 'dt', b'f', 5)
        op2.data_names = op2.apply_data_code_value('data_names', ['dt'])
    elif analysis_code == 7:  # pre-buckling
        ## load set
        op2.lsdvmn = op2.add_data_parameter(data, 'lsdvmn', b'i', 5)
        op2.data_names = op2.apply_data_code_value('data_names', ['lsdvmn'])
    elif analysis_code == 8:  # post-buckling
        ## mode number
        op2.lsdvmn = op2.add_data_parameter(data, 'lsdvmn', b'i', 5)
        op2.eigr = op2.add_data_parameter(data, 'eigr', b'f', 6, False)  # real eigenvalue
        op2.data_names = op2.apply_data_code_value('data_names', ['lsdvmn', 'eigr'])
    elif analysis_code == 9:  # complex eigenvalues
        ## mode number
        op2.mode = op2.add_data_parameter(data, 'mode', b'i', 5)
        ## real eigenvalue
        op2.eigr = op2.add_data_parameter(data, 'eigr', b'f', 6, False)
        ## imaginary eigenvalue
        op2.eigi = op2.add_data_parameter(data, 'eigi', b'f', 7, False)
        op2.data_names = op2.apply_data_code_value('data_names', ['mode', 'eigr', 'eigi'])
    elif analysis_code == 10:  # nonlinear statics
        ## load step
        # fixed: was ``self.lftsfq = ...``; every other branch stores onto op2
        op2.lftsfq = op2.add_data_parameter(data, 'lftsfq', b'f', 5)
        op2.data_names = op2.apply_data_code_value('data_names', ['lftsfq'])
    elif analysis_code == 11:  # old geometric nonlinear statics
        ## load set number
        # fixed: was ``self.lsdvmn = ...``; every other branch stores onto op2
        op2.lsdvmn = op2.add_data_parameter(data, 'lsdvmn', b'i', 5)
        op2.data_names = op2.apply_data_code_value('data_names', ['lsdvmn'])
    elif analysis_code == 12:  # contran ? (may appear as aCode=6) --> straight from DMAP...grrr...
        ## Time step ??? --> straight from DMAP
        op2.dt = op2.add_data_parameter(data, 'dt', b'f', 5)
        op2.data_names = op2.apply_data_code_value('data_names', ['dt'])
    else:
        msg = 'invalid analysis_code...analysis_code=%s' % op2.analysis_code
        raise RuntimeError(msg)

    op2.fix_format_code()
    self.get_oes_prefix_postfix()
    op2._parse_thermal_code()
    op2.reader_oef._set_force_stress_element_name()
    if op2.is_debug_file:
        op2.binary_debug.write(' element_name = %r\n' % op2.element_name)
        op2.binary_debug.write(' approach_code = %r\n' % op2.approach_code)
        op2.binary_debug.write(' tCode = %r\n' % op2.tCode)
        op2.binary_debug.write(' isubcase = %r\n' % op2.isubcase)
    op2._read_title(data)

    # map the integer element type to its name; re-raise with context on failure
    try:
        op2.element_name = op2.element_mapper[op2.element_type]
    except KeyError:
        op2.log.error(op2.code_information())
        raise
    assert op2.element_name != '', op2.code_information()

    self._parse_stress_code_to_stress_bits()
    op2._write_debug_bits()
    assert isinstance(op2.format_code, int), op2.format_code
def _check_result_type(self):
    """Cross-checks ``sort_bits`` against the sort method and result type
    encoded in ``tCode``; raises NotImplementedError on unknown codes."""
    op2 = self.op2
    info = op2.code_information

    sort_method = func1(op2.tCode)
    if sort_method not in (1, 2):
        raise NotImplementedError(sort_method)
    # SORT1 -> is_sort1 == 1; SORT2 -> is_sort1 == 0
    expected_is_sort1 = 1 if sort_method == 1 else 0
    assert op2.sort_bits.is_sort1 == expected_is_sort1, info()

    result_type = op2.result_type  # func7(op2.tCode)
    if result_type == 0:  # real
        assert op2.sort_bits.is_real == 1, info()
    elif result_type == 1:  # complex
        assert op2.sort_bits.is_complex == 1, info()
    elif result_type == 2:  # random
        assert op2.sort_bits.is_random == 1, info()
    else:
        raise NotImplementedError(result_type)
def _parse_stress_code_to_stress_bits(self):
    """
    Expands the integer ``s_code`` into the 5-element ``stress_bits`` list.

    s_code = 0 -> stress_bits = [0,0,0,0,0]
    s_code = 1 -> stress_bits = [0,0,0,0,1]
    s_code = 2 -> stress_bits = [0,0,0,1,0]
    s_code = 3 -> stress_bits = [0,0,0,1,1]
    etc.
    s_code = 32 -> stress_bits = [1,1,1,1,1]

    stress_bits[0] = 0 -> is_max_shear=True     isVonMises=False
    stress_bits[0] = 1 -> is_max_shear=False    isVonMises=True
    stress_bits[1] = 0 -> is_stress=True        is_strain=False
    stress_bits[2] = 0 -> isFiberCurvature=True isFiberDistance=False
    stress_bits[3] = 0 -> duplicate of Bit[1] (stress/strain)
    stress_bits[4] = 0 -> material coordinate system flag
    """
    op2 = self.op2
    bits = [0, 0, 0, 0, 0]

    s_code = op2.s_code
    # peel binary digits off s_code, least-significant first, filling
    # stress_bits from the right (index 4) toward the left (index 0)
    i = 4
    while s_code > 0:
        value = s_code % 2
        s_code = (s_code - value) // 2
        bits[i] = value
        # NOTE(review): for s_code > 31, i goes negative and bits[-1]
        # wraps back to index 4 -- presumably s_code <= 31; confirm
        i -= 1
    op2.stress_bits = bits
    op2.data_code['stress_bits'] = op2.stress_bits
def _read_oes2_4(self, data: bytes, ndata: int):
    """
    Reads the Stress Table 4 (the SORT2 data record)

    Parameters
    ----------
    data : bytes
        the record payload
    ndata : int
        the length of data

    Returns
    -------
    n : int
        the number of bytes consumed
    """
    op2 = self.op2
    # flag the deck flavor based on the table name
    if op2.table_name in NX_TABLES_BYTES:
        op2.to_nx(f' because table_name={op2.table_name}')
    elif op2.table_name in NASA_TABLES_BYTES:
        op2.to_nasa(f' because table_name={op2.table_name}')
    assert isinstance(op2.format_code, int), op2.format_code
    assert op2.is_stress is True, op2.code_information()
    op2.data_code['is_stress_flag'] = True
    op2.data_code['is_strain_flag'] = False
    op2._setup_op2_subcase('STRESS/STRAIN')
    # element types this reader knows how to parse in SORT2
    elements_to_read = [
        1, 3, 10,  # CROD, CTUBE, CONROD
        11, 12, 13,  # CELAS1, CELAS2, CELAS3,
        2, 4, 34, 33, 74,  # CBEAM, CSHEAR, CBAR, CQUAD4, CTRIA3,
        75, 64, 70, 82, 144,  # CTRIA6, CQUAD8, CTRIAR, CQUADR, CQUAD4
        69,  # CBEND
        67, 68, 95, 102,  # CHEXA, CPENTA, QUAD4-comp, CBUSH
        39,  # CTETRA
        86,  # GAPNL
        88,  # TRIA3-nonlinear
        89,  # ROD-nonlinear
        90,  # QUAD4-nonlinear
        91,  # PENTANL
        93,  # HEXANL
        97,  # CTRIA3-C
        96,  # QUAD8-nonlinear
        98,  # TRIA6-nonlinear
        100,  # CBAR-100
        228,  # CQUADR
        232,  # CQUADR-composite
        243,  # CQUADX4
        189,  # VUQUAD
        190,  # VUTRIA
        191,  # VUBEAM
        256,  # CPYRAM
        227,  # CTRIAR
        275,  # CPLSTS3
    ]
    if op2.element_type in elements_to_read:
        n = self._read_oes_4_sort(data, ndata)
    else:
        # unsupported element type: skip the record (or raise in strict mode)
        msg = op2.code_information()
        n = op2._not_implemented_or_skip(data, ndata, msg)
    return n
def _read_oes1_4(self, data: bytes, ndata: int):
    """
    Reads the Stress Table 4 (the SORT1 data record)

    Returns the number of bytes consumed.
    """
    op2 = self.op2
    if op2.table_name in NX_TABLES_BYTES:
        op2.to_nx(f' because table_name={op2.table_name}')
    assert isinstance(op2.format_code, int), op2.format_code
    assert op2.is_stress is True, op2.code_information()
    # mark the subcase as carrying stress (not strain) results
    op2.data_code['is_stress_flag'] = True
    op2.data_code['is_strain_flag'] = False

    op2._setup_op2_subcase('STRESS/STRAIN')
    return self._read_oes_4_sort(data, ndata)
def _read_oes2_3(self, data, unused_ndata):
    """
    reads OES2 subtable 3 (the SORT2 header/metadata record)

    Parameters
    ----------
    data : bytes
        the record payload
    unused_ndata : int
        the length of data (unused)

    Raises
    ------
    RuntimeError
        if the analysis_code is not supported
    """
    op2 = self.op2
    op2._data_factor = 1
    # fixed: the original list was missing a comma after 'load_set',
    # which silently fused 'load_set' and 'format_code' into one entry
    op2.words = [
        'aCode', 'tCode', 'element_type', 'isubcase',
        '???', '???', '???', 'load_set',
        'format_code', 'num_wide', 's_code', '???',
        '???', '???', '???', '???',
        '???', '???', '???', '???',
        '???', '???', '???', '???',
        '???', 'Title', 'subtitle', 'label', '???']
    op2.parse_approach_code(data)  # 3
    op2.sort_method = 2

    ## element type
    op2.element_type = op2.add_data_parameter(data, 'element_type', b'i', 3, False)
    ## load set ID
    op2.load_set = op2.add_data_parameter(data, 'load_set', b'i', 8, False)
    ## format code
    op2.format_code = op2.add_data_parameter(data, 'format_code', b'i', 9, False)
    ## number of words per entry in record
    ## .. note:: is this needed for this table ???
    op2.num_wide = op2.add_data_parameter(data, 'num_wide', b'i', 10, False)
    ## stress/strain codes
    op2.s_code = op2.add_data_parameter(data, 's_code', b'i', 11, False)
    ## thermal flag; 1 for heat transfer, 0 otherwise
    op2.thermal = op2.add_data_parameter(data, 'thermal', b'i', 23, False)
    ## in SORT2 word 5 is the element id; it is read both as a
    ## device-coded int and as a float
    op2.element_id = op2.add_data_parameter(data, 'element_id', b'i', 5, fix_device_code=True)
    op2._element_id = op2.add_data_parameter(
        data, '_element_id', b'f', 5, apply_nonlinear_factor=False, add_to_dict=True)

    analysis_code = op2.analysis_code
    if analysis_code == 1:  # static...because reasons.
        op2._analysis_code_fmt = b'i'
        op2.data_names = op2.apply_data_code_value('data_names', ['element_id'])
    elif analysis_code == 2:  # real eigenvalues
        op2._analysis_code_fmt = b'i'
        op2.eign = op2.add_data_parameter(data, 'eign', b'f', 6, False)
        # mode or cycle .. todo:: confused on the type - F1???
        op2.mode_cycle = op2.add_data_parameter(data, 'mode_cycle', b'i', 7, False)
        op2.data_names = op2.apply_data_code_value(
            'data_names', ['element_id', 'eign', 'mode_cycle'])
    elif analysis_code == 5:  # frequency
        op2._analysis_code_fmt = b'f'
        op2.data_names = op2.apply_data_code_value('data_names', ['element_id'])
        op2.apply_data_code_value('analysis_method', 'freq')
    elif analysis_code == 6:  # transient
        op2._analysis_code_fmt = b'f'
        op2.data_names = op2.apply_data_code_value('data_names', ['element_id'])
        op2.apply_data_code_value('analysis_method', 'dt')
    elif analysis_code == 7:  # pre-buckling
        op2._analysis_code_fmt = b'i'
        op2.data_names = op2.apply_data_code_value('data_names', ['element_id'])
        op2.apply_data_code_value('analysis_method', 'lsdvmn')
    elif analysis_code == 8:  # post-buckling
        op2._analysis_code_fmt = b'f'
        op2.eigr = op2.add_data_parameter(data, 'eigr', b'f', 6, False)
        op2.data_names = op2.apply_data_code_value('data_names', ['element_id', 'eigr'])
        op2.apply_data_code_value('analysis_method', 'eigr')
    elif analysis_code == 9:  # complex eigenvalues
        # mode number
        op2._analysis_code_fmt = b'i'
        op2.eigr = op2.add_data_parameter(data, 'eigr', b'f', 6, False)
        op2.eigi = op2.add_data_parameter(data, 'eigi', b'f', 7, False)
        op2.data_names = op2.apply_data_code_value(
            'data_names', ['element_id', 'eigr', 'eigi'])
        op2.apply_data_code_value('analysis_method', 'mode')
    elif analysis_code == 10:  # nonlinear statics
        # load step
        op2._analysis_code_fmt = b'f'
        op2.data_names = op2.apply_data_code_value('data_names', ['element_id'])
        op2.apply_data_code_value('analysis_method', 'lftsfq')
    elif analysis_code == 11:  # old geometric nonlinear statics
        # load set number
        op2.data_names = op2.apply_data_code_value('data_names', ['element_id'])
    elif analysis_code == 12:  # contran ? (may appear as aCode=6) --> straight from DMAP...grrr...
        op2.data_names = op2.apply_data_code_value('data_names', ['element_id'])
    else:
        msg = 'invalid analysis_code...analysis_code=%s' % op2.analysis_code
        raise RuntimeError(msg)

    op2.fix_format_code()
    self._parse_stress_code_to_stress_bits()
    self._fix_oes_sort2(data)
    op2.reader_oef._set_force_stress_element_name()
    #assert isinstance(op2.nonlinear_factor, int), op2.nonlinear_factor
def _fix_oes_sort2(self, data):
    """Patches the format/thermal codes for SORT2 tables, writes the debug
    trace, and reads the title record."""
    op2 = self.op2
    # called twice in the original (once directly, once under a dead
    # ``if 1:`` guard); both calls preserved in case fix_format_code
    # is not idempotent -- TODO confirm
    op2.fix_format_code()
    op2.fix_format_code()
    op2._parse_thermal_code()

    if op2.is_debug_file:
        op2.binary_debug.write(' %-14s = %r %s\n' % (
            'approach_code', op2.approach_code,
            op2.approach_code_str(op2.approach_code)))
        op2.binary_debug.write(' %-14s = %r\n' % ('tCode', op2.tCode))
        op2.binary_debug.write(' %-14s = %r\n' % ('isubcase', op2.isubcase))

    op2._read_title(data)
    op2._write_debug_bits()
def _read_ostr1_4(self, data: bytes, ndata: int):
    """
    Reads the Strain Table 4 (the SORT1 data record)

    Returns the number of bytes consumed.
    """
    op2 = self.op2
    if op2.table_name in NX_TABLES_BYTES:
        op2.to_nx(f' because table_name={op2.table_name} was found')
    assert op2.is_strain is True, op2.code_information()
    # mark the subcase as carrying strain (not stress) results
    op2.data_code['is_stress_flag'] = False
    op2.data_code['is_strain_flag'] = True

    op2._setup_op2_subcase('STRESS/STRAIN')
    return self._read_ostr_4_sort(data, ndata)
def _read_ostr2_4(self, data: bytes, ndata: int):
    """
    Reads the Strain Table 4 (the SORT2 data record), skipping
    element types the reader does not support.

    Returns the number of bytes consumed.
    """
    op2 = self.op2
    if op2.table_name in NX_TABLES_BYTES:
        op2.to_nx(f' because table_name={op2.table_name} was found')
    assert op2.is_strain is True, op2.code_information()
    op2.data_code['is_stress_flag'] = False
    op2.data_code['is_strain_flag'] = True

    op2._setup_op2_subcase('STRESS/STRAIN')
    # element types this reader knows how to parse in SORT2
    supported_element_types = (
        1, 3, 10,             # CROD, CTUBE, CONROD
        11, 12, 13,           # CELAS1, CELAS2, CELAS3
        2, 4, 34, 33, 74,     # CBEAM, CSHEAR, CBAR, CQUAD4, CTRIA3
        75, 64, 70, 82, 144,  # CTRIA6, CQUAD8, CTRIAR, CQUADR, CQUAD4
        69,                   # CBEND
        67, 68, 95, 102,      # CHEXA, CPENTA, QUAD4-comp, CBUSH
        96,                   # QUAD8-nonlinear
        98,                   # TRIA6-nonlinear
        39,                   # CTETRA
        228,                  # CQUADR
        232,                  # CQUADR-composite
        233,                  # CTRIAR-composite
        97,                   # CTRIA3-C
    )
    if op2.element_type in supported_element_types:
        return self._read_ostr_4_sort(data, ndata)
    return op2._not_implemented_or_skip(data, ndata, op2.code_information())
#def _autojit3(func):
#"""
#Debugging function to print the object name and an needed parameters
#"""
#def new_func(self, data):
#"""
#The actual function exec'd by the decorated function.
#"""
#n = func(self, data)
#return n
#return new_func
def _print_obj_name_on_crash(func):
"""Debugging function to print the object name and an needed parameters"""
def new_func(self, data):
"""The actual function exec'd by the decorated function."""
try:
n = func(self, data)
except NameError:
raise
except AttributeError:
raise
#except Exception:
#raise
#print("----------")
#print(op2.obj)
#print(op2.data_code)
#if op2.obj is not None:
##from pyNastran.utils import object_attributes
##print object_attributes(op2.obj)
#print(op2.obj.data_code)
#print("----------")
#raise
return n
return new_func
#@_print_obj_name_on_crash
def _read_oes_4_sort(self, data: bytes, ndata: int):
"""Reads OES1 subtable 4 for NX/MSC/Autodesk/Optistruct"""
op2 = self.op2
#if op2.num_wide == 146:
#assert op2.num_wide != 146
#assert ndata != 146, op2.code_information()
assert isinstance(op2.format_code, int), op2.format_code
if op2.thermal == 0:
n = self._read_oes1_loads(data, ndata)
elif op2.thermal == 1:
n = self._read_oes1_thermal(data, ndata)
else:
msg = 'thermal=%s' % op2.thermal
n = op2._not_implemented_or_skip(data, ndata, msg)
return n
#@_print_obj_name_on_crash
def _read_ostr_4_sort(self, data: bytes, ndata: int):
"""
Reads OSTR1 subtable 4
"""
op2 = self.op2
#if op2.num_wide == 146:
#assert op2.num_wide != 146
#assert ndata != 146, op2.code_information()
if op2.thermal == 0:
n = self._read_oes1_loads(data, ndata)
elif op2.thermal == 1:
n = self._read_oes1_thermal(data, ndata)
else:
msg = 'thermal=%s' % op2.thermal
n = op2._not_implemented_or_skip(data, ndata, msg)
return n
def _read_oes1_thermal(self, unused_data: bytes, ndata: int) -> int:
"""
Reads OES op2.thermal=1 tables; uses a hackish method to just skip the table
"""
return ndata
def _read_ostr1_thermal(self, unused_data: bytes, ndata: int) -> int:
"""
Reads OSTR op2.thermal=1 tables; uses a hackish method to just skip the table
"""
return ndata
def get_stress_mapper(self):
stress_mapper = {
# element_type, format_code, num_wide
# rods
(1, 1, 5, b'OES1') : ('crod_stress', RealRodStressArray), # real
(1, 1, 5, b'OES1X') : ('crod_stress', RealRodStressArray), # real
(1, 1, 5, b'OES1X1') : ('crod_stress', RealRodStressArray), # real
(1, 2, 5, b'OES1X') : ('crod_stress', ComplexRodStressArray), # real/imag
(1, 3, 5, b'OES1X') : ('crod_stress', ComplexRodStressArray), # mag/phase
(1, 2, 5, b'OESVM1') : ('crod_stress', ComplexRodStressArray), # real/imag
(1, 3, 5, b'OESVM1') : ('crod_stress', ComplexRodStressArray), # mag/phase
(3, 1, 5, b'OES1X1') : ('ctube_stress', RealRodStressArray),
(3, 1, 5, b'OES1X') : ('ctube_stress', RealRodStressArray),
(3, 2, 5, b'OES1X') : ('ctube_stress', ComplexRodStressArray),
(3, 2, 5, b'OESVM1') : ('ctube_stress', ComplexRodStressArray), # freq nx
(3, 3, 5, b'OESVM1') : ('ctube_stress', ComplexRodStressArray), # freq nx
#(3, 3, 5) : ('ctube_stress', ComplexRodStressArray),
(10, 1, 5, b'OES1') : ('conrod_stress', RealRodStressArray),
(10, 1, 5, b'OES1X') : ('conrod_stress', RealRodStressArray),
(10, 2, 5, b'OES1X') : ('conrod_stress', ComplexRodStressArray),
(10, 1, 5, b'OES1X1') : ('conrod_stress', RealRodStressArray),
(10, 2, 5, b'OESVM1') : ('conrod_stress', ComplexRodStressArray),
(10, 3, 5, b'OESVM1') : ('conrod_stress', ComplexRodStressArray),
#(10, 2, 5) : ('conrod_stress', ComplexRodStressArray),
#(10, 3, 5) : ('conrod_stress', ComplexRodStressArray),
# beams
(2, 1, 111, b'OES1X1') : ('cbeam_stress', RealBeamStressArray),
(2, 1, 111, b'OES1X') : ('cbeam_stress', RealBeamStressArray),
(2, 1, 111, b'OES1') : ('cbeam_stress', RealBeamStressArray),
(2, 2, 111, b'OES1X') : ('cbeam_stress', ComplexBeamStressArray),
(2, 3, 111, b'OES1X') : ('cbeam_stress', ComplexBeamStressArray),
(2, 3, 111, b'OESVM1') : ('cbeam_stress', ComplexBeamStressArray),
(4, 1, 4, b'OES1X1') : ('cshear_stress', RealShearStressArray),
#(4, 2, 5) : ('cshear_stress', ComplexShearStressArray),
#(4, 3, 5) : ('cshear_stress', ComplexShearStressArray),
(4, 2, 5, b'OES1X') : ('cshear_stress', ComplexShearStressArray),
(4, 2, 5, b'OESVM1') : ('cshear_stress', ComplexShearStressArray),
(4, 3, 5, b'OESVM1') : ('cshear_stress', ComplexShearStressArray),
#(4, 3, 3) : ('cshear_stress', RandomShearStressArray),
(11, 1, 2, b'OES1X1') : ('celas1_stress', RealSpringStressArray), # real
(11, 2, 3, b'OES1X') : ('celas1_stress', ComplexSpringStressArray), # real/imag
(11, 3, 3, b'OES1X') : ('celas1_stress', ComplexSpringStressArray), # mag/phase
(11, 2, 3, b'OESVM1') : ('celas1_stress', ComplexSpringStressArray), # mag/phase
(11, 3, 3, b'OESVM1') : ('celas1_stress', ComplexSpringStressArray), # mag/phase
(12, 1, 2, b'OES1X1') : ('celas2_stress', RealSpringStressArray),
(12, 1, 2, b'OES1X') : ('celas2_stress', RealSpringStressArray),
(12, 1, 2, b'OES1') : ('celas2_stress', RealSpringStressArray),
(12, 2, 3, b'OES1X') : ('celas2_stress', ComplexSpringStressArray),
(12, 3, 3, b'OES1X') : ('celas2_stress', ComplexSpringStressArray),
(12, 2, 3, b'OESVM1') : ('celas2_stress', ComplexSpringStressArray),
(12, 3, 3, b'OESVM1') : ('celas2_stress', ComplexSpringStressArray),
(13, 1, 2, b'OES1X1') : ('celas3_stress', RealSpringStressArray),
#(13, 2, 3) : ('celas3_stress', ComplexSpringStressArray),
#(13, 3, 3) : ('celas3_stress', ComplexSpringStressArray),
(13, 2, 3, b'OES1X') : ('celas3_stress', ComplexSpringStressArray),
(13, 2, 3, b'OESVM1') : ('celas3_stress', ComplexSpringStressArray),
(13, 3, 3, b'OESVM1') : ('celas3_stress', ComplexSpringStressArray),
(14, 1, 2) : ('celas4_stress', RealSpringStressArray),
(14, 2, 3) : ('celas4_stress', ComplexSpringStressArray),
(14, 3, 3) : ('celas4_stress', ComplexSpringStressArray),
(34, 1, 16, b'OES1X1') : ('cbar_stress', RealBarStressArray),
(34, 1, 16, b'OES1X') : ('cbar_stress', RealBarStressArray),
(34, 1, 16, b'OES1') : ('cbar_stress', RealBarStressArray),
(34, 2, 19, b'OES1X') : ('cbar_stress', ComplexBarStressArray),
(34, 1, 10, b'OESNO1') : ('cbar_stress', ComplexBarStressArray),
(34, 2, 10, b'OESXRMS1') : ('cbar_stress', ComplexBarStressArray),
(34, 1, 10, b'OESRMS2') : ('cbar_stress', RandomBarStressArray),
(34, 2, 10, b'OESPSD2') : ('cbar_stress', RandomBarStressArray),
(34, 2, 10, b'OESRMS2') : ('cbar_stress', RandomBarStressArray),
(34, 2, 10, b'OESNO2') : ('cbar_stress', RandomBarStressArray),
(34, 2, 10, b'OESATO2') : ('cbar_stress', RandomBarStressArray),
(34, 2, 10, b'OESCRM2') : ('cbar_stress', RandomBarStressArray),
# Missing stress_mapper key for OES1 table #501
# see cbarao_random_x_mini.op2 for an example with OES1 and OES1X...
# it looks to be an error in MSC [2008-2012)
(34, 2, 19, b'OES1') : ('cbar_stress', ComplexBarStressArray),
(34, 3, 19, b'OES1X') : ('cbar_stress', ComplexBarStressArray),
(34, 3, 19, b'OESVM1') : ('cbar_stress', ComplexBarStressArray),
#(34, 1, 19) : ('cbar_stress', RandomBarStressArray),
(100, 1, 10, b'OES1X1') : ('cbar_stress_10nodes', RealBar10NodesStressArray),
(100, 1, 10, b'OES1X') : ('cbar_stress_10nodes', RealBar10NodesStressArray),
# solids
(39, 1, 109, b'OES1X1') : ('ctetra_stress', RealSolidStressArray), # real
(39, 1, 109, b'OES1X') : ('ctetra_stress', RealSolidStressArray), # real
(39, 1, 109, b'OES1') : ('ctetra_stress', RealSolidStressArray), # real
(39, 3, 74, b'OESVM1') : ('ctetra_stress', ComplexSolidStressArray), # mag/phase
(67, 1, 193, b'OES1X1') : ('chexa_stress', RealSolidStressArray),
(67, 1, 193, b'OES1X') : ('chexa_stress', RealSolidStressArray),
(67, 1, 193, b'OES1') : ('chexa_stress', RealSolidStressArray),
(67, 1, 193, b'RASCONS') : ('chexa_stress', RealSolidStressArray),
(68, 1, 151, b'OES1X1') : ('cpenta_stress', RealSolidStressArray),
(68, 1, 151, b'OES1X') : ('cpenta_stress', RealSolidStressArray),
(68, 1, 151, b'OES1') : ('cpenta_stress', RealSolidStressArray),
(68, 3, 102, b'OESVM1') : ('cpenta_stress', ComplexSolidStressArray),
(39, 2, 69, b'OES1X') : ('ctetra_stress', ComplexSolidStressArray), # real/imag
(39, 2, 69, b'OES1') : ('ctetra_stress', ComplexSolidStressArray),
(39, 2, 74, b'OESVM1') : ('ctetra_stress', 'NA'), # real/imag
#(39, 3, 69) : ('ctetra_stress', ComplexSolidStressArray), # mag/phase
(67, 2, 121, b'OES1X') : ('chexa_stress', ComplexSolidStressArray),
(67, 3, 121, b'OES1X') : ('chexa_stress', ComplexSolidStressArray),
(67, 3, 130, b'OESVM1') : ('chexa_stress', ComplexSolidStressArray),
(67, 2, 121, b'OES1') : ('chexa_stress', ComplexSolidStressArray),
(67, 3, 121, b'OES1') : ('chexa_stress', ComplexSolidStressArray),
(68, 2, 95, b'OES1X') : ('cpenta_stress', ComplexSolidStressArray),
(68, 3, 95, b'OES1X') : ('cpenta_stress', ComplexSolidStressArray),
(68, 2, 95, b'OES1') : ('cpenta_stress', ComplexSolidStressArray),
(33, 1, 17, b'OES1X1') : ('cquad4_stress', RealPlateStressArray),
(33, 1, 17, b'OES1X') : ('cquad4_stress', RealPlateStressArray),
(33, 1, 17, b'OES1') : ('cquad4_stress', RealPlateStressArray),
(33, 2, 15, b'OES1X') : ('cquad4_stress', ComplexPlateStressArray),
(33, 3, 15, b'OES1X') : ('cquad4_stress', ComplexPlateStressArray),
#(33, 3, 0) : ('cquad4_stress', RandomPlateStressArray),
(33, 1, 9, b'OESNO1') : ('cquad4_stress', ComplexPlateStressArray),
(33, 2, 11, b'OESXRMS1') : ('cquad4_stress', ComplexPlateStressArray),
(33, 2, 9, b'OESATO2') : ('cquad4_stress', 'NA'),
(33, 2, 9, b'OESCRM2') : ('cquad4_stress', 'NA'),
(33, 2, 9, b'OESPSD2') : ('cquad4_stress', 'NA'),
(33, 2, 9, b'OESNO2') : ('cquad4_stress', 'NA'),
(33, 1, 9, b'OESRMS2') : ('cquad4_stress', 'NA'),
(33, 2, 9, b'OESRMS2') : ('cquad4_stress', 'NA'),
(74, 1, 17, b'OES1X1') : ('ctria3_stress', RealPlateStrainArray),
(74, 1, 17, b'OES1X') : ('ctria3_stress', RealPlateStrainArray),
(74, 1, 17, b'OES1') : ('ctria3_stress', RealPlateStrainArray),
(74, 2, 15, b'OES1X') : ('ctria3_stress', ComplexPlateStrainArray),
(74, 3, 15, b'OES1X') : ('ctria3_stress', ComplexPlateStrainArray),
(74, 2, 11, b'OESXRMS1') : ('ctria3_stress', ComplexPlateStrainArray),
(74, 1, 9, b'OESNO1') : ('ctria3_stress', ComplexPlateStrainArray),
(74, 2, 17, b'OESVM1') : ('ctria3_stress', 'NA'),
(74, 3, 17, b'OESVM1') : ('ctria3_stress', 'NA'),
(74, 1, 9, b'OESRMS2') : ('ctria3_stress', 'NA'),
#(74, 1, 9) : ('ctria3_stress', RandomPlateStressArray),
(82, 1, 87, b'OES1X1') : ('cquadr_stress', RealPlateStressArray),
(82, 1, 87, b'OES1X') : ('cquadr_stress', RealPlateStressArray),
(82, 2, 77, b'OES1X') : ('cquadr_stress', ComplexPlateStressArray),
(82, 3, 77, b'OES1X') : ('cquadr_stress', ComplexPlateStressArray),
(64, 1, 87, b'OES1X1') : ('cquad8_stress', RealPlateStressArray), # real
(64, 1, 87, b'OES1X') : ('cquad8_stress', RealPlateStressArray),
(64, 1, 87, b'OES1') : ('cquad8_stress', RealPlateStressArray),
(64, 2, 77, b'OES1') : ('cquad8_stress', ComplexPlateStressArray), # real/imag
(64, 3, 77, b'OES1') : ('cquad8_stress', ComplexPlateStressArray), # mag/phase
(64, 2, 77, b'OES1X') : ('cquad8_stress', ComplexPlateStressArray), # real/imag
(64, 3, 77, b'OES1X') : ('cquad8_stress', ComplexPlateStressArray), # mag/phase
(64, 2, 87, b'OESVM1') : ('cquad8_stress', ComplexPlateStressArray), # real/imag
(64, 3, 87, b'OESVM1') : ('cquad8_stress', ComplexPlateStressArray), # mag/phase
(70, 1, 70, b'OES1X1') : ('ctriar_stress', RealPlateStressArray),
(70, 1, 70, b'OES1X') : ('ctriar_stress', RealPlateStressArray),
(70, 2, 62, b'OES1X') : ('ctriar_stress', ComplexPlateStressArray),
(70, 3, 62, b'OES1X') : ('ctriar_stress', ComplexPlateStressArray),
(75, 1, 70, b'OES1X1') : ('ctria6_stress', RealPlateStressArray),
(75, 2, 62, b'OES1X') : ('ctria6_stress', ComplexPlateStressArray),
(75, 3, 62, b'OES1X') : ('ctria6_stress', ComplexPlateStressArray),
(75, 2, 70, b'OESVM1') : ('ctria6_stress', ComplexPlateStressArray),
(75, 3, 70, b'OESVM1') : ('ctria6_stress', ComplexPlateStressArray),
(144, 1, 87, b'OES1X1') : ('cquad4_stress', RealPlateStressArray),
(144, 1, 87, b'OES1') : ('cquad4_stress', RealPlateStressArray),
(144, 1, 87, b'RASCONS') : ('cquad4_stress', RealPlateStressArray),
(144, 2, 77, b'OES1X') : ('cquad4_stress', ComplexPlateStressArray),
(144, 3, 77, b'OES1X') : ('cquad4_stress', ComplexPlateStressArray),
(144, 3, 87, b'OESVM1') : ('cquad4_stress', ComplexPlateStressArray),
#(144, 3, 77) : ('cquad4_stress', ComplexPlateStressArray),
#(64, 1, 47) : ('cquad8_stress', RandomPlateStressArray), # random
#(70, 1, 39) : ('ctriar_stress', RandomPlateStressArray),
#(75, 1, 39) : ('ctria6_stress', RandomPlateStressArray),
#(82, 1, 47) : ('cquadr_stress', RandomPlateStressArray),
#(144, 1, 47) : ('cquad4_stress', RandomPlateStressArray),
(88, 1, 13, b'OESNLXR') : ('nonlinear_ctria3_stress', RealNonlinearPlateArray), # real
(88, 1, 25, b'OESNL1X') : ('nonlinear_ctria3_stress', RealNonlinearPlateArray), # real?
(88, 1, 25, b'OESNLXR') : ('nonlinear_ctria3_stress', RealNonlinearPlateArray), # real?
(90, 1, 13, b'OESNLXR') : ('nonlinear_cquad4_stress', RealNonlinearPlateArray),
(90, 1, 25, b'OESNL1X') : ('nonlinear_cquad4_stress', RealNonlinearPlateArray),
(90, 1, 25, b'OESNLXR') : ('nonlinear_cquad4_stress', RealNonlinearPlateArray),
(90, 1, 25, b'OESNLXD') : ('nonlinear_cquad4_stress', RealNonlinearPlateArray),
(95, 1, 11, b'OES1C') : ('cquad4_composite_stress', RealCompositePlateStressArray), # real
(95, 1, 11, b'OESCP') : ('cquad4_composite_stress', RealCompositePlateStressArray), # real
(95, 1, 9, b'OESRT') : ('cquad4_composite_stress', 'RandomCompositePlateStressArray'), # real
(95, 2, 11, b'OESCP') : ('cquad4_composite_stress', RealCompositePlateStressArray), # real?
(95, 2, 11, b'OESRT') : ('cquad4_composite_stress', RealCompositePlateStressArray), # real?
#(95, 2, 9) : ('cquad4_composite_stress', ComplexCompositePlateStressArray), # real/imag
#(95, 3, 9) : ('cquad4_composite_stress', ComplexCompositePlateStressArray), # mag/phase
#(96, 1, 9) : ('cquad8_composite_stress', 'RandomCompositePlateStressArray'),
(96, 1, 11, b'OES1C') : ('cquad8_composite_stress', RealCompositePlateStressArray),
#(96, 1, 11) : ('cquad8_composite_stress', RealCompositePlateStressArray),
#(96, 2, 9) : ('cquad8_composite_stress', ComplexCompositePlateStressArray),
#(96, 3, 9) : ('cquad8_composite_stress', ComplexCompositePlateStressArray),
(97, 1, 9, b'OESRT') : ('ctria3_composite_stress', 'RandomCompositePlateStressArray'),
(97, 1, 11, b'OES1C') : ('ctria3_composite_stress', RealCompositePlateStressArray),
(97, 1, 11, b'OESCP') : ('ctria3_composite_stress', RealCompositePlateStressArray),
(97, 2, 11, b'OESCP') : ('ctria3_composite_stress', RealCompositePlateStressArray),
#(97, 2, 9) : ('ctria3_composite_stress', ComplexCompositePlateStressArray),
#(97, 3, 9) : ('ctria3_composite_stress', ComplexCompositePlateStressArray),
(98, 1, 9, b'OESRT') : ('ctria6_composite_stress', 'RandomCompositePlateStressArray'),
(98, 1, 11, b'OES1C') : ('ctria6_composite_stress', RealCompositePlateStressArray),
#(98, 1, 11) : ('ctria6_composite_stress', RealCompositePlateStressArray),
#(98, 2, 9) : ('ctria6_composite_stress', ComplexCompositePlateStressArray),
#(98, 3, 9) : ('ctria6_composite_stress', ComplexCompositePlateStressArray),
(53, 1, 33, b'OES1X1') : ('ctriax_stress', RealTriaxStressArray),
(53, 1, 33, b'OES1X') : ('ctriax_stress', RealTriaxStressArray),
(53, 2, 37, b'OES1X') : ('ctriax_stress', ComplexTriaxStressArray),
#(53, 3, 37) : ('ctriax_stress', ComplexTriaxStressArray),
(102, 1, 7, b'OES1X1') : ('cbush_stress', RealBushStressArray),
(102, 1, 7, b'OES1X') : ('cbush_stress', RealBushStressArray),
(102, 1, 7, b'OES1') : ('cbush_stress', RealBushStressArray),
(102, 2, 13, b'OES1X') : ('cbush_stress', ComplexCBushStressArray),
(102, 3, 13, b'OES1X') : ('cbush_stress', ComplexCBushStressArray),
(102, 2, 13, b'OESVM1') : ('cbush_stress', 'NA'),
(102, 2, 13, b'OES1'): ('cbush_stress', ComplexCBushStressArray),
(40, 1, 8, b'OES1X1') : ('cbush1d_stress_strain', RealBushStressArray),
(40, 1, 8, b'OESNLXD') : ('cbush1d_stress_strain', RealBushStressArray),
#(40, 2, 9) : ('cbush1d_stress_strain', ComplexCBushStressArray),
#(40, 3, 9) : ('cbush1d_stress_strain', ComplexCBushStressArray),
(87, 1, 7, b'OESNL1X') : ('nonlinear_ctube_stress', RealNonlinearRodArray),
(87, 1, 7, b'OESNLXR') : ('nonlinear_ctube_stress', RealNonlinearRodArray),
(89, 1, 7, b'OESNL1X') : ('nonlinear_crod_stress', RealNonlinearRodArray),
(89, 1, 7, b'OESNLXD') : ('nonlinear_crod_stress', RealNonlinearRodArray),
(89, 1, 7, b'OESNLXR') : ('nonlinear_crod_stress', RealNonlinearRodArray),
(92, 1, 7, b'OESNL1X') : ('nonlinear_conrod_stress', RealNonlinearRodArray),
(92, 1, 7, b'OESNLXD') : ('nonlinear_conrod_stress', RealNonlinearRodArray),
(92, 1, 7, b'OESNLXR') : ('nonlinear_conrod_stress', RealNonlinearRodArray),
(224, 1, 3, b'OESNLXD') : ('nonlinear_celas1_stress', RealNonlinearSpringStressArray),
(224, 1, 3, b'OESNLXR') : ('nonlinear_celas1_stress', RealNonlinearSpringStressArray),
(225, 1, 3, b'OESNLXR') : ('nonlinear_celas3_stress', RealNonlinearSpringStressArray),
(35, 1, 18, b'OES1X1') : ('NA', 'NA'), # CCONEAX
(35, 1, 18, b'OES1') : ('NA', 'NA'), # CCONEAX
(60, 1, 10, b'OES1X') : ('NA', 'NA'), # DUM8/CCRAC2D
(61, 1, 10, b'OES1X') : ('NA', 'NA'), # DUM8/CCRAC3D
(69, 1, 21, b'OES1X1') : ('NA', 'NA'), # CBEND
(69, 2, 21, b'OES1X') : ('NA', 'NA'), # CBEND
(69, 3, 21, b'OES1X') : ('NA', 'NA'), # CBEND
(86, 1, 11, b'OESNL1X') : ('nonlinear_cgap_stress', NonlinearGapStressArray),
(86, 1, 11, b'OESNLXR') : ('nonlinear_cgap_stress', NonlinearGapStressArray),
(86, 1, 11, b'OESNLXD') : ('nonlinear_cgap_stress', NonlinearGapStressArray),
(94, 1, 51, b'OESNL1X') : ('nonlinear_cbeam_stress', RealNonlinearBeamStressArray),
(94, 1, 51, b'OESNLXR') : ('nonlinear_cbeam_stress', RealNonlinearBeamStressArray),
(85, 1, 82, b'OESNLXR') : ('NA', 'NA'), # TETRANL
(91, 1, 114, b'OESNLXD') : ('NA', 'NA'), # PENTANL
(91, 1, 114, b'OESNLXR') : ('NA', 'NA'), # PENTANL
(93, 1, 146, b'OESNL1X') : ('NA', 'NA'), # HEXANL
(93, 1, 146, b'OESNLXD') : ('NA', 'NA'), # HEXANL
(93, 1, 146, b'OESNLXR') : ('NA', 'NA'), # HEXANL
# 101-AABSF
(101, 2, 4, b'OES1X') : ('NA', 'NA'),
# 140-HEXA8FD
(140, 1, 162, b'OES1X1') : ('NA', 'NA'),
#201-QUAD4FD
(201, 1, 46, b'OESNLXD') : ('NA', 'NA'),
(201, 1, 46, b'OESNLXR') : ('NA', 'NA'),
# 145-VUHEXA (8 nodes)
(145, 1, 98, b'OES1X1') : ('NA', 'NA'),
(145, 2, 106, b'OES1X') : ('NA', 'NA'),
(145, 3, 106, b'OES1X') : ('NA', 'NA'),
# 146-VUPENTA (6 nodes)
(146, 1, 74, b'OES1X1') : ('NA', 'NA'),
(146, 2, 80, b'OES1X') : ('NA', 'NA'),
(146, 3, 80, b'OES1X') : ('NA', 'NA'),
# 147-VUTETRA (4 nodes)
(147, 1, 50, b'OES1X1') : ('NA', 'NA'),
(147, 2, 54, b'OES1X') : ('NA', 'NA'),
(147, 3, 54, b'OES1X') : ('NA', 'NA'),
# 139-QUAD4FD
# self.hyperelastic_cquad4_strain, HyperelasticQuad
(139, 1, 30, b'OES1X1') : ('NA', 'NA'),
# 189-VUQUAD
(189, 1, 74, b'OES1X1') : ('NA', 'NA'),
(189, 2, 114, b'OES1X') : ('NA', 'NA'),
# 47-AXIF2
(47, 2, 9, b'OES1X') : ('axif2', 'NA'),
# 48-AXIF3
(48, 2, 19, b'OES1X') : ('axif3', 'NA'),
# 190-VUTRIA
(190, 1, 57, b'OES1X1') : ('NA', 'NA'),
(190, 2, 87, b'OES1X') : ('NA', 'NA'),
(190, 3, 87, b'OES1X') : ('NA', 'NA'),
# 191-VUBEAM
#(191, 1, 60, b'OES1X1') : ('vubeam', 'NA'),
#(191, 2, 80, b'OES1X') : ('vubeam', 'NA'),
#(191, 3, 80, b'OES1X') : ('vubeam', 'NA'),
# 203-SLIF1D?
(203, 1, 14, b'OESNLBR') : ('slif1d', 'NA'),
# 50-SLOT3
(50, 2, 11, b'OES1X') : ('slot3', 'NA'),
# 51-SLOT4
(51, 2, 13, b'OES1X') : ('slot4', 'NA'),
# 160-PENTA6FD
(160, 1, 122, b'OES1X1') : ('cpenta', 'NA'),
# 161-TETRA4FD
(161, 1, 22, b'OES1X1') : ('ctetra', 'NA'),
# 162-TRIA3FD
(162, 1, 9, b'OES1X1') : ('ctria', 'NA'),
# 163-HEXAFD
(163, 1, 542, b'OES1X1') : ('chexa', 'NA'),
# 164-QUADFD
(164, 1, 65, b'OES1X1') : ('cquad', 'NA'),
# 165-PENTAFD
(165, 1, 422, b'OES1X1') : ('cpenta', 'NA'),
# 166-TETRAFD
(166, 1, 102, b'OES1X1') : ('ctetra', 'NA'),
# 167-TRIAFD
(167, 1, 23, b'OES1X1') : ('NA', 'NA'),
# 168-TRIAX3FD
(168, 1, 9, b'OES1X1') : ('ctriax3', 'NA'),
# 169-TRIAXFD
(169, 1, 23, b'OES1X1') : ('ctriax', 'NA'),
# 170-QUADX4FD
(170, 1, 30, b'OES1X1') : ('cquadx4fd', 'NA'),
# 171-QUADXFD
(171, 1, 65, b'OES1X1') : ('cquadx', 'NA'),
# 172-QUADRNL
(172, 1, 25, b'OESNLXR') : ('cquadrnl', 'NA'),
# 202-HEXA8FD
(202, 1, 122, b'OESNLXD') : ('chexa', 'NA'),
(202, 1, 122, b'OESNLXR') : ('chexa', 'NA'),
# 204-PENTA6FD
(204, 1, 92, b'OESNLXR') : ('cpenta', 'NA'),
# 211-TRIAFD
(211, 1, 35, b'OESNLXR') : ('ctria3', 'NA'),
# 213-TRIAXFD
(213, 1, 35, b'OESNLXR') : ('ctriax', 'NA'),
# 214-QUADX4FD
(214, 1, 46, b'OESNLXR') : ('cquadx4', 'NA'),
# 216-TETRA4FD
(216, 1, 62, b'OESNLXD') : ('NA', 'NA'),
(216, 1, 62, b'OESNLXR') : ('NA', 'NA'),
# 217-TRIA3FD
(217, 1, 35, b'OESNLXR') : ('ctria3', 'NA'),
# 218-HEXAFD
(218, 1, 122, b'OESNLXR') : ('chexa', 'NA'),
# 219-QUADFD
(219, 1, 46, b'OESNLXR') : ('cquad', 'NA'),
# 220-PENTAFD
(220, 1, 92, b'OESNLXR') : ('cpenta', 'NA'),
# 221-TETRAFD
(221, 1, 62, b'OESNLXR') : ('tetrafd', 'NA'),
# 222-TRIAX3FD
(222, 1, 35, b'OESNLXR') : ('ctriax3fd', 'NA'),
# 223-CQUADXFD
(223, 1, 46, b'OESNLXR') : ('cquadx', 'NA'),
# 226-BUSH
(226, 1, 19, b'OESNLXD') : ('cbush', 'NA'),
(226, 1, 19, b'OESNLXR') : ('cbush', 'NA'),
# 227-CTRIAR
(227, 1, 17, b'OES1X1') : ('ctriar', 'NA'),
(227, 1, 17, b'OES1X') : ('ctriar', 'NA'),
# 228-CQUADR
(228, 1, 17, b'OES1X1') : ('cquadr', 'NA'),
(228, 1, 17, b'OES1X') : ('cquadr', 'NA'),
# 232-QUADRLC
(232, 1, 11, b'OES1C') : ('cquadr', 'NA'),
(232, 1, 11, b'OESCP') : ('cquadr', 'NA'),
(232, 2, 13, b'OESVM1C') : ('cquadr', 'NA'), # freq nx
(232, 3, 13, b'OESVM1C') : ('cquadr', 'NA'), # freq nx
#(234, 1, 11) : ('cquadr', 'NA'), # bad?
# 233-TRIARLC
(233, 1, 11, b'OES1C') : ('ctriar', 'NA'),
(233, 2, 13, b'OESVM1C') : ('ctriar', 'NA'), # freq nx
(233, 3, 13, b'OESVM1C') : ('ctriar', 'NA'), # freq nx
# 235-CQUADR
(235, 1, 17, b'OES1X1') : ('NA', 'NA'),
(235, 2, 15, b'OES1X') : ('NA', 'NA'),
# 242-CTRAX
# 244-CTRAX6
(242, 1, 34, b'OES1X1') : ('ctrax', 'NA'),
(244, 1, 34, b'OES1X1') : ('ctrax6', 'NA'),
# 243-CQUADX4
# 245-CQUADX8
(243, 1, 42, b'OES1X1') : ('cquadx4', 'NA'),
(245, 1, 42, b'OES1X1') : ('cquadx8', 'NA'),
#256-CPYRAM
(255, 1, 130, b'OES1X1') : ('cpyram', 'NA'),
(255, 2, 82, b'OES1X') : ('cpyram', 'NA'),
(256, 1, 98, b'OESNLXD') : ('cpyram', 'NA'),
# 271-CPLSTN3
# 272-CPLSTN4
(271, 1, 6, b'OES1X1') : ('cplstn3', 'NA'),
(271, 1, 6, b'OES1X') : ('cplstn3', 'NA'),
(272, 1, 32, b'OES1X1') : ('cplstn4', 'NA'),
(272, 1, 32, b'OES1X') : ('cplstn4', 'NA'),
(273, 1, 26, b'OES1X1') : ('cplstn6', 'NA'),
(273, 1, 26, b'OES1X') : ('cplstn6', 'NA'),
(274, 1, 32, b'OES1X1') : ('cplstn3', 'NA'),
(274, 1, 32, b'OES1X') : ('cplstn3', 'NA'),
# 275-CPLSTS3
# 277-CPLSTS6
(275, 1, 6, b'OES1X1') : ('cplsts3', 'NA'),
(276, 1, 32, b'OES1X1') : ('cplsts4', 'NA'),
(277, 1, 26, b'OES1X1') : ('cplsts6', 'NA'),
(278, 1, 32, b'OES1X1') : ('cplsts8', 'NA'),
(1, 2, 5, 'OESVM1') : ('crod', 'NA'),
(10, 2, 5, 'OESVM1') : ('conrod', 'NA'),
(10, 2, 5, 'OES1X') : ('conrod', 'NA'),
(11, 2, 3, 'OESVM1') : ('celas1', 'NA'),
(12, 2, 3, 'OESVM1') : ('celas2', 'NA'),
(2, 2, 111, b'OESVM1') : ('cbeam', 'NA'),
(34, 2, 19, b'OESVM1') : ('cbar', 'NA'),
(4, 2, 5, 'OESVM1') : ('cshear', 'NA'),
(4, 2, 5, 'OES1X') : ('cshear', 'NA'),
(74, 2, 17, 'OESVM1') : ('ctria3', 'NA'),
(144, 2, 87, b'OESVM1') : ('cquad4', 'NA'),
(95, 2, 13, b'OESVM1C') : ('cquad4', 'NA'),
(95, 3, 13, b'OESVM1C') : ('cquad4', 'NA'),
(97, 2, 13, b'OESVM1C') : ('ctria3', 'NA'),
(97, 3, 13, b'OESVM1C') : ('ctria3', 'NA'),
(39, 2, 74, 'OESVM1') : ('ctetra', 'NA'),
(67, 2, 130, b'OESVM1') : ('chexa', 'NA'),
(68, 2, 102, b'OESVM1') : ('cpenta', 'NA'),
}
op2 = self.op2
key = (op2.element_type, op2.format_code, op2.num_wide, op2.table_name)
try:
return stress_mapper[key]
except KeyError: # pragma: no cover
op2.log.error(op2.code_information())
msg = ('stress_mapper (~line 850 of oes.py) does not contain the '
'following key and must be added\n'
'key=(element_type=%r, format_code=%r, num_wide=%r, table_name=%r) ' % key)
op2.log.error(msg)
#raise KeyError(msg)
raise
#return None, None
def get_oes_prefix_postfix(self):
"""
Creates the prefix/postfix that splits off ATO, CRM, PSD, nonlinear,
etc. results. We also fix some of the sort bits as typing:
STRESS(PLOT,SORT1,RALL) = ALL
will actually create the OESRMS2 table (depending on what else
is in your case control). However, it's in an OESATO2 table, so
we know it's really SORT2.
Also, if you're validating the sort_bit flags, *RMS2 and *NO2 are
actually SORT1 tables.
NX Case Control Block Description
=============== ========== ===========
NLSTRESS OESNLXR Nonlinear static stresses
BOUTPUT OESNLBR Slideline stresses
STRESS OESNLXD Nonlinear Transient Stresses
STRESS OES1C/OSTR1C Ply stresses/strains
STRESS OES1X Element stresses with intermediate (CBAR and CBEAM)
station stresses and stresses on nonlinear elements
STRESS OES/OESVM Element stresses (linear elements only)
STRAIN OSTR1 Element strains
STRESS/STRAIN DOES1/DOSTR1 Scaled Response Spectra
MODCON OSTRMC Modal contributions
"""
op2 = self.op2
prefix = ''
postfix = ''
table_name_bytes = op2.table_name
assert isinstance(table_name_bytes, bytes), table_name_bytes
is_sort1 = table_name_bytes in SORT1_TABLES_BYTES
if table_name_bytes in [b'OES1X1', b'OES1X', b'OSTR1X', b'OSTR1',
b'OES1C', b'OSTR1C', b'OES1', ]:
self._set_as_sort1()
elif table_name_bytes in [b'OES2', b'OSTR2', b'OES2C', b'OSTR2C']:
self._set_as_sort2()
#elif table_name_bytes in ['OESNLXR']:
#prefix = 'sideline_'
elif table_name_bytes in [b'OESNLXD', b'OESNL1X', b'OESNLXR', b'OESNL2']:
prefix = 'nonlinear_'
elif table_name_bytes in [b'OESNLXR2']:
prefix = 'nonlinear_'
elif table_name_bytes == b'OESNLBR':
prefix = 'sideline_'
elif table_name_bytes == b'OESRT':
#OESRT: Table of composite element strength ratios
prefix = 'strength_ratio.'
elif table_name_bytes in [b'OESCP', b'OESTRCP']:
# guessing
pass
#op2.sort_bits[0] = 0 # real; ???
#op2.sort_bits[1] = 0 # sort1
#op2.sort_bits[2] = 1 # random; ???
elif table_name_bytes in [b'OESVM1C', b'OSTRVM1C', b'OESVM1', b'OSTRVM1',
#b'OESVM1C', b'OSTRVM1C',
b'OESVM2', b'OSTRVM2',]:
prefix = 'modal_contribution.'
op2.to_nx(f' because table_name={table_name_bytes} was found')
#----------------------------------------------------------------
elif table_name_bytes in [b'OSTRMS1C']: #, b'OSTRMS1C']:
op2.format_code = 1
op2.sort_bits[0] = 0 # real
prefix = 'rms.'
elif table_name_bytes in [b'OESXRMS1']:
op2._analysis_code_fmt = b'i'
self._set_as_random()
self._set_as_sort1()
prefix = 'rms.'
elif table_name_bytes in [b'OESXRMS2']: # wrong?
self._set_as_random()
self._set_as_sort2()
prefix = 'rms.'
elif table_name_bytes in [b'OESXNO1']:
self._set_as_random()
self._set_as_sort1()
prefix = 'no.'
elif table_name_bytes in [b'OESXNO1C']:
# - ply-by-ply Stresses including:
# - von Mises Stress for PSDF (OESPSD1C),
# - Cumulative Root Mean Square output (OESXNO1C)
# - Positive Crossing (OESCRM1C) output sets
# - ply-by-ply Strains for:
# - PSDF (OSTPSD1C)
# - Cumulative Root Mean Square (OSTCRM1C) output sets
self._set_as_random()
self._set_as_sort1()
prefix = 'crm.'
elif table_name_bytes in [b'OESXRM1C']:
self._set_as_random()
self._set_as_sort1()
prefix = 'rms.'
#print(op2.code_information())
elif table_name_bytes in [b'OESRMS1', b'OSTRRMS1']:
op2._analysis_code_fmt = b'i'
self._set_as_random()
self._set_as_sort1()
prefix = 'rms.'
elif table_name_bytes in [b'OESRMS2', b'OSTRRMS2']:
op2._analysis_code_fmt = b'i'
self._set_as_random()
self._set_as_sort1() # it's not really SORT2...
op2.sort_method = 1
if table_name_bytes == b'OESRMS2':
op2.table_name = b'OESRMS1'
elif table_name_bytes == b'OSTRRMS2':
op2.table_name = b'OSTRRMS1'
else:
raise NotImplementedError(table_name_bytes)
#assert op2.sort_method == 2, op2.code_information()
prefix = 'rms.'
elif table_name_bytes in [b'OESNO1', b'OSTRNO1', b'OSTNO1C']:
assert op2.sort_method == 1, op2.code_information()
self._set_as_random()
prefix = 'no.'
elif table_name_bytes in [b'OESNO2', b'OSTRNO2']:
self._set_as_random()
self._set_as_sort1()
op2.data_code['nonlinear_factor'] = None
op2._analysis_code_fmt = b'i'
prefix = 'no.'
#----------------------------------------------------------------
elif table_name_bytes in [b'OESPSD1', b'OSTRPSD1']:
#op2.format_code = 1
op2.sort_bits[0] = 0 # real
op2.sort_bits[1] = 0 # sort1
op2.sort_bits[2] = 1 # random
prefix = 'psd.'
elif table_name_bytes in [b'OESPSD2', b'OSTRPSD2',
b'OESPSD2C', b'OSTPSD2C']:
if 0:
# TODO: the sort bits might not be right...isat_random
#print(op2.code_information())
#print(op2.sort_bits)
op2.format_code = 1
#op2.sort_bits[0] = 0 # real
#op2.sort_bits[1] = 1 # sort2
#op2.sort_bits[2] = 1 # random
op2.sort_bits.is_real = 1
op2.sort_bits.is_sort2 = 1
op2.sort_bits.is_random = 1
#print(op2.code_information())
#print(op2.sort_bits)
else:
op2.format_code = 1 # real
op2.result_type = 2 # random
op2.sort_bits[0] = 0 # real
op2.sort_bits[2] = 1 # random
prefix = 'psd.'
elif table_name_bytes in [b'OESATO1', b'OSTRATO1']:
prefix = 'ato.'
elif table_name_bytes in [b'OESATO2', b'OSTRATO2']:
prefix = 'ato.'
elif table_name_bytes in [b'OESCRM1', b'OSTRCRM1']:
prefix = 'crm.'
op2.result_type = 2 # random
op2.sort_bits[2] = 1 # random
elif table_name_bytes in [b'OESCRM2', b'OSTRCRM2']:
# sort2, random
op2.format_code = 1 # real
op2.result_type = 2 # random
op2.sort_bits[0] = 0 # real
op2.sort_bits[1] = 1 # sort2
op2.sort_bits[2] = 1 # random
op2.sort_method = 2
prefix = 'crm.'
#elif op2.table_name in ['DOES1', 'DOSTR1']:
#prefix = 'scaled_response_spectra_'
#elif op2.table_name in ['OESCP']:
elif table_name_bytes in [b'RASCONS']: #, b'OSTRMS1C']:
op2.format_code = 1
op2.sort_bits[0] = 0 # real
prefix = 'RASCONS.'
elif table_name_bytes in [b'RAECONS']: #, b'OSTRMS1C']:
op2.format_code = 1
op2.sort_bits[0] = 0 # real
prefix = 'RAECONS.'
elif table_name_bytes in [b'RAPCONS']: #, b'OSTRMS1C']:
op2.format_code = 1
op2.sort_bits[0] = 0 # real
prefix = 'RAPCONS.'
elif table_name_bytes in [b'RASEATC']: #, b'OSTRMS1C']:
self._set_as_real()
prefix = 'RASEATC.'
elif table_name_bytes in [b'RAEEATC']: #, b'OSTRMS1C']:
self._set_as_real()
prefix = 'RAEEATC.'
elif table_name_bytes in [b'RAPEATC']: #, b'OSTRMS1C']:
self._set_as_real()
prefix = 'RAPEATC.'
elif table_name_bytes in [b'OESMC1', b'OSTRMC1']:
prefix = 'modal_contribution.'
elif table_name_bytes in [b'OESC1']:
# NASA95
prefix = ''
else:
raise NotImplementedError(op2.table_name)
#if op2.analysis_code == 1:
#op2.sort_bits[1] = 0 # sort1
#op2.sort_method = 1
op2.data_code['sort_bits'] = op2.sort_bits
op2.data_code['nonlinear_factor'] = op2.nonlinear_factor
return prefix, postfix
def _set_as_real(self):
op2 = self.op2
op2.format_code = 1
op2.result_type = 0
op2.sort_bits[0] = 0 # real
op2.sort_bits.is_real = True
op2.sort_bits.is_random = False
def _set_as_random(self):
op2 = self.op2
op2.format_code = 1 # real
op2.result_type = 2 # random
op2.sort_bits.is_real = True
op2.sort_bits.is_random = True
def _set_as_sort1(self):
op2 = self.op2
op2.sort_bits[1] = 0 # sort1
op2.sort_method = 1
def _set_as_sort2(self):
op2 = self.op2
op2.sort_bits[1] = 1 # sort2
op2.sort_method = 2
def _read_oesmc_4(self, data: bytes, ndata: int) -> int:
op2 = self.op2
n = 0
log = op2.log
if op2.element_type == 1:
assert op2.num_wide == 4, op2.code_information()
if op2.read_mode == 1:
return ndata
ntotal = 16 * self.factor # 4*4
nelements = ndata // ntotal
fmt = mapfmt(op2._endian + b'i3f', self.size)
struct1 = Struct(fmt)
for ielem in range(nelements):
edata = data[n:n+ntotal]
out = struct1.unpack(edata)
#print(out)
n += ntotal
log.warning(f'skipping {op2.table_name} with {op2.element_name}-{op2.element_type}')
else:
raise NotImplementedError(op2.code_information())
return n
def _read_oes1_loads_nasa95(self, data, ndata: int) -> Tuple[int, Any, Any]:
"""Reads OES1 subtable 4 for NASA 95"""
op2 = self.op2
prefix, postfix = self.get_oes_prefix_postfix()
result_type = op2.result_type
#self._apply_oes_ato_crm_psd_rms_no('') # TODO: just testing
n = 0
is_magnitude_phase = op2.is_magnitude_phase()
dt = op2.nonlinear_factor
if op2.is_stress:
result_name = 'stress'
#stress_name = 'STRESS'
else:
result_name = 'strain'
#stress_name = 'STRAIN'
if op2._results.is_not_saved(result_name):
return ndata
if op2.element_type in [1, 3, 10]: # rods
# 1-CROD
# 3-CTUBE
# 10-CONROD
n, nelements, ntotal = self._oes_crod(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
#elif op2.element_type == 2: # CBEAM
#n, nelements, ntotal = self._oes_cbeam(data, ndata, dt, is_magnitude_phase,
#result_type, prefix, postfix)
elif op2.element_type == 4: # CSHEAR
n, nelements, ntotal = self._oes_cshear(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [11, 12, 13, 14]: # springs
# 11-CELAS1
# 12-CELAS2
# 13-CELAS3
# 14-CELAS4
n, nelements, ntotal = self._oes_celas(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 19:
# 19-CQUAD1
n, nelements, ntotal = self._oes_cquad4_33(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 34: # CBAR
n, nelements, ntotal = self._oes_cbar_34(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 64: # CQUAD4
op2.element_type = 33 # faking...
n, nelements, ntotal = self._oes_cquad4_33(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 83:
# 83: TRIA3
n, nelements, ntotal = self._oes_ctria3(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
#elif op2.element_type in [64, 70, 75, 82, 144]: # bilinear plates
# 64-CQUAD8
# 70-CTRIAR
# 75-CTRIA6
# 82-CQUADR
# 144-CQUAD4-bilinear
#n, nelements, ntotal = self._oes_cquad4_144(data, ndata, dt, is_magnitude_phase,
#result_type, prefix, postfix)
else:
#msg = 'sort1 Type=%s num=%s' % (op2.element_name, op2.element_type)
msg = op2.code_information()
print(msg)
return op2._not_implemented_or_skip(data, ndata, msg)
if nelements is None:
return n
assert ndata > 0, ndata
assert nelements > 0, 'nelements=%r element_type=%s element_name=%r' % (nelements, op2.element_type, op2.element_name)
#assert ndata % ntotal == 0, '%s n=%s nwide=%s len=%s ntotal=%s' % (op2.element_name, ndata % ntotal, ndata % op2.num_wide, ndata, ntotal)
assert op2.num_wide * 4 == ntotal, 'numwide*4=%s ntotal=%s' % (op2.num_wide * 4, ntotal)
assert op2.thermal == 0, "thermal = %%s" % op2.thermal
assert n > 0, f'n = {n} result_name={result_name}'
return n
def _read_oes1_loads(self, data, ndata: int):
"""Reads OES op2.thermal=0 stress/strain"""
op2 = self.op2
log = op2.log
prefix, postfix = self.get_oes_prefix_postfix()
result_type = op2.result_type
#self._apply_oes_ato_crm_psd_rms_no('') # TODO: just testing
n = 0
is_magnitude_phase = op2.is_magnitude_phase()
dt = op2.nonlinear_factor
#flag = 'element_id'
if op2.is_stress:
result_name = 'stress'
stress_name = 'STRESS'
else:
result_name = 'strain'
stress_name = 'STRAIN'
#if op2.is_stress:
#_result_name, _class_obj = self.get_stress_mapper()
if op2.table_name_str == 'OESXRMS1':
assert op2.sort_method == 1, op2.code_information()
if op2._results.is_not_saved(result_name):
return ndata
if op2.element_type in [1, 3, 10]: # rods
# 1-CROD
# 3-CTUBE
# 10-CONROD
n, nelements, ntotal = self._oes_crod(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 2: # CBEAM
n, nelements, ntotal = self._oes_cbeam(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 4: # CSHEAR
n, nelements, ntotal = self._oes_cshear(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [11, 12, 13, 14]: # springs
# 11-CELAS1
# 12-CELAS2
# 13-CELAS3
# 14-CELAS4
n, nelements, ntotal = self._oes_celas(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 34: # CBAR
n, nelements, ntotal = self._oes_cbar_34(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [39, 67, 68, 255]: # solid stress
# 39-CTETRA
# 67-CHEXA
# 68-CPENTA
# 255-CPYRAM
n, nelements, ntotal = self._oes_csolid(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [140]:
# 144-CHEXAFD
#TestOP2.test_bdf_op2_other_23
' S T R E S S E S I N H Y P E R E L A S T I C H E X A H E D R O N E L E M E N T S ( H E X A F D ) '
' GRID/ POINT --------CAUCHY STRESSES--------- DIR. COSINES MEAN'
' ELEMENT-ID GAUSS ID NORMAL SHEAR PRINCIPAL -A- -B- -C- PRESSURE'
'0 211 GAUS'
' 1 X 2.627481E+02 XY 8.335709E+01 A 3.040629E+02 LX 0.94-0.34-0.00 -8.630411E+01'
' Y -2.599833E+00 YZ -1.138583E+01 B -5.463896E+01 LY 0.25 0.68 0.69'
' Z -1.235891E+00 ZX 7.851368E+01 C 9.488372E+00 LZ 0.23 0.64-0.73'
' 2 X 1.541617E+02 XY 4.154493E+01 A 1.964021E+02 LX 0.88-0.47-0.00 -8.630411E+01'
' Y 5.412691E+01 YZ -3.499344E+00 B 6.221669E+00 LY 0.25 0.46-0.85'
' Z 5.062376E+01 ZX 6.725376E+01 C 5.628857E+01 LZ 0.40 0.75 0.53'
n, nelements, ntotal = self._oes_csolid_linear_hyperelastic_cosine(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [160, 163, 166,
161, # centroid
165,]:
# nonlinear hyperelastic solids
# 160-CPENTAFD
# 163-CHEXAFD
# 166-CTETRAFD
# centroid??
# 161-CTETRAFD
# many nodes?
# 165-CPENTAFD
n, nelements, ntotal = self._oes_csolid_linear_hyperelastic(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [202, 204,
216, 218, 220, 221]:
# nonlinear hyperelastic solids
# 202-CHEXAFD
# 204-CPENTAFD
# also nonlinear hyperelastic solid, but somewhat different
' N O N L I N E A R S T R E S S E S I N H Y P E R E L A S T I C H E X A H E D R O N E L E M E N T S ( HEXA8FD )'
' '
' ELEMENT GRID/ POINT CAUCHY STRESSES/ LOG STRAINS PRESSURE VOL. STRAIN'
' ID GAUSS ID X Y Z XY YZ ZX'
'0 401 GRID 401 1.9128E+03 6.2729E+02 -3.4828E+02 -7.5176E+01 7.8259E+00 -2.5001E+02 7.3060E+02 7.3060E-03'
' 6.8270E-01 -6.5437E-04 -1.2874E+00 -3.9645E-02 -2.9882E-03 -5.9975E-02'
# 216-TETRAFD
# 218-HEXAFD
# 220-PENTAFD
# 221-TETRAFD
n, nelements, ntotal = self._oes_csolid_nonlinear_hyperelastic(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [300, 301, 302, 303]: # solid stress
# solids without stress eigenvectors
# 300-CHEXA
# 301-CPENTA
# 302-CTETRA
# 303-CPYRAM
n, nelements, ntotal = self._oes_csolid2(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [306, 307]:
# 306-CHEXALN
# 307-CPENTALN
n, nelements, ntotal = self._oes_csolid_composite(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
#=========================
# plates
elif op2.element_type in [33, 228]:
# 33: CQUAD4-centroidal
# 228: CQUADR-centroidal
n, nelements, ntotal = self._oes_cquad4_33(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [74, 227]: # 229???
# 74: TRIA3
# 227: TRIAR
n, nelements, ntotal = self._oes_ctria3(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [64, 70, 75, 82, 144]: # bilinear plates
# 64-CQUAD8
# 70-CTRIAR
# 75-CTRIA6
# 82-CQUADR
# 144-CQUAD4-bilinear
n, nelements, ntotal = self._oes_cquad4_144(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [88, 90]: # nonlinear shells
# 88-CTRIA3NL
# 90-CQUAD4NL
n, nelements, ntotal = self._oes_shells_nonlinear(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [95, 96, 97, 98, 232, 233]: # composite shell
# 95 - CQUAD4
# 96 - CQUAD8
# 97 - CTRIA3
# 98 - CTRIA6 (composite)
# 232 - QUADRLC (CQUADR-composite)
# 233 - TRIARLC (CTRIAR-composite)
n, nelements, ntotal = self._oes_shells_composite(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 53: # axial plates - ctriax6
n, nelements, ntotal = self._oes_ctriax6(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 102: # cbush
n, nelements, ntotal = self._oes_cbush(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 40: # cbush1d
n, nelements, ntotal = self._oes_cbush1d(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [87, 89, 92]: # nonlinear rods
# 87-CTUBENL
# 89-RODNL
# 92-CONRODNL
n, nelements, ntotal = self._oes_crod_nonlinear(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [224, 225]: # nonlinear spring
# 224-CELAS1
# 225-CELAS3
# NonlinearSpringStress
n, nelements, ntotal = self._oes_celas_nonlinear(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 69: # cbend
# 69-CBEND
n, nelements, ntotal = self._oes_cbend(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 86: # cgap
# 86-GAPNL
n, nelements, ntotal = self._oes_cgap_nonlinear(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 94:
# 94-BEAMNL
n, nelements, ntotal = self._oes_cbeam_nonlinear(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [85, 91, 93, 256]:
# 256-PYRAM
n, nelements, ntotal = self._oes_csolid_nonlinear(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 100: # bars
# 100-BARS
n, nelements, ntotal = self._oes_cbar_100(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
#-----------------------------------------------------------------------
elif op2.element_type == 139:
n, nelements, ntotal = self._oes_hyperelastic_quad(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type == 226:
# 226-BUSHNL
n, nelements, ntotal = self._oes_cbush_nonlinear(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [271, 275]:
#271 CPLSTN3 Triangle plane strain linear format (Center Only)
#272 CPLSTN4 Quadrilateral plane strain linear format (Center and Corners)
#273 CPLSTN6 Triangle plane strain linear format (Center and Corners)
#274 CPLSTN8 Quadrilateral plane strain linear format (Center and Corners)
#275 CPLSTS3 Triangle plane stress linear Format (Center Only)
#276 CPLSTS4 Quadrilateral plane stress linear format (Center and Corners)
#277 CPLSTS6 Triangle plane stress linear format (Center and Corners)
#278 CPLSTS8 Quadrilateral plane stress linear format (Center and Corners)
# 271-CPLSTN3
# 275-CPLSTS3
n, nelements, ntotal = self._oes_plate_stress_34(data, ndata, dt, is_magnitude_phase,
stress_name, prefix, postfix)
elif op2.element_type in [276, 277, 278]:
# 276-CPLSTS4
# 277-CPLSTS6
# 278-CPLSTS8
n, nelements, ntotal = self._oes_plate_stress_68(data, ndata, dt, is_magnitude_phase,
stress_name, prefix, postfix)
elif op2.element_type == 35: # CON
return ndata
elif op2.element_type in [60, 61]:
# 60-DUM8
# 61-DUM9
return ndata
elif op2.element_type == 101: # AABSF
return ndata
elif op2.element_type in [47, 48, 189, 190]:
# 47-AXIF2
# 48-AXIF3
# 189-???
# 190-VUTRIA
return ndata
elif op2.element_type in [50, 51, 203]:
# 203-SLIF1D?
# 50-SLOT3
# 51-SLOT4
return ndata
elif op2.element_type in [162, 164, 167, 168,
169, 170, 171, 172,
218, 211, 213, 214,
217, 219, 222, 223,
232, 235]:
# 162-TRIA3FD
# 164-QUADFD
# 167-TRIAFD
# 168-TRIAX3FD
# 169-TRIAXFD
# 170-QUADX4FD
# 171-QUADXFD
# 172-QUADRNL
# 211-TRIAFD
# 213-TRIAXFD
# 214-QUADX4FD
# 217-TRIA3FD
# 219-QUADFD
# 223-QUADXFD
# 222-TRIAX3FD
# 232-QUADRLC
# 235-CQUADR
return op2._not_implemented_or_skip(data, ndata, op2.code_information())
elif op2.element_type in [145, 146, 147, # VU-solid
189, # VUQUAD
191]: # VUBEAM
msg = f'{op2.element_name}-{op2.element_type} has been removed'
return op2._not_implemented_or_skip(data, ndata, msg)
elif op2.element_type == 118: # WELDP
# ELEMENT-ID = 100
# S T R A I N S I N W E L D E L E M E N T S ( C W E L D P )
#
# AXIAL MAX STRAIN MIN STRAIN MAX STRAIN MIN STRAIN MAXIMUM
# TIME STRAIN END-A END-A END-B END-B SHEAR STRAIN
# 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
# 1.000000E-01 0.0 0.0 0.0 0.0 0.0 0.0 0.0
# 2.000000E-01 1.652614E-02 2.381662E+02 -2.381332E+02 2.381623E+02 -2.381293E+02 5.678050E+01 0.0
# 3.000000E-01 6.468190E-03 4.706443E+01 -4.705150E+01 4.703462E+01 -4.702168E+01 1.121626E+01 0.0
#ints = (1001, -0.0007072892040014267, 0.6948937773704529, -0.6963083744049072, 0.6948915123939514, -0.6963061094284058, 6.161498617984762e-07, 0)
#floats = (1001, -0.0007072892040014267, 0.6948937773704529, -0.6963083744049072, 0.6948915123939514, -0.6963061094284058, 6.161498617984762e-07, 0.0)
#if data:
#self.show_data(data)
log.warning('skipping WELDP')
return ndata
elif op2.element_type == 126: # FASTP
#C:\MSC.Software\msc_nastran_runs\cf103e.op2
# S T R E S S E S I N F A S T E N E R E L E M E N T S ( C F A S T )
#
# ELEMENT-ID FORCE-X FORCE-Y FORCE-Z MOMENT-X MOMENT-Y MOMENT-Z
# data = (301, -4.547473508864641e-09, 1.8571810755929619e-09, -7.94031507211912e-10, -0.0, -0.0, 0.0,
# 401, -4.547473508864641e-09, -2.0263790645458357e-09, 1.1617373729677638e-09, -0.0, 0.0, 0.0)
ntotal = op2.num_wide * 4 * self.factor
nelements = ndata // ntotal
assert ndata % ntotal == 0, 'is this a FASTP result?'
#op2.log.warning('skipping FASTP')
#else:
#msg = op2.code_information()
log.warning('skipping FASTP')
return ndata
#return op2._not_implemented_or_skip(data, ndata, msg)
elif op2.is_nx and op2.element_type in [269, 270]:
# 269-CHEXAL
# 270-PENTAL
n, nelements, ntotal = self._oes_composite_solid_nx(data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix)
elif op2.element_type in [159, 184,
200, 201, 236, 237, 242, 243, 244, 245,
272, 273, 274]:
# 159-SEAMP
# 184-CBEAM3
#
# 200-WELD
# 201 CQUAD4FD
# 236 CTRIAR-corner
# 237 CTRIAR-center
# 242-CHEXA?
# 243 CQUADX4
# 244 CTRAX6
# 245 CQUADX8
#
# 272 CPLSTN4
# 273 CPLSTN6
# 274 CPLSTN3
log.warning(f'skipping {op2.element_name}-{op2.element_type}')
return ndata
elif op2.element_type in [312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323,
343, 344, 345, 346, 347, 348, 349,
350, 351, 352, 355, 356, 357, 358, 363]:
#
# 312 TRAX3
# 313 QUADX4
# 314 TRAX6
# 315 QUADX8
# 316 PLSTN3
# 317 PLSTN4
# 318 PLSTN6
# 319 PLSTN8
# 320 PLSTS3
# 321 PLSTS4
# 322 PLSTS6
# 323 PLSTS8
#
# 343 CTRIA6 SOL 401
# 344 CQUAD8 SOL 401
# 345 CTRIAR SOL 401
# 346 CQUADR SOL 401
# 347 CBAR SOL 401
# 348 CBEAM SOL 401
# 349 CBUSH1D SOL 401
#
# 350 CELAS1 SOL 401
# 351 CELAS2 SOL 401
# 352 CBUSH SOL 401
# 355 Composite triangular shell element (CTRIA6); SOL 402?
# 356 Composite quadrilateral shell element (CQUAD8); SOL 402?
# 357 Composite triangular shell element (CTRIAR); SOL 402?
# 358 Composite quadrilateral shell element (CQUADR); SOL 402?
# 363 CROD SOL 402
log.warning(f'skipping {op2.element_name}-{op2.element_type}')
return ndata
else:
#msg = 'sort1 Type=%s num=%s' % (op2.element_name, op2.element_type)
msg = op2.code_information()
#raise NotImplementedError(msg)
return op2._not_implemented_or_skip(data, ndata, msg)
try:
nelements
except NameError:
raise RuntimeError(op2.code_information())
if nelements is None:
return n
#self.check_element_ids()
assert ndata > 0, ndata
assert nelements > 0, f'nelements={nelements} element_type={op2.element_type} element_name={op2.element_name!r}'
#assert ndata % ntotal == 0, '%s n=%s nwide=%s len=%s ntotal=%s' % (op2.element_name, ndata % ntotal, ndata % op2.num_wide, ndata, ntotal)
assert op2.num_wide * 4 * self.factor == ntotal, f'numwide*4={op2.num_wide*4} ntotal={ntotal} element_name={op2.element_name!r}\n{op2.code_information()}'
assert op2.thermal == 0, "thermal = %%s" % op2.thermal
assert n is not None and n > 0, f'n={n} result_name={result_name}\n{op2.code_information()}'
#if self.is_sort2:
#assert len(np.unique(op2.obj._times)) == len(op2.obj._times), f'{op2.obj._times.tolist()}\n{op2.code_information()}'
return n
def check_element_ids(self):
op2 = self.op2
if op2.read_mode == 1:
return
if op2.is_sort1:
obj = op2.obj
if obj is None:
raise RuntimeError('obj is None...\n' + op2.code_information())
if hasattr(obj, 'element_node'):
eids = obj.element_node[:, 0]
elif hasattr(obj, 'element_layer'):
eids = obj.element_layer[:, 0]
elif hasattr(obj, 'element'):
eids = obj.element
else:
print(op2.code_information())
raise RuntimeError(''.join(obj.get_stats()))
if eids.min() <= 0:
#print(obj.code_information())
print(''.join(obj.get_stats()))
raise RuntimeError(f'{op2.element_name}-{op2.element_type}: {eids}')
#else:
#assert._times
def _create_nodes_object(self, nnodes, result_name, slot, obj_vector):
"""same as _create_oes_object4 except it adds to the nnodes parameter"""
op2 = self.op2
auto_return = False
#is_vectorized = True
is_vectorized = op2._is_vectorized(obj_vector)
#print("vectorized...read_mode=%s...%s; %s" % (op2.read_mode, result_name, is_vectorized))
if is_vectorized:
if op2.read_mode == 1:
#print('oes-op2.nonlinear_factor =', op2.nonlinear_factor)
#print(op2.data_code)
op2.create_transient_object(result_name, slot, obj_vector)
#print("read_mode 1; ntimes=%s" % op2.obj.ntimes)
op2.result_names.add(result_name)
#print('op2.obj =', op2.obj)
op2.obj.nnodes += nnodes
auto_return = True
elif op2.read_mode == 2:
self.code = op2._get_code()
#op2.log.info("code = %s" % str(self.code))
#print("code = %s" % str(self.code))
# if this is failing, you probably set obj_vector to None...
try:
op2.obj = slot[self.code]
except KeyError:
msg = 'Could not find key=%s in result=%r\n' % (self.code, result_name)
msg += "There's probably an extra check for read_mode=1..."
self.op2.log.error(msg)
raise
#op2.obj.update_data_code(op2.data_code)
build_obj(op2.obj)
else: # not vectorized
auto_return = True
else:
auto_return = True
return auto_return, is_vectorized
def _create_ntotal_object(self, ntotal, result_name, slot, obj_vector):
"""same as _create_oes_object4 except it adds to the ntotal parameter"""
op2 = self.op2
auto_return = False
#is_vectorized = True
is_vectorized = op2._is_vectorized(obj_vector)
#print("vectorized...read_mode=%s...%s; %s" % (op2.read_mode, result_name, is_vectorized))
if is_vectorized:
if op2.read_mode == 1:
#print('oes-op2.nonlinear_factor =', op2.nonlinear_factor)
#print(op2.data_code)
op2.create_transient_object(result_name, slot, obj_vector)
#print("read_mode 1; ntimes=%s" % op2.obj.ntimes)
op2.result_names.add(result_name)
#print('op2.obj =', op2.obj)
op2.obj.ntotal += ntotal
auto_return = True
elif op2.read_mode == 2:
self.code = op2._get_code()
#op2.log.info("code = %s" % str(self.code))
#print("code = %s" % str(self.code))
# if this is failing, you probably set obj_vector to None...
try:
op2.obj = slot[self.code]
except KeyError:
msg = 'Could not find key=%s in result=%r\n' % (self.code, result_name)
msg += "There's probably an extra check for read_mode=1..."
op2.log.error(msg)
raise
#op2.obj.update_data_code(op2.data_code)
build_obj(op2.obj)
else: # not vectorized
auto_return = True
else:
auto_return = True
return auto_return, is_vectorized
def _oes_celas(self, data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix):
"""
reads stress/strain for element type:
- 11 : CELAS1
- 12 : CELAS2
- 13 : CELAS3
- 14 : CELAS4
"""
op2 = self.op2
n = 0
if op2.is_stress:
if prefix == '' and postfix == '':
prefix = 'stress.'
obj_real = RealSpringStressArray
obj_complex = ComplexSpringStressArray
if op2.element_type == 11:
result_name = prefix + 'celas1_stress' + postfix
elif op2.element_type == 12:
result_name = prefix + 'celas2_stress' + postfix
elif op2.element_type == 13:
result_name = prefix + 'celas3_stress' + postfix
elif op2.element_type == 14:
result_name = prefix + 'celas4_stress' + postfix
else:
raise RuntimeError(op2.element_type)
else:
if prefix == '' and postfix == '':
prefix = 'strain.'
obj_real = RealSpringStrainArray
obj_complex = ComplexSpringStrainArray
if op2.element_type == 11:
result_name = prefix + 'celas1_strain' + postfix
elif op2.element_type == 12:
result_name = prefix + 'celas2_strain' + postfix
elif op2.element_type == 13:
result_name = prefix + 'celas3_strain' + postfix
elif op2.element_type == 14:
result_name = prefix + 'celas4_strain' + postfix
else:
raise RuntimeError(op2.element_type)
if op2._results.is_not_saved(result_name):
return ndata, None, None
log = op2.log
op2._results._found_result(result_name)
slot = op2.get_result(result_name)
if op2.format_code == 1 and op2.num_wide == 2: # real
ntotal = 8 * self.factor # 2 * 4
nelements = ndata // ntotal
auto_return, is_vectorized = op2._create_oes_object4(
nelements, result_name, slot, obj_real)
if auto_return:
assert ntotal == op2.num_wide * 4
return nelements * ntotal, None, None
obj = op2.obj
if op2.use_vector and is_vectorized and op2.sort_method == 1:
n = nelements * 4 * op2.num_wide
itotal = obj.ielement
ielement2 = obj.itotal + nelements
itotal2 = ielement2
floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 2)
obj._times[obj.itime] = dt
self.obj_set_element(obj, itotal, itotal2, data, nelements)
#(eid_device, stress)
obj.data[obj.itime, itotal:itotal2, 0] = floats[:, 1].copy()
obj.itotal = itotal2
obj.ielement = ielement2
else:
if is_vectorized and op2.use_vector: # pragma: no cover
log.debug('vectorize CELASx real SORT%s' % op2.sort_method)
n = oes_celas_real_2(op2, data, obj, nelements, ntotal, dt)
elif op2.format_code in [2, 3] and op2.num_wide == 3: # imag
ntotal = 12 * self.factor
nelements = ndata // ntotal
nelements = ndata // ntotal
auto_return, is_vectorized = op2._create_oes_object4(
nelements, result_name, slot, obj_complex)
if auto_return:
return nelements * ntotal, None, None
obj = op2.obj
assert obj is not None, op2.code_information()
if op2.use_vector and is_vectorized and op2.sort_method == 1:
n = nelements * ntotal
itotal = obj.ielement
ielement2 = obj.itotal + nelements
itotal2 = ielement2
floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 3).copy()
obj._times[obj.itime] = dt
self.obj_set_element(obj, itotal, itotal2, data, nelements)
if is_magnitude_phase:
mag = floats[:, 1]
phase = floats[:, 2]
rtheta = radians(phase)
real_imag = mag * (cos(rtheta) + 1.j * sin(rtheta))
else:
real = floats[:, 1]
imag = floats[:, 2]
real_imag = real + 1.j * imag
obj.data[obj.itime, itotal:itotal2, 0] = real_imag
obj.itotal = itotal2
obj.ielement = ielement2
else:
if is_vectorized and op2.use_vector: # pragma: no cover
log.debug('vectorize CELASx imag SORT%s' % op2.sort_method)
n = oes_celas_complex_3(op2, data, obj,
nelements, ntotal,
dt, is_magnitude_phase)
elif op2.format_code == 1 and op2.num_wide == 3: # random
raise RuntimeError(op2.code_information())
#msg = op2.code_information()
#return op2._not_implemented_or_skip(data, ndata, msg)
else: # pragma: no cover
raise RuntimeError(op2.code_information())
return n, nelements, ntotal
    def _oes_crod(self, data, ndata, dt, is_magnitude_phase,
                  result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 1 : CROD
         - 3 : CTUBE
         - 10 : CONROD

        Returns (n, nelements, ntotal); nelements/ntotal are None when the
        table is skipped (not saved / sizing pass).
        """
        op2 = self.op2
        n = 0
        # pick the result-array classes and result name by stress/strain
        # and element type
        if op2.is_stress:
            obj_vector_real = RealRodStressArray
            obj_vector_complex = ComplexRodStressArray
            obj_vector_random = RandomRodStressArray
            if op2.element_type == 1: # CROD
                result_name = prefix + 'crod_stress' + postfix
            elif op2.element_type == 3: # CTUBE
                result_name = prefix + 'ctube_stress' + postfix
            elif op2.element_type == 10: # CONROD
                result_name = prefix + 'conrod_stress' + postfix
            else: # pragma: no cover
                msg = op2.code_information()
                return op2._not_implemented_or_skip(data, ndata, msg)
        else:
            obj_vector_real = RealRodStrainArray
            obj_vector_complex = ComplexRodStrainArray
            obj_vector_random = RandomRodStrainArray
            if op2.element_type == 1: # CROD
                result_name = prefix + 'crod_strain' + postfix
            elif op2.element_type == 3: # CTUBE
                result_name = prefix + 'ctube_strain' + postfix
            elif op2.element_type == 10: # CONROD
                result_name = prefix + 'conrod_strain' + postfix
            else: # pragma: no cover
                msg = op2.code_information()
                return op2._not_implemented_or_skip(data, ndata, msg)

        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        #result_name, unused_is_random = self._apply_oes_ato_crm_psd_rms_no(result_name)
        slot = op2.get_result(result_name)
        # dispatch by result type (0=real, 1=complex, 2=random) and num_wide
        if result_type == 0 and op2.num_wide == 5: # real
            ntotal = 5 * 4 * self.factor
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                # fast path: bulk-load all elements with numpy
                n = nelements * 4 * op2.num_wide
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 5)
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, itotal, itotal2, data, nelements)

                # [axial, SMa, torsion, SMt] -> columns 1: of the record
                obj.data[obj.itime, itotal:itotal2, :] = floats[:, 1:].copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug('vectorize CROD real SORT%s' % op2.sort_method)
                if op2.is_debug_file:
                    op2.binary_debug.write('  [cap, element1, element2, ..., cap]\n')
                    op2.binary_debug.write('  cap = %i  # assume 1 cap when there could have been multiple\n' % ndata)
                    op2.binary_debug.write('  #elementi = [eid_device, axial, axial_margin, torsion, torsion_margin]\n')
                    op2.binary_debug.write('  nelements=%i; nnodes=1 # centroid\n' % nelements)
                n = oes_crod_real_5(op2, data, obj, nelements, ntotal, dt)

        elif result_type == 1 and op2.num_wide == 5: # imag
            ntotal = 20 * self.factor
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_complex)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * ntotal
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 5)
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, itotal, itotal2, data, nelements)
                # columns [1, 3] are the real/magnitude parts,
                # [2, 4] the imag/phase parts
                real_imag = apply_mag_phase(floats, is_magnitude_phase, [1, 3], [2, 4])
                obj.data[obj.itime, itotal:itotal2, :] = real_imag
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug('vectorize CROD imag SORT%s' % op2.sort_method)
                n = oes_crod_complex_5(op2, data, obj, nelements, ntotal, dt, is_magnitude_phase)

        #elif op2.format_code in [2, 3] and op2.num_wide == 8:  # is this imag ???
            #ntotal = 32
            #s = self.self.struct_i
            #nelements = ndata // ntotal
            #for i in range(nelements):
                #edata = data[n:n + 4]
                #eid_device, = s.unpack(edata)
                #assert eid > 0, eid
                #n += ntotal
        elif result_type == 2 and op2.num_wide == 3: # random
            ntotal = 3 * 4 * self.factor
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_random)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug('vectorize CROD random SORT%s' % op2.sort_method)
                n = nelements * ntotal
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2

                # NOTE: uses fdtype (not fdtype8) here -- TODO confirm that
                # random CROD tables are always 32-bit
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 3)
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, itotal, itotal2, data, nelements)

                # [axial, torsion]
                obj.data[obj.itime, itotal:itotal2, :] = floats[:, 1:].copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                n = oes_crod_random_3(op2, data, ndata, obj, nelements, ntotal)
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
        assert op2.num_wide * 4 * self.factor == ntotal, f'numwide*4={op2.num_wide*4} ntotal={ntotal} element_name={op2.element_name!r}\n{op2.code_information()}'
        return n, nelements, ntotal
    def _oes_cbeam(self, data, ndata, dt, is_magnitude_phase,
                   result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 2 : CBEAM

        Each CBEAM record carries 11 stations (end A, 9 intermediate, end B),
        so the result object grows by 11 layers per element.
        """
        op2 = self.op2
        n = 0
        ## TODO: fix method to follow correct pattern...regarding???

        if op2.is_stress:
            result_name = prefix + 'cbeam_stress' + postfix
        else:
            result_name = prefix + 'cbeam_strain' + postfix
        table_name_bytes = op2.table_name
        assert isinstance(table_name_bytes, bytes), table_name_bytes
        assert table_name_bytes in TABLES_BYTES, table_name_bytes

        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)

        if result_type == 0 and op2.num_wide == 111:  # real
            # TODO: vectorize
            ntotal = 444 * self.factor # 44 + 10*40  (11 nodes)

            if op2.is_stress:
                obj_vector_real = RealBeamStressArray
            else:
                obj_vector_real = RealBeamStrainArray

            nelements = ndata // ntotal
            # 11 stations per element
            nlayers = nelements * 11
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_real)
            if auto_return:
                op2._data_factor = 11
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj

            ntotal = op2.num_wide * 4 * self.factor
            nelements = ndata // ntotal
            if op2.use_vector and is_vectorized and 0:
                raise NotImplementedError('CBEAM-2-real not vectorized')
            else:
                if op2.use_vector and is_vectorized and op2.sort_method == 1:
                    obj._times[obj.itime] = dt
                    n = nelements * ntotal
                    itotal = obj.itotal
                    itotal2 = itotal + nelements * 11

                    ints = frombuffer(data, dtype=op2.idtype8).reshape(nelements, 111)
                    floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 111)
                    # word 0 is the eid_device; words 1..110 are 11 stations
                    # of 10 words each: [grid, xxb, 8 data words]
                    eids = ints[:, 0] // 10
                    ints2 = ints[:, 1:].reshape(nelements * 11, 10)
                    floats2 = floats[:, 1:].reshape(nelements * 11, 10)
                    xxb = floats2[:, 1]
                    nids = ints2[:, 0]
                    # replicate each eid once per station
                    eids2 = array([eids] * 11, dtype=op2.idtype8).T.ravel()
                    assert len(eids2) == len(nids)
                    obj.element_node[itotal:itotal2, 0] = eids2
                    obj.element_node[itotal:itotal2, 1] = nids
                    obj.xxb[itotal:itotal2] = xxb
                    # [sxc, sxd, sxe, sxf, smax, smin, mst, msc]
                    obj.data[obj.itime, itotal:itotal2, :] = floats2[:, 2:]
                else:
                    if op2.use_vector:
                        op2.log.debug('vectorize CBEAM real SORT%s' % op2.sort_method)
                    n = oes_cbeam_real_111(op2, data,
                                           obj,
                                           nelements, dt)

        elif result_type == 1 and op2.num_wide == 111:  # imag and random?
            # definitely complex results for MSC Nastran 2016.1

            ntotal = 444 * self.factor # 44 + 10*40  (11 nodes)
            nelements = ndata // ntotal

            if op2.is_stress:
                obj_vector_complex = ComplexBeamStressArray
            else:
                obj_vector_complex = ComplexBeamStrainArray

            nlayers = nelements * 11
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_complex)
            if auto_return:
                op2._data_factor = 11
                return nelements * ntotal, None, None

            obj = op2.obj

            nnodes = 10  # 11-1
            #ntotal = op2.num_wide * 4
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * ntotal
                itotal = obj.itotal
                itotal2 = itotal + nelements * 11

                # chop off eid
                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 111)[:, 1:]
                floats2 = floats.reshape(nelements * 11, 10).copy()

                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    # element ids/node ids only need setting on the first time step
                    ints = frombuffer(data, dtype=op2.idtype8).reshape(nelements, 111)
                    eids = ints[:, 0] // 10
                    eids2 = array([eids] * 11, dtype=op2.idtype8).T.ravel()
                    ints2 = ints[:, 1:].reshape(nelements * 11, 10)
                    nids = ints2[:, 0]
                    assert eids.min() > 0, eids.min()
                    assert nids.min() >= 0, nids.min()
                    obj.element_node[itotal:itotal2, 0] = eids2
                    obj.element_node[itotal:itotal2, 1] = nids

                #  0    1   2  3  4  5  6   7   8   9
                # grid, sd, c, d, e, f, c2, d2, e2, f2
                real_imag = apply_mag_phase(floats2, is_magnitude_phase, [2, 3, 4, 5], [6, 7, 8, 9])
                obj.data[obj.itime, itotal:itotal2, :] = real_imag
                obj.sd[itotal:itotal2] = floats2[:, 1]
                obj.itotal = itotal2
                #obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    op2.log.debug('vectorize CBEAM imag SORT%s' % op2.sort_method)
                n = oes_cbeam_complex_111(op2, data, obj,
                                          nelements, nnodes,
                                          is_magnitude_phase)

        elif result_type == 2 and op2.num_wide == 67: # random
            # TODO: vectorize
            # NOTE: no self.factor here -- presumably random CBEAM tables
            # are always 32-bit; TODO confirm
            ntotal = 268 # 1 + 11*6  (11 nodes)

            if op2.is_stress:
                obj_vector_random = RandomBeamStressArray
            else:
                obj_vector_random = RandomBeamStrainArray

            nelements = ndata // ntotal
            nlayers = nelements * 11
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_random)
            if auto_return:
                op2._data_factor = 11
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj

            nnodes = 10  # 11-1
            ntotal = op2.num_wide * 4
            nelements = ndata // ntotal
            if op2.use_vector and is_vectorized and 0:
                raise NotImplementedError('CBEAM-2-random not vectorized')
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    op2.log.debug('vectorize CBEAM random SORT%s' % op2.sort_method)
                n = oes_cbeam_random_67(op2, data, obj,
                                        nelements, nnodes, dt)
        elif result_type == 1 and op2.num_wide in [67] and table_name_bytes in [b'OESXNO1']: # CBEAM
            # C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\tr1081x.op2
            msg = 'skipping freq CBEAM; numwide=67'
            n = op2._not_implemented_or_skip(data, ndata, msg)
            nelements = None
            ntotal = None
        elif result_type == 2 and op2.num_wide in [67] and table_name_bytes in [b'OESXNO1']: # CBEAM
            #C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\tr1081x.op2
            msg = 'skipping random CBEAM; numwide=67'
            n = op2._not_implemented_or_skip(data, ndata, msg)
            nelements = None
            ntotal = None
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
        return n, nelements, ntotal
    def _oes_cshear(self, data, ndata, dt, is_magnitude_phase,
                    result_type, prefix, postfix):
        """
        Reads stress/strain for element type:
         - 4 : CSHEAR

        Parameters
        ----------
        data : bytes
            the record payload for this table block
        ndata : int
            length of ``data`` in bytes
        dt : int/float/None
            the transient step value (nonlinear factor)
        is_magnitude_phase : bool
            complex results stored as (magnitude, phase) vs. (real, imag)
        result_type : int
            0=real, 1=complex, 2=random
        prefix, postfix : str
            decorations on the result name (e.g., 'stress.' prefix)

        Returns
        -------
        n : int
            bytes consumed
        nelements : int / None
        ntotal : int / None
            bytes per element; Nones when the block auto-returns
        """
        op2 = self.op2
        n = 0
        # 4-CSHEAR
        # pick the stress vs. strain result containers and slot name
        if op2.is_stress:
            obj_vector_real = RealShearStressArray
            obj_vector_complex = ComplexShearStressArray
            obj_vector_random = RandomShearStressArray
            result_name = prefix + 'cshear_stress' + postfix
        else:
            obj_vector_real = RealShearStrainArray
            obj_vector_complex = ComplexShearStrainArray
            obj_vector_random = RandomShearStrainArray
            result_name = prefix + 'cshear_strain' + postfix
        if op2._results.is_not_saved(result_name):
            # user excluded this result; consume the record and move on
            return ndata, None, None
        op2._results._found_result(result_name)

        slot = op2.get_result(result_name)
        if result_type == 0 and op2.num_wide == 4:  # real
            # record: [eid_device, max_strain, avg_strain, margin]
            ntotal = 16  # 4*4
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                # pass 1: just count; the object is sized on pass 2
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None

            obj = op2.obj
            assert obj is not None
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                # vectorized SORT1 fill of the preallocated arrays
                n = nelements * ntotal
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2

                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 4)
                itime = obj.itime
                obj._times[itime] = dt
                self.obj_set_element(obj, itotal, itotal2, data, nelements)

                #[max_strain, avg_strain, margin]
                obj.data[itime, itotal:itotal2, :] = floats[:, 1:].copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    op2.log.debug('vectorize CSHEAR real SORT%s' % op2.sort_method)
                # scalar fallback: unpack one element per iteration
                struct1 = Struct(op2._endian + op2._analysis_code_fmt + b'3f')
                for unused_i in range(nelements):
                    edata = data[n:n + ntotal]
                    out = struct1.unpack(edata)  # num_wide=5
                    if op2.is_debug_file:
                        op2.binary_debug.write('CSHEAR-4 - %s\n' % str(out))
                    (eid_device, max_strain, avg_strain, margin) = out
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)
                    obj.add_sort1(dt, eid, max_strain, avg_strain, margin)
                    n += ntotal

        elif result_type == 1 and op2.num_wide == 5:  # imag
            # record: [eid_device, etmaxr, etmaxi, etavgr, etavgi]
            ntotal = 20 * self.factor  # 4*5
            nelements = ndata // ntotal

            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_complex)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * ntotal
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 5).copy()
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, itotal, itotal2, data, nelements)

                #(eid_device, etmaxr, etmaxi, etavgr, etavgi)
                # columns [1, 3] are the real/mag parts, [2, 4] imag/phase
                real_imag = apply_mag_phase(floats, is_magnitude_phase, [1, 3], [2, 4])
                obj.data[obj.itime, itotal:itotal2, :] = real_imag
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    op2.log.debug('vectorize CSHEAR imag SORT%s' % op2.sort_method)
                struct1 = Struct(op2._endian + mapfmt(op2._analysis_code_fmt + b'4f', self.size))
                for unused_i in range(nelements):
                    edata = data[n:n + ntotal]
                    out = struct1.unpack(edata)  # num_wide=5
                    if op2.is_debug_file:
                        op2.binary_debug.write('CSHEAR-4 - %s\n' % str(out))
                    (eid_device, etmaxr, etmaxi, etavgr, etavgi) = out
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)

                    if is_magnitude_phase:
                        etmax = polar_to_real_imag(etmaxr, etmaxi)
                        etavg = polar_to_real_imag(etavgr, etavgi)
                    else:
                        etmax = complex(etmaxr, etmaxi)
                        etavg = complex(etavgr, etavgi)
                    obj.add_sort1(dt, eid, etmax, etavg)
                    n += ntotal
        elif result_type == 2 and op2.num_wide == 3:  # random
            # record: [eid_device, max_strain, avg_strain]
            ntotal = 12  # 3*4
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_random)
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None

            obj = op2.obj
            assert obj is not None
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * 4 * op2.num_wide
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2

                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 3)
                itime = obj.itime
                obj._times[itime] = dt
                self.obj_set_element(obj, itotal, itotal2, data, nelements)

                #[max_strain, avg_strain, margin]
                obj.data[itime, itotal:itotal2, :] = floats[:, 1:].copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    op2.log.debug('vectorize CSHEAR random SORT%s' % op2.sort_method)
                n = oes_cshear_random_3(op2, data, obj,
                                        nelements, ntotal)
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
        return n, nelements, ntotal
    def _oes_cbar_34(self, data, ndata, dt, is_magnitude_phase,
                     result_type, prefix, postfix):
        """
        Reads stress/strain for element type:
         - 34 : CBAR

        Parameters
        ----------
        data : bytes
            the record payload for this table block
        ndata : int
            length of ``data`` in bytes
        dt : int/float/None
            the transient step value (nonlinear factor)
        is_magnitude_phase : bool
            complex results stored as (magnitude, phase) vs. (real, imag)
        result_type : int
            0=real, 1=complex, 2=random
        prefix, postfix : str
            decorations on the result name

        Returns
        -------
        (n, nelements, ntotal) : tuple
            bytes consumed, element count, bytes per element
        """
        op2 = self.op2
        #if isinstance(op2.nonlinear_factor, float):
            #op2.sort_bits[0] = 1 # sort2
            #op2.sort_method = 2
        n = 0
        if op2.is_stress:
            result_name = prefix + 'cbar_stress' + postfix
        else:
            result_name = prefix + 'cbar_strain' + postfix

        if op2._results.is_not_saved(result_name):
            # user excluded this result; consume the record and move on
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        if result_type == 0 and op2.num_wide == 16:  # real
            if op2.is_stress:
                obj_vector_real = RealBarStressArray
            else:
                obj_vector_real = RealBarStrainArray

            ntotal = 64 * self.factor  # 16*4
            nelements = ndata // ntotal
            #print('CBAR nelements =', nelements)
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                # pass 1: just count; the object is sized on pass 2
                return ndata, None, None

            if op2.is_debug_file:
                op2.binary_debug.write(' [cap, element1, element2, ..., cap]\n')
                #op2.binary_debug.write(' cap = %i # assume 1 cap when there could have been multiple\n' % ndata)
                op2.binary_debug.write(' #elementi = [eid_device, s1a, s2a, s3a, s4a, axial, smaxa, smina, MSt,\n')
                op2.binary_debug.write(' s1b, s2b, s3b, s4b, smaxb, sminb, MSc]\n')
                op2.binary_debug.write(' nelements=%i; nnodes=1 # centroid\n' % nelements)

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                # vectorized SORT1 fill
                # self.itime = 0
                # self.ielement = 0
                # self.itotal = 0
                #self.ntimes = 0
                #self.nelements = 0
                n = nelements * op2.num_wide * 4

                ielement = obj.ielement
                ielement2 = ielement + nelements
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, ielement, ielement2, data, nelements)

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 16)

                #[s1a, s2a, s3a, s4a, axial, smaxa, smina, margin_tension,
                # s1b, s2b, s3b, s4b,        smaxb, sminb, margin_compression]
                obj.data[obj.itime, ielement:ielement2, :] = floats[:, 1:].copy()
                obj.itotal = ielement2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    op2.log.debug('vectorize CBAR real SORT%s' % op2.sort_method)
                n = oes_cbar_real_16(op2, data, obj, nelements, ntotal, dt)
        elif result_type == 1 and op2.num_wide == 19:  # imag
            if op2.is_stress:
                obj_vector_complex = ComplexBarStressArray
            else:
                obj_vector_complex = ComplexBarStrainArray

            ntotal = 76 * self.factor
            nelements = ndata // ntotal

            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_complex)
            if auto_return:
                return ndata, None, None

            if op2.is_debug_file:
                op2.binary_debug.write(' [cap, element1, element2, ..., cap]\n')
                #op2.binary_debug.write(' cap = %i # assume 1 cap when there could have been multiple\n' % ndata)
                op2.binary_debug.write(' #elementi = [eid_device, s1a, s2a, s3a, s4a, axial,\n')
                op2.binary_debug.write(' s1b, s2b, s3b, s4b]\n')
                op2.binary_debug.write(' nelements=%i; nnodes=1 # centroid\n' % nelements)

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * ntotal
                itotal = obj.itotal
                itotal2 = itotal + nelements
                ielement2 = itotal2

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 19).copy()
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, itotal, itotal2, data, nelements)

                # end A then end B; isave1=real/mag columns, isave2=imag/phase
                isave1 = [1, 2, 3, 4, 5, 11, 12, 13, 14]
                isave2 = [6, 7, 8, 9, 10, 15, 16, 17, 18]
                real_imag = apply_mag_phase(floats, is_magnitude_phase, isave1, isave2)
                obj.data[obj.itime, itotal:itotal2, :] = real_imag
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    op2.log.debug('vectorize CBAR imag SORT%s' % op2.sort_method)
                n = oes_cbar_complex_19(op2, data, obj, nelements, ntotal, is_magnitude_phase)
        elif result_type == 2 and op2.num_wide == 19:  # random strain?
            raise RuntimeError(op2.code_information())
        elif result_type in [1, 2] and op2.num_wide == 10:  # random
            # random stress/strain per example
            #
            # DMAP says random stress has num_wide=10 and
            # random strain has numwide=19, but it's wrong...maybe???
            #
            # format_code = 1 - NO/RMS (SORT1 regardless of whether this is a SORT2 table or not)
            # format_code = 2 - ATO/PSD/CRM (actually SORT2)
            #
            element_id = op2.nonlinear_factor
            if op2.is_stress:
                obj_vector_random = RandomBarStressArray
            else:
                obj_vector_random = RandomBarStrainArray
            op2.data_code['nonlinear_factor'] = element_id

            ntotal = 10 * self.size
            nelements = ndata // ntotal
            #print(f'CBAR* nelements={nelements}')
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_random)
            if auto_return:
                return ndata, None, None

            if op2.is_debug_file:
                op2.binary_debug.write(' [cap, element1, element2, ..., cap]\n')
                #op2.binary_debug.write(' cap = %i # assume 1 cap when there could have been multiple\n' % ndata)
                op2.binary_debug.write(' #elementi = [eid_device, s1a, s2a, s3a, s4a, axial,\n')
                op2.binary_debug.write(' s1b, s2b, s3b, s4b]\n')
                op2.binary_debug.write(' nelements=%i; nnodes=1 # centroid\n' % nelements)

            obj = op2.obj
            # NOTE(review): this branch is disabled (`and 0`); it references
            # `itotal`/`itotal2` which are never defined here, so it would
            # crash if ever enabled — needs fixing before re-enabling
            if op2.use_vector and is_vectorized and 0:  # pragma: no cover
                # self.itime = 0
                # self.ielement = 0
                # self.itotal = 0
                #self.ntimes = 0
                #self.nelements = 0
                n = nelements * ntotal

                ielement = obj.ielement
                ielement2 = ielement + nelements
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, itotal, itotal2, data, nelements)

                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 10)

                #[s1a, s2a, s3a, s4a, axial,
                # s1b, s2b, s3b, s4b]
                obj.data[obj.itime, ielement:ielement2, :] = floats[:, 1:].copy()
                obj.itotal = ielement2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector and obj.itime == 0:  # pragma: no cover
                    op2.log.debug('vectorize CBAR random SORT%s' % op2.sort_method)
                n = oes_cbar_random_10(op2, data, obj,
                                       nelements, ntotal)
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
        return n, nelements, ntotal
    def _oes_csolid(self, data, ndata, dt, is_magnitude_phase,
                    result_type, prefix, postfix):
        """
        Reads stress/strain for element type:
         - 39 : CTETRA
         - 67 : CHEXA
         - 68 : CPENTA

        The solid record stores a centroid result plus one result per
        corner node, so ``nnodes_expected`` = corner nodes + 1 centroid
        (e.g. 5 for a CTETRA4).

        Parameters
        ----------
        data : bytes
            the record payload for this table block
        ndata : int
            length of ``data`` in bytes
        dt : int/float/None
            the transient step value (nonlinear factor)
        is_magnitude_phase : bool
            complex results stored as (magnitude, phase) vs. (real, imag)
        result_type : int
            0=real, 1=complex, 2=random (numwide is also checked)
        prefix, postfix : str
            decorations on the result name

        Returns
        -------
        (n, nelements, ntotal) : tuple
            bytes consumed, element count, bytes per element
        """
        op2 = self.op2
        n = 0
        etype_map = {
            #element_type : (element_base, nnodes_expected, element_name)
            # nnodes_expected includes the centroid
            39 : ('ctetra', 5, 'CTETRA4'),
            67 : ('chexa', 9, 'CHEXA8'),
            68 : ('cpenta', 7, 'CPENTA6'),
            255 : ('cpyram', 6, 'CPYRAM5'),
        }
        element_base, nnodes_expected, element_name = etype_map[op2.element_type]
        if op2.is_stress:
            stress_strain = 'stress'
            obj_vector_real = RealSolidStressArray
            obj_vector_complex = ComplexSolidStressArray
            obj_vector_random = RandomSolidStressArray
        else:
            stress_strain = 'strain'
            obj_vector_real = RealSolidStrainArray
            obj_vector_complex = ComplexSolidStrainArray
            obj_vector_random = RandomSolidStrainArray

        if prefix == '' and postfix == '':
            prefix = stress_strain + '.'

        # stress.chexa_stress
        result_name = prefix + f'{element_base}_{stress_strain}' + postfix

        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)

        slot = op2.get_result(result_name)

        # expected num_wide for each format; 4 header words
        # (eid_device, cid, abcd, nnodes) + per-node words
        numwide_real = 4 + 21 * nnodes_expected
        numwide_imag = 4 + (17 - 4) * nnodes_expected
        numwide_random = 4 + (11 - 4) * nnodes_expected
        numwide_random2 = 18 + 14 * (nnodes_expected - 1)
        preline1 = '%s-%s' % (op2.element_name, op2.element_type)
        preline2 = ' ' * len(preline1)

        #print('numwide real=%s imag=%s random=%s' % (numwide_real, numwide_imag, numwide_random2))
        op2._data_factor = nnodes_expected
        log = op2.log
        if op2.format_code == 1 and op2.num_wide == numwide_real:  # real
            ntotal = (16 + 84 * nnodes_expected) * self.factor
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                # pass 1: just count; the object is sized on pass 2
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * ntotal
                itotal = obj.ielement
                itotali = obj.itotal + nelements
                itotal2 = obj.itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    # element ids/nodes only need decoding once per object
                    # (eid_device, cid, abcd, nnodes)
                    ints = frombuffer(data, dtype=op2.idtype8).copy()
                    try:
                        ints1 = ints.reshape(nelements, numwide_real)
                    except ValueError:
                        msg = 'ints.shape=%s; size=%s ' % (str(ints.shape), ints.size)
                        msg += 'nelements=%s numwide_real=%s nelements*numwide=%s' % (
                            nelements, numwide_real, nelements * numwide_real)
                        raise ValueError(msg)
                    eids = ints1[:, 0] // 10
                    cids = ints1[:, 1]
                    #nids = ints1[:, 4]
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = repeat(eids, nnodes_expected)
                    ints2 = ints1[:, 4:].reshape(nelements * nnodes_expected, 21)
                    grid_device = ints2[:, 0]#.reshape(nelements, nnodes_expected)

                    #print('%s-grid_device=%s' % (op2.element_name, grid_device))
                    unused_grid_device2 = repeat(grid_device, nnodes_expected)
                    try:
                        obj.element_node[itotal:itotal2, 1] = grid_device
                    except ValueError:
                        msg = '%s; nnodes=%s\n' % (op2.element_name, nnodes_expected)
                        msg += 'itotal=%s itotal2=%s\n' % (itotal, itotal2)
                        msg += 'grid_device.shape=%s; size=%s\n' % (str(grid_device.shape), grid_device.size)
                        #msg += 'nids=%s' % nids
                        raise ValueError(msg)
                    #log.debug(f'cids = {np.unique(cids)}')
                    obj.element_cid[itotal:itotali, 0] = eids
                    obj.element_cid[itotal:itotali, 1] = cids

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, numwide_real)[:, 4:]

                # 1    9    15   2    10   16  3   11  17   8
                #[oxx, oyy, ozz, txy, tyz, txz, o1, o2, o3, ovm]
                #isave = [1, 9, 15, 2, 10, 16, 3, 11, 17, 8]
                #(grid_device,
                    #sxx, sxy, s1, a1, a2, a3, pressure, svm,
                    #syy, syz, s2, b1, b2, b3,
                    #szz, sxz, s3, c1, c2, c3)
                floats1 = floats.reshape(nelements * nnodes_expected, 21)#[:, 1:] # drop grid_device

                # o1/o2/o3 is not max/mid/min.  They are not consistently ordered, so we force it.
                max_mid_min = np.vstack([
                    floats1[:, 3],
                    floats1[:, 11],
                    floats1[:, 17],
                ]).T
                max_mid_min.sort(axis=1)
                assert max_mid_min.shape == (nelements * nnodes_expected, 3), max_mid_min.shape
                # sort() is ascending, so reverse the columns for max/mid/min
                obj.data[obj.itime, itotal:itotal2, 6:9] = max_mid_min[:, [2, 1, 0]]

                #obj.data[obj.itime, itotal:itotal2, :] = floats1[:, isave]
                obj.data[obj.itime, itotal:itotal2, :6] = floats1[:, [1, 9, 15, 2, 10, 16]]
                obj.data[obj.itime, itotal:itotal2, 9] = floats1[:, 8]
                obj.itotal = itotal2
                obj.ielement = itotali
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    log.debug(f'vectorize CSolid real SORT{op2.sort_method}')
                n = oes_csolid_real(op2, data, obj,
                                    nelements, dt,
                                    element_name, nnodes_expected,
                                    preline1, preline2)

        elif op2.format_code in [2, 3] and op2.num_wide == numwide_imag:  # complex
            ntotal = numwide_imag * 4 * self.factor
            nelements = ndata // ntotal
            self.ntotal += nelements * nnodes_expected
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_complex)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * ntotal
                ielement = obj.ielement
                ielement2 = ielement + nelements
                itotal = obj.itotal
                itotal2 = itotal + nelements * nnodes_expected

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, numwide_imag)
                # 13 words per node: nid + 6 real + 6 imag components
                floats1 = floats[:, 4:].reshape(nelements * nnodes_expected, 13).copy()
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype8).reshape(nelements, numwide_imag)
                    ints1 = ints[:, 4:].reshape(nelements * nnodes_expected, 13)
                    eids = ints[:, 0] // 10
                    cids = ints[:, 1]
                    nids = ints1[:, 0]
                    # TODO: ctype, nodef not considered
                    assert eids.min() > 0, eids.min()
                    assert nids.min() >= 0, nids.min()
                    eids2 = np.vstack([eids] * nnodes_expected).T.ravel()
                    #nids2 = np.vstack([nids] * nnodes_expected).T.ravel()
                    #print(nids2)
                    obj.element_node[itotal:itotal2, 0] = eids2
                    obj.element_node[itotal:itotal2, 1] = nids
                    obj.element_cid[ielement:ielement2, 0] = eids
                    obj.element_cid[ielement:ielement2, 1] = cids

                # 0 is nid
                isave1 = [1, 2, 3, 4, 5, 6]
                isave2 = [7, 8, 9, 10, 11, 12]
                real_imag = apply_mag_phase(floats1, is_magnitude_phase, isave1, isave2)

                obj.data[obj.itime, itotal:itotal2, :] = real_imag
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    log.debug(f'vectorize CSolid imag SORT{op2.sort_method}')
                n = oes_csolid_complex(op2, data, obj,
                                       nelements, # nnodes,
                                       element_name, nnodes_expected,
                                       is_magnitude_phase)

        elif op2.format_code == 1 and op2.num_wide == numwide_random: # random
            if not op2.is_sort1:
                # SORT2 random solids not handled; skip cleanly
                log.debug(f'support CSolid random SORT{op2.sort_method}')
                return ndata, None, None
            ntotal = numwide_random * 4
            nelements = ndata // ntotal
            assert ndata % ntotal == 0, ndata
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_random)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            # NOTE(review): this branch is disabled (`and 0`) and still
            # reshapes with numwide_real / 21 words per node, which does not
            # match the random layout — needs rework before re-enabling
            if op2.use_vector and is_vectorized and 0:  # pragma: no cover
                n = nelements * 4 * op2.num_wide
                itotal = obj.ielement
                itotali = obj.itotal + nelements
                itotal2 = obj.itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    # (eid_device, cid, abcd, nnodes)
                    ints = frombuffer(data, dtype=op2.idtype).copy()
                    try:
                        ints1 = ints.reshape(nelements, numwide_real)
                    except ValueError:
                        msg = 'ints.shape=%s; size=%s ' % (str(ints.shape), ints.size)
                        msg += 'nelements=%s numwide_real=%s nelements*numwide=%s' % (
                            nelements, numwide_real, nelements * numwide_real)
                        raise ValueError(msg)
                    eids = ints1[:, 0] // 10
                    cids = ints1[:, 1]
                    #nids = ints1[:, 4]
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = repeat(eids, nnodes_expected)
                    ints2 = ints1[:, 4:].reshape(nelements * nnodes_expected, 21)
                    grid_device = ints2[:, 0]#.reshape(nelements, nnodes_expected)

                    #print('%s-grid_device=%s' % (op2.element_name, grid_device))
                    unused_grid_device2 = repeat(grid_device, nnodes_expected)
                    try:
                        obj.element_node[itotal:itotal2, 1] = grid_device
                    except ValueError:
                        msg = '%s; nnodes=%s\n' % (op2.element_name, nnodes_expected)
                        msg += 'itotal=%s itotal2=%s\n' % (itotal, itotal2)
                        msg += 'grid_device.shape=%s; size=%s\n' % (str(grid_device.shape), grid_device.size)
                        #msg += 'nids=%s' % nids
                        raise ValueError(msg)
                    obj.element_cid[itotal:itotali, 0] = eids
                    obj.element_cid[itotal:itotali, 1] = cids

                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, numwide_real)[:, 4:]

                # 1    9    15   2    10   16  3   11  17   8
                #[oxx, oyy, ozz, txy, tyz, txz, o1, o2, o3, ovm]
                #isave = [1, 9, 15, 2, 10, 16, 3, 11, 17, 8]
                #(grid_device,
                    #sxx, sxy, s1, a1, a2, a3, pressure, svm,
                    #syy, syz, s2, b1, b2, b3,
                    #szz, sxz, s3, c1, c2, c3)
                floats1 = floats.reshape(nelements * nnodes_expected, 21)#[:, 1:] # drop grid_device

                # o1/o2/o3 is not max/mid/min.  They are not consistently ordered, so we force it.
                max_mid_min = np.vstack([
                    floats1[:, 3],
                    floats1[:, 11],
                    floats1[:, 17],
                ]).T
                max_mid_min.sort(axis=1)
                assert max_mid_min.shape == (nelements * nnodes_expected, 3), max_mid_min.shape
                obj.data[obj.itime, itotal:itotal2, 6:9] = max_mid_min[:, [2, 1, 0]]

                #obj.data[obj.itime, itotal:itotal2, :] = floats1[:, isave]
                obj.data[obj.itime, itotal:itotal2, :6] = floats1[:, [1, 9, 15, 2, 10, 16]]
                obj.data[obj.itime, itotal:itotal2, 9] = floats1[:, 8]
                obj.itotal = itotal2
                obj.ielement = itotali
            else:
                if is_vectorized and op2.use_vector and obj.itime == 0:  # pragma: no cover
                    log.debug(f'vectorize CSolid random SORT{op2.sort_method}')
                n = oes_csolid_random(op2, data, obj, nelements,
                                      element_name, nnodes_expected,
                                      preline1, preline2)

        elif op2.format_code in [2, 3] and op2.num_wide == numwide_random2:
            # alternate random layout; currently skipped (dead code kept
            # below for reference)
            #raise RuntimeError(op2.code_information())
            ## a = 18
            ## b = 14
            ## a + b * nnodes = numwide_random3
            ## a + b * 4 = 74  # CTETRA
            ## a + b * 6 = 102 # CPENTA
            ## a + b * 8 = 130 # CHEXA-67
            #msg = 'OES-CHEXA-random-numwide=%s numwide_real=%s numwide_imag=%s numwide_random=%s' % (
                #op2.num_wide, numwide_real, numwide_imag, numwide_random)
            #return op2._not_implemented_or_skip(data, ndata, msg)

            #print('numwide real=%s imag=%s random=%s' % (numwide_real, numwide_imag, numwide_random))
            unused_num_wide_random = 4 + nnodes_expected * (17 - 4)

            #print('random2=%s' % num_wide_random)
            #print(op2.code_information())

            #if op2.num_wide ==
            if op2.read_mode == 1:
                return ndata, None, None
            # unconditional skip on both read modes
            return ndata, None, None
            #print('numwide=%s numwide_random=%s attempt2=%s subcase=%s' % (
                #op2.num_wide, numwide_random, num_wide_random, op2.isubcase))
            ##msg = op2.code_information()
            #ntotal = 130
            #nelements = ndata // ntotal

            ## cid, coord_type, nactive_pnts,
            ##      nid, oxx, oyy, ozz, txy, tyz, txz
            #struct1 = Struct(op2._endian + b'2i 4s')
            #struct2 = Struct(op2._endian + b'i6f')
            #for i in range(nelements):
                #edata = data[n:n+12]
                #out = struct1.unpack(edata)
                #(eid_device, cid, abcd) = out
                #eid, dt = get_eid_dt_from_eid_device(
                    #eid_device, op2.nonlinear_factor, op2.sort_method)
                #if op2.is_debug_file:
                    #op2.binary_debug.write('%s - eid=%i; %s\n' % (preline1, eid, str(out)))
                #n += 12
                #for inode in range(nnodes_expected):  # nodes pts, +1 for centroid (???)
                    #out = struct2.unpack(data[n:n + 28]) # 4*7 = 28
                    #if op2.is_debug_file:
                        #op2.binary_debug.write('%s - %s\n' % (preline2, str(out)))
                    #(grid_device, sxx, syy, sz, txy, tyz, txz) = out
            #msg = 'OES-CHEXA-random-numwide=%s numwide_real=%s numwide_imag=%s numwide_random=%s' % (
                #op2.num_wide, numwide_real, numwide_imag, numwide_random)
            #return op2._not_implemented_or_skip(data, ndata, msg)
        elif op2.format_code in [1, 2] and op2.num_wide == 67:  # CHEXA
            # unimplemented random CHEXA layout; log-and-skip
            #44 = 5 * 8 + 4  (TETRA)
            #52 = 6 * 8 + 4  (PYRAM)
            #60 = 7 * 8 + 4  (PENTA)
            #76 = 9 * 8 + 4  (HEXA)
            msg = 'skipping random CHEXA; numwide=67'
            #print(op2.code_information())
            #asdf
            n = op2._not_implemented_or_skip(data, ndata, msg)
            nelements = None
            ntotal = None
        elif op2.format_code in [1, 2, 3] and op2.num_wide in [60] and op2.table_name in [b'OESXRMS1', b'OESXNO1']: # CPENTA
            # unimplemented random CPENTA layout; the raw dumps below were
            # captured for reference while reverse-engineering the format
            # bad
            #if op2.read_mode == 2:
                #ints    = (68011, 0, 805.28, 6,
                           #0, 0, 0, 0, 0, 0, 0, 0,
                           #120000, 0, 0, 0, 0, 0, 0, 0,
                           #120001, 0, 0, 0, 0, 0, 0, 0,
                           #120010, 0, 0, 0, 0, 0, 0, 0,
                           #120100, 0, 0, 0, 0, 0, 0, 0,
                           #120101, 0, 0, 0, 0, 0, 0, 0,
                           #120110, 0, 0, 0, 0, 0, 0, 0,
                           #68111, 0, 1145655879, 15,
                           #0, 1080284864, 1080296481, 1080284990, 1080279426, 1080285072, 1080285570, 1080285750,
                           #130000, 1080285656, 1080285656, 1080285656, 1080285162, 1080287537, 1080285308, 1080285794,
                           #130002, 1080285656, 1080285656, 1080285656, 1080284551, 1080287537, 1080285308, 1080285794,
                           #130020, 1080285656, 1080285656, 1080285656, 1080289401, 1080287537, 1080285308, 1080285794,
                           #130200, 1080285656, 1080285656, 1080285656, 1080285269, 1080287537, 1080285308, 1080285794,
                           #130202, 1080288409, 1080287759, 1080323139, 1080285308, 1080285512, 1080285308, 1080285874,
                           #130220, 1080285333, 1080285373, 1080285450, 1080287537, 1080287537, 1080285625, 1080285771)
                #floats  = (68011, 0.0, 805.28, 6,
                           #0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                           #1.681558157189705e-40, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                           #1.681572170174437e-40, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                           #1.681698287036213e-40, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                           #1.682959455654153e-40, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                           #1.682973468638786e-40, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                           #1.683099585500578e-40, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                           #9.544383970362762e-41, 0.0, 805.28, 2.1019476964872256e-44,
                           #0.0, 3.559982299804687, 3.562752008438110, 3.5600123405456, 3.558685779571533, 3.560031890869140, 3.560150623321533, 3.56019353866577,
                           #1.8216880036222622e-40, 3.560171127319336, 3.560171127319336, 3.5601711273193, 3.560053348541259, 3.560619592666626, 3.560088157653808, 3.56020402908325,
                           #1.8217160295915487e-40, 3.560171127319336, 3.560171127319336, 3.5601711273193, 3.559907674789428, 3.560619592666626, 3.560088157653808, 3.56020402908325,
                           #1.8219682633151272e-40, 3.560171127319336, 3.560171127319336, 3.5601711273193, 3.561064004898071, 3.560619592666626, 3.560088157653808, 3.56020402908325,
                           #1.8244906005509118e-40, 3.560171127319336, 3.560171127319336, 3.5601711273193, 3.560078859329223, 3.560619592666626, 3.560088157653808, 3.56020402908325,
                           #1.8245186265201983e-40, 3.560827493667602, 3.560672521591186, 3.5691077709198, 3.560088157653808, 3.560136795043945, 3.560088157653808, 3.56022310256958,
                           #1.8247708602437768e-40, 3.560094118118286, 3.56010365486145, 3.5601220130920, 3.560619592666626, 3.560619592666626, 3.560163736343384, 3.56019854545593)
                #self.show_data(data, types='ifs')
            #C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\tr1081x.op2
            msg = 'skipping random CPENTA; numwide=60'
            n = op2._not_implemented_or_skip(data, ndata, msg)
            nelements = None
            ntotal = None
        elif op2.format_code in [1, 2, 3] and op2.num_wide in [76] and op2.table_name in [b'OESXRMS1', b'OESXNO1']: # CHEXA
            # unimplemented random CHEXA layout; log-and-skip
            # C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\tr1081x.op2
            msg = 'skipping random CHEXA; numwide=76'
            n = op2._not_implemented_or_skip(data, ndata, msg)
            nelements = None
            ntotal = None
        #elif op2.format_code in [2, 3] and op2.num_wide == 76:  # imag
            # C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\tr1081x.op2
            #  analysis_code = 5   Frequency
            #  table_code    = 905 OESXNO1-OESXNO1C - Cumulative Root Mean Square output
            #  format_code   = 3   Magnitude/Phase
            #  result_type   = 3   Magnitude/Phase
            #  sort_method   = 1
            #  random_code   = 0
            #  element_type  = 67  CHEXA
            #msg = op2.code_information()
            #return op2._not_implemented_or_skip(data, ndata, msg), None, None
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information() +
                               '\nnumwide real=%s imag=%s random=%s' % (
                                   numwide_real, numwide_imag, numwide_random2))
        return n, nelements, ntotal
    def _oes_csolid2(self, data, ndata, dt, is_magnitude_phase,
                     result_type, prefix, postfix):
        """
        Reads stress/strain for element type:
         - 300 : CHEXA
         - 301 : CPENTA
         - 302 : CTETRA
         - 303 : CPYRAM

        Unlike the classic solid records (see ``_oes_csolid``), this NX
        layout has 3 header words and 8 words per node with no separate
        centroid slot, so ``nnodes_expected`` equals the corner-node count.

        Parameters
        ----------
        data : bytes
            the record payload for this table block
        ndata : int
            length of ``data`` in bytes
        dt : int/float/None
            the transient step value (nonlinear factor)
        is_magnitude_phase : bool
            unused here; only the real format is implemented
        result_type : int
            0=real, 1=complex, 2=random
        prefix, postfix : str
            decorations on the result name

        Returns
        -------
        (n, nelements, ntotal) : tuple
            bytes consumed, element count, bytes per element
        """
        op2 = self.op2
        n = 0
        if op2.is_stress:
            stress_strain = 'stress'
            obj_real = RealSolidStressArrayNx
            #obj_vector_complex = ComplexSolidStressArray
            #obj_vector_random = RandomSolidStressArray
        else:
            obj_real = RealSolidStrainArrayNx
            #obj_vector_complex = ComplexSolidStrainArray
            #obj_vector_random = RandomSolidStrainArray
            stress_strain = 'strain'

        if prefix == '' and postfix == '':
            prefix = stress_strain + '.'

        etype_map = {
            #element_type : (element_base, nnodes_expected, element_name)
            300 : ('chexa', 8, 'CHEXA8'),
            301 : ('cpenta', 6, 'CPENTA6'),
            302 : ('ctetra', 4, 'CTETRA4'),
            303 : ('cpyram', 5, 'CPYRAM5'),
        }
        element_base, nnodes_expected, element_name = etype_map[op2.element_type]

        # chexa_stress
        result_name = prefix + f'{element_base}_{stress_strain}' + postfix
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)

        # 3 header words + 8 words per node (nid + 7 result values)
        numwide_real = 3 + 8 * nnodes_expected
        #numwide_imag = 4 + (17 - 4) * nnodes_expected
        #numwide_random = 4 + (11 - 4) * nnodes_expected
        #numwide_random2 = 18 + 14 * (nnodes_expected - 1)
        #print('nnodes_expected =', nnodes_expected)
        #print('numwide real=%s imag=%s random=%s' % (numwide_real, numwide_imag, numwide_random2))
        op2._data_factor = nnodes_expected

        if op2.format_code == 1 and op2.num_wide == numwide_real:  # real
            ntotal = (12 + 32 * nnodes_expected) * self.factor
            nelements = ndata // ntotal
            #auto_return, is_vectorized = op2._create_oes_object4(
                #nelements, result_name, slot, obj_vector_real)
            #auto_return = op2.read_mode == 1
            #is_vectorized = False
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_real)
            if auto_return:
                # pass 1: just count; the object is sized on pass 2
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:  # pragma: no cover
                n = nelements * ntotal
                itotal = obj.ielement
                itotali = obj.itotal + nelements
                itotal2 = obj.itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    # element ids/nodes only need decoding once per object
                    # (eid_device, cid, abcd, nnodes)
                    ints = frombuffer(data, dtype=op2.idtype8).copy()
                    try:
                        ints1 = ints.reshape(nelements, numwide_real)
                    except ValueError:
                        msg = 'ints.shape=%s; size=%s ' % (str(ints.shape), ints.size)
                        msg += 'nelements=%s numwide_real=%s nelements*numwide=%s' % (
                            nelements, numwide_real, nelements * numwide_real)
                        raise ValueError(msg)
                    eids = ints1[:, 0] // 10
                    cids = ints1[:, 1]
                    #nids = ints1[:, 4]
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = repeat(eids, nnodes_expected)
                    ints2 = ints1[:, 3:].reshape(nelements * nnodes_expected, 8)
                    grid_device = ints2[:, 0]#.reshape(nelements, nnodes_expected)

                    #print('%s-grid_device=%s' % (op2.element_name, grid_device))
                    unused_grid_device2 = repeat(grid_device, nnodes_expected)
                    try:
                        obj.element_node[itotal:itotal2, 1] = grid_device
                    except ValueError:
                        msg = '%s; nnodes=%s\n' % (op2.element_name, nnodes_expected)
                        msg += 'itotal=%s itotal2=%s\n' % (itotal, itotal2)
                        msg += 'grid_device.shape=%s; size=%s\n' % (str(grid_device.shape), grid_device.size)
                        #msg += 'nids=%s' % nids
                        raise ValueError(msg)
                    obj.element_cid[itotal:itotali, 0] = eids
                    obj.element_cid[itotal:itotali, 1] = cids

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, numwide_real)[:, 3:]

                # 1    2    3    4    5    6    7 - verify...
                #[oxx, oyy, ozz, txy, tyz, txz, ovm]
                #isave = [1, 9, 15, 2, 10, 16, 3, 11, 17, 8]
                #(grid_device,
                    #sxx, sxy, s1, a1, a2, a3, pressure, svm,
                    #syy, syz, s2, b1, b2, b3,
                    #szz, sxz, s3, c1, c2, c3)
                floats1 = floats.reshape(nelements * nnodes_expected, 8)[:, 1:] # drop grid_device

                # o1/o2/o3 is not max/mid/min.  They are not consistently ordered, so we force it.
                obj.data[obj.itime, itotal:itotal2, :] = floats1
                obj.itotal = itotal2
                obj.ielement = itotali
            else:
                n = _oes_csolid2_real(op2, data, n,
                                      obj,
                                      nnodes_expected,
                                      nelements,
                                      element_name,
                                      stress_strain=stress_strain)
        else:  # pragma: no cover
            raise NotImplementedError(op2.code_information())
        assert isinstance(n, int), n
        assert isinstance(ntotal, int), ntotal
        assert isinstance(nelements, int), nelements
        #assert n == ntotal * nelements, f'n={n} ntotal={ntotal*nelements}'
        return n, nelements, ntotal
    def _oes_csolid_composite(self, data, ndata, dt, is_magnitude_phase: bool,
                              result_type: int, prefix: str, postfix: str) -> int:
        """
        306: Nonlinear composite HEXA element (CHEXALN)
        307: Nonlinear composite PENTA element (CPENTALN)
        reads stress/strain for element type:
         - 306 : CHEXALN
         - 307 : CPENTA
         #- 302 : CTETRA
         #- 303 : CPYRAM

        Real record layout: 3 leading words per element plus 8 words per
        edge point (numwide_real = 3 + 8*nedges; ntotal = 4*numwide_real
        bytes).  The real path currently delegates to
        ``oes_csolid_composite_real`` but logs a "skipping" warning; the
        complex path is not implemented (it dumps the data and raises a
        NameError via the ``aaa`` placeholder in read_mode=2).
        Returns ``(n, nelements, ntotal)`` like the sibling ``_oes_*``
        readers, or ``(ndata, None, None)`` when the result is not saved.
        """
        op2 = self.op2
        n = 0
        if op2.is_stress:
            #obj_vector_real = RealSolidStressArray
            #obj_vector_complex = ComplexSolidStressArray
            #obj_vector_random = RandomSolidStressArray
            word = 'stress'
            if op2.element_type == 306: # CHEXALN
                nedges = 4 # quad
                nnodes_expected = 8
                result_name = prefix + 'chexa_stress' + postfix
                element_name = 'CHEXA8'
                # real=67
            elif op2.element_type == 307: # CPENTALN
                nedges = 3 # tri
                nnodes_expected = 6
                result_name = prefix + 'cpenta_stress' + postfix
                element_name = 'CPENTA6'
            #elif op2.element_type == 302: # CTETRA
                #nnodes_expected = 4
                #result_name = prefix + 'ctetra_stress' + postfix
                #element_name = 'CTETRA4'
            #elif op2.element_type == 303: # CPYRAM
                #nnodes_expected = 5
                #result_name = prefix + 'cpyram_stress' + postfix
                #element_name = 'CPYRAM5'
            else: # pragma: no cover
                raise RuntimeError(op2.code_information())
        else:
            #obj_vector_real = RealSolidStrainArray
            #obj_vector_complex = ComplexSolidStrainArray
            #obj_vector_random = RandomSolidStrainArray
            word = 'strain'
            if op2.element_type == 306: # CHEXALN
                nedges = 4 # quad
                nnodes_expected = 8
                result_name = prefix + 'chexa_strain' + postfix
                element_name = 'CHEXA8'
            elif op2.element_type == 307: # CPENTA
                nedges = 3 # tri
                nnodes_expected = 6
                result_name = prefix + 'cpenta_strain' + postfix
                element_name = 'CPENTA6'
            #elif op2.element_type == 302: # CTETRA
                #nnodes_expected = 4
                #result_name = prefix + 'ctetra_strain' + postfix
                #element_name = 'CTETRA4'
            #elif op2.element_type == 303: # CPYRAM
                #nnodes_expected = 5
                #result_name = prefix + 'cpyram_strain' + postfix
                #element_name = 'CPYRAM5'
            else: # pragma: no cover
                raise NotImplementedError(op2.code_information())
        #msg = 'sort1 Type=%s num=%s' % (op2.element_name, op2.element_type)
        #return op2._not_implemented_or_skip(data, ndata, msg)
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        numwide_real = 3 + 8 * nedges # 3 + 8*4 = 35
        numwide_imag = 3 + 7 * 14 # 3 + 7 * 14 = 101
        #print(op2.code_information())
        #print(f'{op2.element_name} numwide_real={numwide_real} numwide_imag={numwide_imag} -> {op2.num_wide}')
        #numwide_real = 3 + 8 * nnodes_expected
        #numwide_imag = 4 + (17 - 4) * nnodes_expected
        #numwide_random = 4 + (11 - 4) * nnodes_expected
        #numwide_random2 = 18 + 14 * (nnodes_expected - 1)
        preline1 = '%s-%s' % (op2.element_name, op2.element_type)
        preline2 = ' ' * len(preline1)
        #print('nnodes_expected =', nnodes_expected)
        #print('numwide real=%s imag=%s random=%s' % (numwide_real, numwide_imag, numwide_random2))
        # data_factor scales ntimes*ntotal bookkeeping by the points-per-element
        op2._data_factor = nedges
        if result_type == 0 and op2.num_wide == numwide_real: # real
            op2.log.warning(f'skipping {op2.table_name_str}: {op2.element_name}-{op2.element_type} {word} csolid composite')
            ntotal = 12 + 32 * nedges
            nelements = ndata // ntotal
            #auto_return, is_vectorized = op2._create_oes_object4(
                #nelements, result_name, slot, obj_vector_real)
            # read_mode=1 is the sizing pass; no parsing happens, just byte counting
            auto_return = op2.read_mode == 1
            is_vectorized = False
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj
            # NOTE(review): this vectorized path is hard-disabled by "and 0"
            # and slices [:, 3:] into width 8 — verify against the record
            # layout before ever enabling it.
            if op2.use_vector and is_vectorized and op2.sort_method == 1 and 0: # pragma: no cover
                n = nelements * 4 * op2.num_wide
                itotal = obj.ielement
                itotali = obj.itotal + nelements
                itotal2 = obj.itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    # (eid_device, cid, abcd, nnodes)
                    ints = frombuffer(data, dtype=op2.idtype).copy()
                    try:
                        ints1 = ints.reshape(nelements, numwide_real)
                    except ValueError:
                        msg = 'ints.shape=%s; size=%s ' % (str(ints.shape), ints.size)
                        msg += 'nelements=%s numwide_real=%s nelements*numwide=%s' % (
                            nelements, numwide_real, nelements * numwide_real)
                        raise ValueError(msg)
                    eids = ints1[:, 0] // 10
                    cids = ints1[:, 1]
                    #nids = ints1[:, 4]
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = repeat(eids, nnodes_expected)
                    ints2 = ints1[:, 3:].reshape(nelements * nnodes_expected, 8)
                    grid_device = ints2[:, 0]#.reshape(nelements, nnodes_expected)
                    #print('%s-grid_device=%s' % (op2.element_name, grid_device))
                    unused_grid_device2 = repeat(grid_device, nnodes_expected)
                    try:
                        obj.element_node[itotal:itotal2, 1] = grid_device
                    except ValueError:
                        msg = '%s; nnodes=%s\n' % (op2.element_name, nnodes_expected)
                        msg += 'itotal=%s itotal2=%s\n' % (itotal, itotal2)
                        msg += 'grid_device.shape=%s; size=%s\n' % (str(grid_device.shape), grid_device.size)
                        #msg += 'nids=%s' % nids
                        raise ValueError(msg)
                    obj.element_cid[itotal:itotali, 0] = eids
                    obj.element_cid[itotal:itotali, 1] = cids
                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, numwide_real)[:, 3:]
                # 1    2    3    4    5    6    7 - verify...
                #[oxx, oyy, ozz, txy, tyz, txz, ovm]
                #isave = [1, 9, 15, 2, 10, 16, 3, 11, 17, 8]
                #(grid_device,
                    #sxx, sxy, s1, a1, a2, a3, pressure, svm,
                    #syy, syz, s2, b1, b2, b3,
                    #szz, sxz, s3, c1, c2, c3)
                floats1 = floats.reshape(nelements * nnodes_expected, 8)[:, 1:] # drop grid_device
                # o1/o2/o3 is not max/mid/min.  They are not consistently ordered, so we force it.
                obj.data[obj.itime, itotal:itotal2, :] = floats1
                obj.itotal = itotal2
                obj.ielement = itotali
            else:
                #if is_vectorized and op2.use_vector: # pragma: no cover
                    #op2.log.debug('vectorize CSolid real SORT%s' % op2.sort_method)
                n = oes_csolid_composite_real(op2, data, obj,
                                              nelements, nedges,
                                              element_name, preline1, preline2, dt)
        elif result_type == 1 and op2.num_wide == numwide_imag: # complex
            # 1 PLY I Lamina number
            # 2 FLOC I Fiber location (BOT, MID, TOP)
            #
            # 3 GRID I Edge grid ID
            # 4 EX1R RS Normal strain in the 1-direction
            # 5 EY1R RS Normal strain in the 2-direction
            # 6 EZ1R RS Normal strain in the 3-direction
            # 7 ET1R RS Shear strain in the 12-plane
            # 8 EL2R RS Shear strain in the 23-plane
            # 9 EL1R RS Shear strain in the 13-plane
            # 10 EX1I RS Normal strain in the 1-direction
            # 11 EY1I RS Normal strain in the 2-direction
            # 12 EZ1I RS Normal strain in the 3-direction
            # 13 ET1I RS Shear strain in the 12-plane
            # 14 EL2I RS Shear strain in the 23-plane
            # 15 EL1I RS Shear strain in the 13-plane
            # 16 ETMAX1 RS von Mises strain
            # For each fiber location requested (PLSLOC), words 3 through 16 repeat 4 times.
            if op2.read_mode == 1:
                return ndata, None, None
            self.show_data(data[n:n+4*op2.num_wide])
            # deliberate NameError: the complex composite-solid decode is unimplemented
            aaa
        else: # pragma: no cover
            raise NotImplementedError(op2.code_information())
        assert n == ntotal * nelements, f'n={n} ntotal={ntotal*nelements}'
        return n, nelements, ntotal
    def _oes_csolid_linear_hyperelastic_cosine(self, data, ndata, dt, unused_is_magnitude_phase,
                                               result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 140 :CHEXAFD

        Only the real stress form (format_code=1,
        num_wide = 2 + 20*nnodes) is handled; it is parsed by
        ``oes_csolid_linear_hyperelastic_cosine_real`` but the result is
        *not* stored (a "skipping" warning is logged and
        ``(n, None, None)`` is returned).  The strain branch always raises
        RuntimeError.  Note this method overrides the caller-supplied
        ``prefix`` with ``'stress.'``.
        """
        op2 = self.op2
        n = 0
        log = op2.log
        if op2.is_stress:
            #obj_vector_real = RealSolidStressArray
            #obj_vector_complex = ComplexSolidStressArray
            #obj_vector_random = RandomSolidStressArray
            word = 'stress'
            prefix = word + '.'
            if op2.element_type == 140: # CHEXA
                nnodes_expected = 8
                result_name = prefix + 'chexa_stress' + postfix
                #element_name = 'CHEXA8'
                # real=122
            #elif op2.element_type == 160: # CPENTA
                #nnodes_expected = 6
                #result_name = prefix + 'cpenta_stress' + postfix
                #element_name = 'CPENTA6'
            #elif op2.element_type == 165: # CPENTA
                #nnodes_expected = 21
                #result_name = prefix + 'cpenta_stress' + postfix
                #element_name = 'CPENTA6'
            #elif op2.element_type == 161: # CTETRA
                #nnodes_expected = 1
                #result_name = prefix + 'ctetra_stress' + postfix
                #element_name = 'CTETRA4'
            #elif op2.element_type == 166: # CTETRA
                #nnodes_expected = 5
                #result_name = prefix + 'ctetra_stress' + postfix
                #element_name = 'CTETRA4'
            #elif op2.element_type == 303: # CPYRAM
                #nnodes_expected = 5
                #result_name = prefix + 'cpyram_stress' + postfix
                #element_name = 'CPYRAM5'
            else: # pragma: no cover
                raise RuntimeError(op2.code_information())
        else:
            #obj_vector_real = RealSolidStrainArray
            #obj_vector_complex = ComplexSolidStrainArray
            #obj_vector_random = RandomSolidStrainArray
            word = 'strain'
            # strain output is not decoded for this element family yet;
            # every case below is commented out, so this branch always raises
            #if op2.element_type == 202: # CHEXA
                #nnodes_expected = 8
                #result_name = prefix + 'chexa_strain' + postfix
                #element_name = 'CHEXA8'
            #elif op2.element_type == 301: # CPENTA
                #nnodes_expected = 6
                #result_name = prefix + 'cpenta_strain' + postfix
                #element_name = 'CPENTA6'
            #elif op2.element_type == 302: # CTETRA
                #nnodes_expected = 4
                #result_name = prefix + 'ctetra_strain' + postfix
                #element_name = 'CTETRA4'
            #elif op2.element_type == 303: # CPYRAM
                #nnodes_expected = 5
                #result_name = prefix + 'cpyram_strain' + postfix
                #element_name = 'CPYRAM5'
            #else: # pragma: no cover
            raise RuntimeError(op2.code_information())
        #msg = 'sort1 Type=%s num=%s' % (op2.element_name, op2.element_type)
        #return op2._not_implemented_or_skip(data, ndata, msg)
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        numwide_real = 2 + 20 * nnodes_expected
        #numwide_real = 162 # CHEXA
        #print(op2.num_wide, numwide_real)
        assert numwide_real == op2.num_wide, numwide_real
        #numwide_imag = 4 + (17 - 4) * nnodes_expected
        #numwide_random = 4 + (11 - 4) * nnodes_expected
        #numwide_random2 = 18 + 14 * (nnodes_expected - 1)
        preline1 = '%s-%s' % (op2.element_name, op2.element_type)
        preline2 = ' ' * len(preline1)
        #print('nnodes_expected =', nnodes_expected)
        #print('numwide real=%s imag=%s random=%s' % (numwide_real, numwide_imag, numwide_random2))
        op2._data_factor = nnodes_expected
        if op2.format_code == 1 and op2.num_wide == numwide_real: # real
            # 8 header bytes + 80 bytes (20 words) per node
            ntotal = 8 + 80 * nnodes_expected
            #ntotal = numwide_real * 4
            nelements = ndata // ntotal
            assert ndata % ntotal == 0
            #auto_return, is_vectorized = op2._create_oes_object4(
                #nelements, result_name, slot, obj_vector_real)
            auto_return = op2.read_mode == 1
            is_vectorized = False
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj
            # NOTE(review): this vectorized path is hard-disabled by "and 0"
            # and looks copy-pasted from the 20-word CSolid reader
            # (slices [:, 4:] into width 21); verify before enabling.
            if op2.use_vector and is_vectorized and op2.sort_method == 1 and 0: # pragma: no cover
                n = nelements * 4 * op2.num_wide
                itotal = obj.ielement
                itotali = obj.itotal + nelements
                itotal2 = obj.itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    # (eid_device, cid, abcd, nnodes)
                    ints = frombuffer(data, dtype=op2.idtype).copy()
                    try:
                        ints1 = ints.reshape(nelements, numwide_real)
                    except ValueError:
                        msg = 'ints.shape=%s; size=%s ' % (str(ints.shape), ints.size)
                        msg += 'nelements=%s numwide_real=%s nelements*numwide=%s' % (
                            nelements, numwide_real, nelements * numwide_real)
                        raise ValueError(msg)
                    eids = ints1[:, 0] // 10
                    cids = ints1[:, 1]
                    #nids = ints1[:, 4]
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = repeat(eids, nnodes_expected)
                    ints2 = ints1[:, 4:].reshape(nelements * nnodes_expected, 21)
                    grid_device = ints2[:, 0]#.reshape(nelements, nnodes_expected)
                    #print('%s-grid_device=%s' % (op2.element_name, grid_device))
                    unused_grid_device2 = repeat(grid_device, nnodes_expected)
                    try:
                        obj.element_node[itotal:itotal2, 1] = grid_device
                    except ValueError:
                        msg = '%s; nnodes=%s\n' % (op2.element_name, nnodes_expected)
                        msg += 'itotal=%s itotal2=%s\n' % (itotal, itotal2)
                        msg += 'grid_device.shape=%s; size=%s\n' % (str(grid_device.shape), grid_device.size)
                        #msg += 'nids=%s' % nids
                        raise ValueError(msg)
                    obj.element_cid[itotal:itotali, 0] = eids
                    obj.element_cid[itotal:itotali, 1] = cids
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, numwide_real)[:, 4:]
                # 1    9    15   2    10   16  3   11  17   8
                #[oxx, oyy, ozz, txy, tyz, txz, o1, o2, o3, ovm]
                #isave = [1, 9, 15, 2, 10, 16, 3, 11, 17, 8]
                #(grid_device,
                    #sxx, sxy, s1, a1, a2, a3, pressure, svm,
                    #syy, syz, s2, b1, b2, b3,
                    #szz, sxz, s3, c1, c2, c3)
                floats1 = floats.reshape(nelements * nnodes_expected, 21)#[:, 1:] # drop grid_device
                # o1/o2/o3 is not max/mid/min.  They are not consistently ordered, so we force it.
                max_mid_min = np.vstack([
                    floats1[:, 3],
                    floats1[:, 11],
                    floats1[:, 17],
                ]).T
                max_mid_min.sort(axis=1)
                assert max_mid_min.shape == (nelements * nnodes_expected, 3), max_mid_min.shape
                obj.data[obj.itime, itotal:itotal2, 6:9] = max_mid_min[:, [2, 1, 0]]
                #obj.data[obj.itime, itotal:itotal2, :] = floats1[:, isave]
                obj.data[obj.itime, itotal:itotal2, :6] = floats1[:, [1, 9, 15, 2, 10, 16]]
                obj.data[obj.itime, itotal:itotal2, 9] = floats1[:, 8]
                obj.itotal = itotal2
                obj.ielement = itotali
            else:
                #if is_vectorized and op2.use_vector: # pragma: no cover
                    #op2.log.debug('vectorize CSolid real SORT%s' % op2.sort_method)
                n = oes_csolid_linear_hyperelastic_cosine_real(
                    op2, data,
                    nelements, nnodes_expected,
                    preline1, preline2)
                log.warning(f'skipping {op2.table_name_str}: {op2.element_name}-{op2.element_type} linear hyperelastic cosine {word}')
                # parsed-but-not-stored: the table is skipped, so no counts are returned
                return n, None, None
        else: # pragma: no cover
            raise RuntimeError(op2.code_information())
        assert n == ntotal * nelements, f'n={n} ntotal={ntotal*nelements}'
        return n, nelements, ntotal
    def _oes_csolid_linear_hyperelastic(self, data, ndata, dt, is_magnitude_phase,
                                        result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 160 :CPENTAFD
         - 163 : CHEXAFD
         - 166 : CTETRAFD

         # centroid??
         - 161 : CTETRAFD

         # more nodes???
         - 165 : CPENTAFD

        Only the real stress form (format_code=1,
        num_wide = 2 + 20*nnodes) is handled; it is parsed by
        ``oes_csolid_linear_hyperelastic_real`` but flagged with a
        "skipping" warning.  The strain branch always raises RuntimeError.
        """
        op2 = self.op2
        n = 0
        if op2.is_stress:
            #obj_vector_real = RealSolidStressArray
            #obj_vector_complex = ComplexSolidStressArray
            #obj_vector_random = RandomSolidStressArray
            word = 'stress'
            if op2.element_type == 163: # CHEXA
                nnodes_expected = 27
                result_name = prefix + 'chexa_stress' + postfix
                #element_name = 'CHEXA8'
                # real=122
            elif op2.element_type == 160: # CPENTA
                nnodes_expected = 6
                result_name = prefix + 'cpenta_stress' + postfix
                #element_name = 'CPENTA6'
            elif op2.element_type == 165: # CPENTA
                nnodes_expected = 21
                result_name = prefix + 'cpenta_stress' + postfix
                #element_name = 'CPENTA6'
            elif op2.element_type == 161: # CTETRA
                nnodes_expected = 1
                result_name = prefix + 'ctetra_stress' + postfix
                #element_name = 'CTETRA4'
            elif op2.element_type == 166: # CTETRA
                nnodes_expected = 5
                result_name = prefix + 'ctetra_stress' + postfix
                #element_name = 'CTETRA4'
            #elif op2.element_type == 303: # CPYRAM
                #nnodes_expected = 5
                #result_name = prefix + 'cpyram_stress' + postfix
                #element_name = 'CPYRAM5'
            else: # pragma: no cover
                raise RuntimeError(op2.code_information())
        else:
            #obj_vector_real = RealSolidStrainArray
            #obj_vector_complex = ComplexSolidStrainArray
            #obj_vector_random = RandomSolidStrainArray
            word = 'strain'
            # strain output is not decoded for this element family yet;
            # every case below is commented out, so this branch always raises
            #if op2.element_type == 202: # CHEXA
                #nnodes_expected = 8
                #result_name = prefix + 'chexa_strain' + postfix
                #element_name = 'CHEXA8'
            #elif op2.element_type == 301: # CPENTA
                #nnodes_expected = 6
                #result_name = prefix + 'cpenta_strain' + postfix
                #element_name = 'CPENTA6'
            #elif op2.element_type == 302: # CTETRA
                #nnodes_expected = 4
                #result_name = prefix + 'ctetra_strain' + postfix
                #element_name = 'CTETRA4'
            #elif op2.element_type == 303: # CPYRAM
                #nnodes_expected = 5
                #result_name = prefix + 'cpyram_strain' + postfix
                #element_name = 'CPYRAM5'
            #else: # pragma: no cover
            raise RuntimeError(op2.code_information())
        #msg = 'sort1 Type=%s num=%s' % (op2.element_name, op2.element_type)
        #return op2._not_implemented_or_skip(data, ndata, msg)
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        numwide_real = 2 + 20 * nnodes_expected
        #numwide_real = 122 # CHEXA
        #print(op2.num_wide, numwide_real)
        assert numwide_real == op2.num_wide, numwide_real
        numwide_imag = 4 + (17 - 4) * nnodes_expected
        #numwide_random = 4 + (11 - 4) * nnodes_expected
        numwide_random2 = 18 + 14 * (nnodes_expected - 1)
        preline1 = '%s-%s' % (op2.element_name, op2.element_type)
        preline2 = ' ' * len(preline1)
        #print('nnodes_expected =', nnodes_expected)
        #print('numwide real=%s imag=%s random=%s' % (numwide_real, numwide_imag, numwide_random2))
        op2._data_factor = nnodes_expected
        if op2.format_code == 1 and op2.num_wide == numwide_real: # real
            # 8 header bytes + 80 bytes (20 words) per node
            ntotal = 8 + 80 * nnodes_expected
            #ntotal = numwide_real * 4
            nelements = ndata // ntotal
            assert ndata % ntotal == 0
            #auto_return, is_vectorized = op2._create_oes_object4(
                #nelements, result_name, slot, obj_vector_real)
            auto_return = op2.read_mode == 1
            is_vectorized = False
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj
            # NOTE(review): this vectorized path is hard-disabled by "and 0"
            # and looks copy-pasted from the 20-word CSolid reader
            # (slices [:, 4:] into width 21); verify before enabling.
            if op2.use_vector and is_vectorized and op2.sort_method == 1 and 0: # pragma: no cover
                n = nelements * 4 * op2.num_wide
                itotal = obj.ielement
                itotali = obj.itotal + nelements
                itotal2 = obj.itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    # (eid_device, cid, abcd, nnodes)
                    ints = frombuffer(data, dtype=op2.idtype).copy()
                    try:
                        ints1 = ints.reshape(nelements, numwide_real)
                    except ValueError:
                        msg = 'ints.shape=%s; size=%s ' % (str(ints.shape), ints.size)
                        msg += 'nelements=%s numwide_real=%s nelements*numwide=%s' % (
                            nelements, numwide_real, nelements * numwide_real)
                        raise ValueError(msg)
                    eids = ints1[:, 0] // 10
                    cids = ints1[:, 1]
                    #nids = ints1[:, 4]
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = repeat(eids, nnodes_expected)
                    ints2 = ints1[:, 4:].reshape(nelements * nnodes_expected, 21)
                    grid_device = ints2[:, 0]#.reshape(nelements, nnodes_expected)
                    #print('%s-grid_device=%s' % (op2.element_name, grid_device))
                    unused_grid_device2 = repeat(grid_device, nnodes_expected)
                    try:
                        obj.element_node[itotal:itotal2, 1] = grid_device
                    except ValueError:
                        msg = '%s; nnodes=%s\n' % (op2.element_name, nnodes_expected)
                        msg += 'itotal=%s itotal2=%s\n' % (itotal, itotal2)
                        msg += 'grid_device.shape=%s; size=%s\n' % (str(grid_device.shape), grid_device.size)
                        #msg += 'nids=%s' % nids
                        raise ValueError(msg)
                    obj.element_cid[itotal:itotali, 0] = eids
                    obj.element_cid[itotal:itotali, 1] = cids
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, numwide_real)[:, 4:]
                # 1    9    15   2    10   16  3   11  17   8
                #[oxx, oyy, ozz, txy, tyz, txz, o1, o2, o3, ovm]
                #isave = [1, 9, 15, 2, 10, 16, 3, 11, 17, 8]
                #(grid_device,
                    #sxx, sxy, s1, a1, a2, a3, pressure, svm,
                    #syy, syz, s2, b1, b2, b3,
                    #szz, sxz, s3, c1, c2, c3)
                floats1 = floats.reshape(nelements * nnodes_expected, 21)#[:, 1:] # drop grid_device
                # o1/o2/o3 is not max/mid/min.  They are not consistently ordered, so we force it.
                max_mid_min = np.vstack([
                    floats1[:, 3],
                    floats1[:, 11],
                    floats1[:, 17],
                ]).T
                max_mid_min.sort(axis=1)
                assert max_mid_min.shape == (nelements * nnodes_expected, 3), max_mid_min.shape
                obj.data[obj.itime, itotal:itotal2, 6:9] = max_mid_min[:, [2, 1, 0]]
                #obj.data[obj.itime, itotal:itotal2, :] = floats1[:, isave]
                obj.data[obj.itime, itotal:itotal2, :6] = floats1[:, [1, 9, 15, 2, 10, 16]]
                obj.data[obj.itime, itotal:itotal2, 9] = floats1[:, 8]
                obj.itotal = itotal2
                obj.ielement = itotali
            else:
                #if is_vectorized and op2.use_vector: # pragma: no cover
                    #op2.log.debug('vectorize CSolid real SORT%s' % op2.sort_method)
                n = oes_csolid_linear_hyperelastic_real(op2, data, obj, nelements,
                                                        nnodes_expected,
                                                        preline1, preline2)
                op2.log.warning(f'skipping {op2.table_name_str}: {op2.element_name}-{op2.element_type} linear hyperelastic {word}')
        else: # pragma: no cover
            raise RuntimeError(op2.code_information() +
                               '\nnumwide real=%s imag=%s random=%s' % (
                                   numwide_real, numwide_imag, numwide_random2))
        assert n == ntotal * nelements, f'n={n} ntotal={ntotal*nelements}'
        return n, nelements, ntotal
    def _oes_csolid_nonlinear_hyperelastic(self, data, ndata, dt, is_magnitude_phase,
                                           result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 202 : CHEXAFD
         - 204 : PENTA6FD

        '    N O N L I N E A R   S T R E S S E S   I N   H Y P E R E L A S T I C   H E X A H E D R O N   E L E M E N T S  ( HEXA8FD )'
        ' '
        '  ELEMENT GRID/    POINT                       CAUCHY STRESSES/ LOG STRAINS                        PRESSURE    VOL. STRAIN'
        '     ID   GAUSS      ID       X           Y           Z           XY          YZ          ZX'
        '0     401 GRID      401  1.9128E+03  6.2729E+02 -3.4828E+02 -7.5176E+01  7.8259E+00 -2.5001E+02  7.3060E+02  7.3060E-03'
        '                         6.8270E-01 -6.5437E-04 -1.2874E+00 -3.9645E-02 -2.9882E-03 -5.9975E-02'
        '                    402  1.1024E+03  1.0686E+03  2.0832E+01 -1.7936E+00 -2.3656E-01 -1.1467E+02  7.3060E+02  7.3060E-03'
        '                         6.8201E-01  6.4335E-01 -1.2964E+00 -2.7195E-03 -1.0809E-02  6.3608E-02'
        '                   1402  1.1024E+03  1.0686E+03  2.0832E+01  1.7936E+00  2.3656E-01 -1.1467E+02  7.3060E+02  7.3060E-03'
        '                         6.8201E-01  6.4335E-01 -1.2964E+00 -2.7195E-03 -1.0809E-02  6.3608E-02'
        '                   1401  1.9128E+03  6.2729E+02 -3.4828E+02  7.5176E+01 -7.8259E+00 -2.5001E+02  7.3060E+02  7.3060E-03'
        '                         6.8201E-01  6.4335E-01 -1.2964E+00 -2.7195E-03 -1.0809E-02  6.3608E-02'
        '      501  1.9159E+03  6.2332E+02 -3.4744E+02 -7.5730E+01  7.9009E+00 -2.5075E+02  7.3060E+02  7.3060E-03'
        '                         6.8201E-01  6.4335E-01 -1.2964E+00 -2.7195E-03 -1.0809E-02  6.3608E-02'
        '      502  1.1004E+03  1.0667E+03  2.4631E+01 -1.7898E+00 -2.2971E-01 -1.1434E+02  7.3060E+02  7.3060E-03'
        '                         6.8201E-01  6.4335E-01 -1.2964E+00 -2.7195E-03 -1.0809E-02  6.3608E-02'
        '     1502  1.1004E+03  1.0667E+03  2.4631E+01  1.7898E+00  2.2971E-01 -1.1434E+02  7.3060E+02  7.3060E-03'
        '                         6.8201E-01  6.4335E-01 -1.2964E+00 -2.7195E-03 -1.0809E-02  6.3608E-02'
        '     1501  1.9159E+03  6.2332E+02 -3.4744E+02  7.5730E+01 -7.9009E+00 -2.5075E+02  7.3060E+02  7.3060E-03'
        '                         6.8201E-01  6.4335E-01 -1.2964E+00 -2.7195E-03 -1.0809E-02  6.3608E-02'

        # 216 TETRAFD
        # 218 HEXAFD
        # 220 (CPENTA)
        # 221 (CTETRA)
            ELEMENT GRID/   POINT                STRESSES/ TOTAL STRAINS     EQUIVALENT EFF. STRAIN  EFF. CREEP
              ID   GAUSS     ID    X    Y    Z    XY   YZ   ZX     STRESS   PLAS/NLELAS   STRAIN

        Only the real form (result_type=0, num_wide = 2 + 15*nnodes) is
        decoded; the data is unpacked but never stored (the storage calls
        sit inside an ``if 0:`` block), a "skipping" warning is logged, and
        ``(n, None, None)`` is returned.
        """
        op2 = self.op2
        n = 0
        if op2.is_stress:
            #obj_vector_real = RealSolidStressArray
            #obj_vector_complex = ComplexSolidStressArray
            #obj_vector_random = RandomSolidStressArray
            word = 'stress'
            if op2.element_type in [202, 218]: # CHEXA
                nnodes_expected = 8
                result_name = prefix + 'chexa_stress_strain' + postfix
                element_name = 'CHEXA8'
                # real=122
            elif op2.element_type in [204, 220]: # CPENTA
                nnodes_expected = 6
                result_name = prefix + 'cpenta_stress_strain' + postfix
                element_name = 'CPENTA6'
            elif op2.element_type in [216, 221]: # CTETRA
                nnodes_expected = 4
                result_name = prefix + 'ctetra_stress_strain' + postfix
                element_name = 'CTETRA4'
            #elif op2.element_type == 303: # CPYRAM
                #nnodes_expected = 5
                #result_name = prefix + 'cpyram_stress' + postfix
                #element_name = 'CPYRAM5'
            else: # pragma: no cover
                raise RuntimeError(op2.code_information())
        else:
            #obj_vector_real = RealSolidStrainArray
            #obj_vector_complex = ComplexSolidStrainArray
            #obj_vector_random = RandomSolidStrainArray
            word = 'strain'
            # strain output is not decoded for this element family yet;
            # every case below is commented out, so this branch always raises
            #if op2.element_type == 202: # CHEXA
                #nnodes_expected = 8
                #result_name = prefix + 'chexa_strain' + postfix
                #element_name = 'CHEXA8'
            #elif op2.element_type == 301: # CPENTA
                #nnodes_expected = 6
                #result_name = prefix + 'cpenta_strain' + postfix
                #element_name = 'CPENTA6'
            #elif op2.element_type == 302: # CTETRA
                #nnodes_expected = 4
                #result_name = prefix + 'ctetra_strain' + postfix
                #element_name = 'CTETRA4'
            #elif op2.element_type == 303: # CPYRAM
                #nnodes_expected = 5
                #result_name = prefix + 'cpyram_strain' + postfix
                #element_name = 'CPYRAM5'
            #else: # pragma: no cover
            raise RuntimeError(op2.code_information())
        #msg = 'sort1 Type=%s num=%s' % (op2.element_name, op2.element_type)
        #return op2._not_implemented_or_skip(data, ndata, msg)
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        numwide_real = 2 + 15 * nnodes_expected
        #numwide_real = 122 # CHEXA
        assert numwide_real == op2.num_wide, numwide_real
        numwide_imag = 4 + (17 - 4) * nnodes_expected
        #numwide_random = 4 + (11 - 4) * nnodes_expected
        numwide_random2 = 18 + 14 * (nnodes_expected - 1)
        preline1 = '%s-%s' % (op2.element_name, op2.element_type)
        preline2 = ' ' * len(preline1)
        #print('nnodes_expected =', nnodes_expected)
        #print('numwide real=%s imag=%s random=%s' % (numwide_real, numwide_imag, numwide_random2))
        op2._data_factor = nnodes_expected
        if result_type == 0 and op2.num_wide == numwide_real: # real
            # 8 header bytes + 60 bytes (15 words) per node
            ntotal = 8 + 60 * nnodes_expected
            #ntotal = numwide_real * 4
            nelements = ndata // ntotal
            assert ndata % ntotal == 0
            #auto_return, is_vectorized = op2._create_oes_object4(
                #nelements, result_name, slot, obj_vector_real)
            auto_return = op2.read_mode == 1
            is_vectorized = False
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj
            # NOTE(review): this vectorized path is hard-disabled by "and 0"
            # and looks copy-pasted from the 20-word CSolid reader
            # (slices [:, 4:] into width 21); verify before enabling.
            if op2.use_vector and is_vectorized and op2.sort_method == 1 and 0: # pragma: no cover
                n = nelements * 4 * op2.num_wide
                itotal = obj.ielement
                itotali = obj.itotal + nelements
                itotal2 = obj.itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    # (eid_device, cid, abcd, nnodes)
                    ints = frombuffer(data, dtype=op2.idtype).copy()
                    try:
                        ints1 = ints.reshape(nelements, numwide_real)
                    except ValueError:
                        msg = 'ints.shape=%s; size=%s ' % (str(ints.shape), ints.size)
                        msg += 'nelements=%s numwide_real=%s nelements*numwide=%s' % (
                            nelements, numwide_real, nelements * numwide_real)
                        raise ValueError(msg)
                    eids = ints1[:, 0] // 10
                    cids = ints1[:, 1]
                    #nids = ints1[:, 4]
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = repeat(eids, nnodes_expected)
                    ints2 = ints1[:, 4:].reshape(nelements * nnodes_expected, 21)
                    grid_device = ints2[:, 0]#.reshape(nelements, nnodes_expected)
                    #print('%s-grid_device=%s' % (op2.element_name, grid_device))
                    unused_grid_device2 = repeat(grid_device, nnodes_expected)
                    try:
                        obj.element_node[itotal:itotal2, 1] = grid_device
                    except ValueError:
                        msg = '%s; nnodes=%s\n' % (op2.element_name, nnodes_expected)
                        msg += 'itotal=%s itotal2=%s\n' % (itotal, itotal2)
                        msg += 'grid_device.shape=%s; size=%s\n' % (str(grid_device.shape), grid_device.size)
                        #msg += 'nids=%s' % nids
                        raise ValueError(msg)
                    obj.element_cid[itotal:itotali, 0] = eids
                    obj.element_cid[itotal:itotali, 1] = cids
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, numwide_real)[:, 4:]
                # 1    9    15   2    10   16  3   11  17   8
                #[oxx, oyy, ozz, txy, tyz, txz, o1, o2, o3, ovm]
                #isave = [1, 9, 15, 2, 10, 16, 3, 11, 17, 8]
                #(grid_device,
                    #sxx, sxy, s1, a1, a2, a3, pressure, svm,
                    #syy, syz, s2, b1, b2, b3,
                    #szz, sxz, s3, c1, c2, c3)
                floats1 = floats.reshape(nelements * nnodes_expected, 21)#[:, 1:] # drop grid_device
                # o1/o2/o3 is not max/mid/min.  They are not consistently ordered, so we force it.
                max_mid_min = np.vstack([
                    floats1[:, 3],
                    floats1[:, 11],
                    floats1[:, 17],
                ]).T
                max_mid_min.sort(axis=1)
                assert max_mid_min.shape == (nelements * nnodes_expected, 3), max_mid_min.shape
                obj.data[obj.itime, itotal:itotal2, 6:9] = max_mid_min[:, [2, 1, 0]]
                #obj.data[obj.itime, itotal:itotal2, :] = floats1[:, isave]
                obj.data[obj.itime, itotal:itotal2, :6] = floats1[:, [1, 9, 15, 2, 10, 16]]
                obj.data[obj.itime, itotal:itotal2, 9] = floats1[:, 8]
                obj.itotal = itotal2
                obj.ielement = itotali
            else:
                #if is_vectorized and op2.use_vector: # pragma: no cover
                    #op2.log.debug('vectorize CSolid real SORT%s' % op2.sort_method)
                # per-node record (15 words):
                # 2 TYPE CHAR4 Grid or Gaus
                #
                # 3 ID I
                # 4 SX RS
                # 5 SY RS
                # 6 SZ RS
                # 7 SXY RS
                # 8 SYZ RS
                # 9 SZX RS
                # 10 PRESSURE RS
                # 11 VOLSTR RS
                # 12 EX RS
                # 13 EY RS
                # 14 EZ RS
                # 15 EXY RS
                # 16 EYZ RS
                # 17 EZX RS
                # Words 3 through 17 repeat 008 times
                struct1 = Struct(op2._endian + op2._analysis_code_fmt + b'4s')
                struct2 = Struct(op2._endian + b'i14f')
                if op2.is_debug_file:
                    msg = (
                        f'{op2.element_name}-{op2.element_type} nelements={nelements} '
                        f'nnodes={nnodes_expected}; '
                        'C=[sxx, syy, szz, txy, tyz, txz, pressure, '
                        'evol, exx, eyy, ezz, exy, eyz, exz]\n')
                    op2.binary_debug.write(msg)
                for unused_i in range(nelements):
                    # 8-byte element header: (eid_device, 'GRID'/'GAUS')
                    edata = data[n:n+8]
                    out = struct1.unpack(edata)
                    (eid_device, unused_abcd, ) = out
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)
                    if op2.is_debug_file:
                        op2.binary_debug.write('%s - eid=%i; %s\n' % (preline1, eid, str(out)))
                    #assert nnodes < 21, 'print_block(data[n:n+16])' #self.print_block(data[n:n+16])
                    n += 8
                    for inode in range(nnodes_expected):  # nodes pts, no centroid
                        out = struct2.unpack(data[n:n + 60]) # 4*15 = 60
                        if op2.is_debug_file:
                            op2.binary_debug.write('%s - %s\n' % (preline2, str(out)))
                        (grid_device, sxx, syy, szz, txy, tyz, txz, pressure,
                         evol, exx, eyy, ezz, exy, eyz, exz) = out
                        #print(out)
                        if op2.is_debug_file:
                            op2.binary_debug.write('  eid=%s inode=%i; C=[%s]\n' % (
                                eid, grid_device, ', '.join(['%r' % di for di in out])))
                        grid = grid_device
                        # NOTE(review): dead branch; it references cid/ovm,
                        # which are undefined in this scope — fix before enabling
                        if 0: # pragma: no cover
                            if inode == 0:
                                #  this is correct, but fails
                                #element_name = op2.element_name + str(nnodes)
                                obj.add_eid_sort1(element_name, cid, dt, eid, grid,
                                                  sxx, syy, szz, txy, tyz, txz, ovm)
                            else:
                                obj.add_node_sort1(dt, eid, inode, grid,
                                                   sxx, syy, szz, txy, tyz, txz, ovm)
                        n += 60
            op2.log.warning(f'skipping {op2.table_name_str}: {op2.element_name}-{op2.element_type} nonlinear hyperelastic {word}')
            # parsed-but-not-stored: the table is skipped, so no counts are returned
            return n, None, None
        else: # pragma: no cover
            raise RuntimeError(op2.code_information() +
                               '\nnumwide real=%s imag=%s random=%s' % (
                                   numwide_real, numwide_imag, numwide_random2))
        assert n == ntotal * nelements, f'n={n} ntotal={ntotal*nelements}'
        return n, nelements, ntotal
    def _oes_csolid_nonlinear(self, data, ndata, dt, unused_is_magnitude_phase,
                              result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 85-TETRANL
         - 91-PENTANL
         - 93-HEXANL
         - 256-PYRAMNL

        2 CTYPE CHAR4
        3 NODEF 1 Number of active GRID points

        4 GRID I Grid / Gauss
        5 SX RS Stress in x
        6 SY RS Stress in y
        7 SZ RS Stress in z
        8 SXY RS Stress in xy
        9 SYZ RS Stress in yz
        10 SZX RS Stress in zx
        11 SE RS Equivalent stress
        12 EPS RS Effective plastic strain
        13 ECS RS Effective creep strain
        14 EX RS Strain in x
        15 EY RS Strain in y
        16 EZ RS Strain in z
        17 EXY RS Strain in xy
        18 EYZ RS Strain in yz
        19 EZX RS Strain in zx
        Words 3 through 19 repeat 005 times

        Results are stored in a ``RealNonlinearSolidArray``; only the
        ``numwide_random`` (2 + 16*nnodes word) form is implemented — the
        ``numwide_real`` form raises ``RuntimeError('OES_CSOLIDNL_90')``.
        """
        op2 = self.op2
        #real
        #85: 2 + (18 - 2) * 5,  # Nonlinear CTETRA
        #256: 4 + (18 - 2) * 6,  # Nonlinear CHEXA -> ???
        # random
        #91: 4 + (25 - 4) * 7,  # Nonlinear CPENTA
        #93: 4 + (25 - 4) * 9,  # Nonlinear CHEXA -> 584 (can cause a crash)
        #256: 4 + (25 - 4) * 6,  # Nonlinear CHEXA -> ???
        # the nodes are nnodes + 1 (corner nodes plus the centroid record)
        if op2.element_type == 85:
            etype = 'CTETRANL'
            nnodes = 5
            result_name = prefix + 'ctetra_stress_strain' + postfix
        elif op2.element_type == 91:
            etype = 'CPENTANL'
            nnodes = 7
            result_name = prefix + 'cpenta_stress_strain' + postfix
        elif op2.element_type == 93:
            etype = 'CHEXANL'
            nnodes = 9
            result_name = prefix + 'chexa_stress_strain' + postfix
        elif op2.element_type == 256:
            etype = 'CPYRAMNL'
            nnodes = 6
            # NOTE(review): stores the CPYRAM result under the chexa slot —
            # presumably a copy-paste slip; confirm against the result tables
            result_name = prefix + 'chexa_stress_strain' + postfix
        else: # pragma: no cover
            raise RuntimeError(op2.code_information())
        numwide_real = 4 + (25 - 4) * nnodes # real???
        numwide_random = 2 + (18 - 2) * nnodes # imag???
        #op2.log.debug("format_code=%s numwide=%s numwide_real=%s numwide_random=%s" % (
            #op2.format_code, op2.num_wide, numwide_real, numwide_random))
        #numwide_real = 0
        #numwide_imag = 2 + 16 * nnodes
        #ntotal = 8 + 64 * nnodes
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        if op2.format_code == 1 and op2.num_wide == numwide_real:
            #if op2.read_mode == 1:
                #return ndata, None, None
            # self.factor is the word-size multiplier (1 for 32-bit, 2 for 64-bit)
            ntotal = numwide_real * 4 * self.factor
            #if op2.is_stress:
                #op2.create_transient_object(self.nonlinearPlateStress, NonlinearSolid)
            #else:
                #op2.create_transient_object(self.nonlinearPlateStrain, NonlinearSolid)
            #self.handle_results_buffer(self.OES_CQUAD4NL_90, resultName, name)
            raise RuntimeError('OES_CSOLIDNL_90')
        elif op2.format_code == 1 and op2.num_wide == numwide_random:  # random
            # 82 : CTETRA_NL (etype=85)
            # 146 : CHEXA_NL (etype=93)
            #raise RuntimeError(op2.code_information())
        #elif op2.format_code in [2, 3] and op2.num_wide == numwide_imag:  # imag
            ntotal = numwide_random * 4 * self.factor
            nelements = ndata // ntotal
            self.ntotal += nelements * nnodes
            #print(op2.read_mode, RealNonlinearSolidArray)
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, RealNonlinearSolidArray)
            if auto_return:
                op2._data_factor = nnodes
                return nelements * ntotal, None, None
            n = 0
            # struct1: 8-byte element header (eid_device, 4-char ctype);
            # struct2: one 64-byte node record (grid + 15 floats)
            s1 = Struct(op2._endian + op2._analysis_code_fmt + b'4s')
            s2 = Struct(op2._endian + b'i15f')
            nelements = ndata // ntotal
            obj = op2.obj
            for unused_i in range(nelements):  # 2+16*9 = 146 -> 146*4 = 584
                edata = data[n:n+8]
                n += 8
                out = s1.unpack(edata)
                if op2.is_debug_file:
                    op2.binary_debug.write('%s-%s - %s\n' % (etype, op2.element_type, str(out)))
                (eid_device, unused_ctype) = out
                eid, dt = get_eid_dt_from_eid_device(
                    eid_device, op2.nonlinear_factor, op2.sort_method)
                #print('%s-%s -eid=%s dt=%s %s\n' % (etype, op2.element_type, eid, dt, str(out)))
                for unused_j in range(nnodes):
                    edata = data[n:n+64]
                    n += 64
                    out = s2.unpack(edata)
                    if op2.is_debug_file:
                        op2.binary_debug.write('%s-%sB - %s\n' % (etype, op2.element_type, str(out)))
                    #print('%s-%sB - %s\n' % (etype, op2.element_type, str(out)))
                    assert len(out) == 16
                    (grid,
                     sx, sy, sz, sxy, syz, sxz, se, eps, ecs,
                     ex, ey, ez, exy, eyz, exz) = out
                    obj.add_sort1(dt, eid, grid,
                                  sx, sy, sz, sxy, syz, sxz, se, eps, ecs,
                                  ex, ey, ez, exy, eyz, exz)
        else:  # pragma: no cover
            #msg = op2.code_information()
            msg = "format_code=%s numwide=%s numwide_real=%s numwide_random=%s\n" % (
                op2.format_code, op2.num_wide, numwide_real, numwide_random)
            #return op2._not_implemented_or_skip(data, ndata, msg)
            raise RuntimeError(msg + op2.code_information())
        return n, nelements, ntotal
    def _oes_cquad4_33(self, data, ndata: int, dt, is_magnitude_phase: bool,
                       result_type: int, prefix: str, postfix: str) -> Tuple[int, Any, Any]:
        """
        Reads stress/strain for centroidal shell element types:
         - 33 : CQUAD4-centroidal
         - 64 : CQUAD4-centroidal (nastran95)
         - 228 : CQUADR-centroidal

        The branch taken below is selected by (result_type, op2.num_wide):
         - result_type=0, numwide=17 : real
         - result_type=1, numwide=15 : complex (real/imag or mag/phase)
         - result_type=1/2, numwide=9 : random (MSC; no von Mises)
         - result_type=1/2, numwide=11 : random with von Mises
         - result_type=1, numwide=17, OESVM/OSTRVM tables : complex + von Mises

        Parameters
        ----------
        data : bytes
            the chunk of the OP2 record to parse
        ndata : int
            length of ``data`` in bytes
        dt : varies
            the transient value (time/freq/load step) for this subtable;
            None for static results
        is_magnitude_phase : bool
            True: complex pairs are (magnitude, phase);
            False: complex pairs are (real, imag)
        result_type : int
            0=real, 1=complex, 2=random (see branch table above)
        prefix, postfix : str
            wrapped around the base result name to build result_name
            (e.g. 'cquad4_stress')

        Returns
        -------
        n : int
            number of bytes consumed
        nelements : int / None
            number of elements parsed (None when the result is skipped /
            auto-returned on the sizing pass)
        ntotal : int / None
            bytes per element entry (None when skipped)
        """
        op2 = self.op2
        n = 0
        factor = self.factor
        size = self.size
        #print('_oes_cquad4_33')
        # pick the result container classes and the slot name based on
        # stress vs. strain and the element type
        if op2.is_stress:
            obj_vector_real = RealPlateStressArray
            obj_vector_complex = ComplexPlateStressArray
            if op2.element_type == 33:
                result_name = prefix + 'cquad4_stress' + postfix
            elif op2.element_type == 228:
                result_name = prefix + 'cquadr_stress' + postfix
                assert op2.num_wide in [17, 15], op2.code_information()
            elif op2._nastran_format == 'nasa95':
                # NASA-95 reuses this reader for CQUAD1 (19) / CQUAD4 (64)
                if op2.element_type == 19:
                    result_name = prefix + 'cquad1_stress' + postfix
                elif op2.element_type == 64:
                    result_name = prefix + 'cquad4_stress' + postfix
                else:
                    raise NotImplementedError(op2.code_information())
            else:
                raise NotImplementedError(op2.code_information())
        else:
            obj_vector_real = RealPlateStrainArray
            obj_vector_complex = ComplexPlateStrainArray
            if op2.element_type == 33:
                result_name = prefix + 'cquad4_strain' + postfix
            elif op2.element_type == 228:
                result_name = prefix + 'cquadr_strain' + postfix
                assert op2.num_wide in [17, 15], op2.code_information()
            elif op2._nastran_format == 'nasa95':
                if op2.element_type == 19:
                    result_name = prefix + 'cquad1_strain' + postfix
                elif op2.element_type == 64:
                    result_name = prefix + 'cquad4_strain' + postfix
                else:
                    raise NotImplementedError(op2.code_information())
            else:
                raise NotImplementedError(op2.code_information())
        # bail out early if the user excluded this result type
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        numwide_real = 17
        sort_method = op2.sort_method
        if result_type == 0 and op2.num_wide == 17: # real
            ntotal = 68 * factor # 4*17
            nelements = ndata // ntotal
            nlayers = nelements * 2 # 2 layers per node
            #op2.log.info(f'CQUAD4-33: len(data)={ndata} numwide={op2.num_wide} nelements={nelements} nlayers={nlayers}')
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_real)
            if auto_return:
                # sizing pass (read_mode=1): just report the bytes consumed
                op2._data_factor = 2 # number of "layers" for an element
                return nelements * ntotal, None, None
            obj = op2.obj
            if op2.use_vector and is_vectorized and sort_method == 1:
                # vectorized path: one frombuffer/reshape per record
                nnodes_expected = 2
                n = nelements * ntotal
                ielement = obj.ielement
                ielement2 = ielement + nelements
                itotal = obj.itotal
                itotal2 = itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    # element ids only need to be decoded on the first time step;
                    # eid_device = 10*eid + device_code
                    ints = frombuffer(data, dtype=op2.idtype8)
                    ints1 = ints.reshape(nelements, numwide_real)
                    eids = ints1[:, 0] // 10
                    # duplicate each eid for the 2 layers
                    eids = np.vstack([eids, eids]).T.ravel()
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = eids
                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, numwide_real)[:, 1:]
                #fd, sx, sy, txy, angle, major, minor, max_shear
                floats1 = floats.reshape(nelements * nnodes_expected, 8)
                obj.data[obj.itime, itotal:itotal2, :] = floats1.copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                # scalar fallback (also handles SORT2)
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug(f'vectorize centroidal quad: {op2.element_name}-{op2.element_type} real '
                                  f'SORT{sort_method}')
                n = oes_quad4_33_real_17(op2, data, obj, ntotal, nelements, dt)
            if op2.is_sort1:
                assert obj.element_node[:, 0].min() > 0, obj.element_node[:, 0].shape
        elif result_type == 1 and op2.num_wide == 15: # imag
            #op2.to_nx(f' because CQUAD4-33 (numwide=15) was found')
            #nnodes = 0 # centroid + 4 corner points
            ntotal = op2.num_wide * size
            #op2.log.info(f'CQUAD4-33: len(data)={ndata} numwide={op2.num_wide} nelements={nelements} nlayers={nlayers}')
            nelements = ndata // ntotal
            nlayers = nelements * 2
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_complex)
            if auto_return:
                op2._data_factor = 2
                return nelements * ntotal, None, None
            obj = op2.obj
            if op2.use_vector and is_vectorized and sort_method == 1:
                n = nelements * ntotal
                nnodes_all = 1
                itotal = obj.itotal
                itotal2 = itotal + 2 * nelements * nnodes_all
                ielement = obj.ielement
                ielement2 = ielement + nelements
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 15 * nnodes_all)
                # drop the eid_device word, then split into 2 layers of 7 words
                floats1 = floats[:, 1:].reshape(nelements * nnodes_all * 2, 7).copy()
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 15 * nnodes_all).copy()
                    eids = ints[:, 0] // 10
                    ints[:, 0] = 0
                    ints1 = ints.reshape(nelements * nnodes_all, 15)
                    # nids are all 0 here (column 0 was just zeroed) -> centroid
                    nids = ints[:, 0]
                    assert eids.min() > 0, eids.min()
                    eids2 = np.vstack([eids, eids]).T.ravel()
                    nids2 = np.vstack([nids, nids]).T.ravel()
                    obj.element_node[itotal:itotal2, 0] = eids2
                    obj.element_node[itotal:itotal2, 1] = nids2
                #[fd, sxr, sxi, syr, syi, txyr, txyi]
                # isave1/isave2 are the (real, imag) or (mag, phase) column pairs
                isave1 = [1, 3, 5]
                isave2 = [2, 4, 6]
                real_imag = apply_mag_phase(floats1, is_magnitude_phase, isave1, isave2)
                obj.fiber_curvature[itotal:itotal2] = floats1[:, 0]
                obj.data[obj.itime, itotal:itotal2, :] = real_imag
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug(f'vectorize CQUAD4-33 imag SORT{sort_method}')
                n = oes_cquad4_33_complex_15(
                    op2, data, obj,
                    nelements, ntotal,
                    is_magnitude_phase)
        elif result_type in [1, 2] and op2.num_wide == 9: # random msc
            # _oes_cquad4 is the same as _oes_ctria3
            # for random tables, nonlinear_factor holds the element id
            element_id = op2.nonlinear_factor
            if op2.is_stress:
                obj_vector_random = RandomPlateStressArray
            else:
                obj_vector_random = RandomPlateStrainArray
            op2.data_code['nonlinear_factor'] = element_id
            if op2._results.is_not_saved(result_name):
                op2._data_factor = 2
                return ndata, None, None
            op2._results._found_result(result_name)
            slot = op2.get_result(result_name)
            ntotal = 36 * self.factor # 4*9
            nelements = ndata // ntotal
            nlayers = nelements * 2
            nnodes_expected = 1
            #if op2.table_name_str.startswith('OSTRRMS'):
                #print(f'{op2.table_name_str} {result_name}: {nelements} {ntotal}')
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_random)
            if auto_return:
                op2._data_factor = 2
                return nelements * ntotal, None, None
            obj = op2.obj
            if op2.use_vector and is_vectorized:
                n = nelements * ntotal
                itotal = obj.itotal
                itotal2 = itotal + nelements * 2
                if sort_method == 1:
                    ielement = obj.ielement
                    ielement2 = ielement + nelements
                    obj._times[obj.itime] = dt
                    if obj.itime == 0:
                        ints = frombuffer(data, dtype=op2.idtype)
                        ints1 = ints.reshape(nelements, 9)
                        eids = ints1[:, 0] // 10
                        assert eids.min() > 0, eids.min()
                        #print(eids)
                        eids2 = np.vstack([eids, eids]).T.ravel()
                        #print(eids2)
                        # TODO: what about layer 1/2?
                        #print(op2.code_information())
                        #print(f'eids.shape={eids.shape} obj.element_node.shape={obj.element_node.shape}')
                        obj.element_node[itotal:itotal2, 0] = eids2
                    floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 9)[:, 1:]
                    #fd1, sx1, sy1, txy1, fd2, fx2, fy2, txy2
                    floats2 = floats.reshape(nelements * nnodes_expected, 8)
                    #[eid_device, fd1, sx1, sy1, txy1,
                    # fd2, sx2, sy2, txy2,]
                    nf2 = floats2.shape[0]
                    # reshape into 2 rows per element (one per layer)
                    floats3 = floats2.reshape(nf2*2, 4)
                    obj.fiber_curvature[itotal:itotal2] = floats3[:, 0].copy()
                    obj.data[obj.itime, itotal:itotal2, :] = floats3[:, 1:].copy()
                    obj.itotal = itotal2
                    obj.ielement = ielement2
                elif sort_method == 2 and op2._analysis_code_fmt == b'f':
                    # SORT2: each record is one element over all times;
                    # obj.itime is really the element index here
                    ielement = obj.itime
                    ie_upper = 2 * ielement
                    ie_lower = 2 * ielement + 1
                    obj.element_node[ie_upper, 0] = dt
                    obj.element_node[ie_lower, 0] = dt
                    #obj._times[obj.itime] = dt
                    floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 9)# [:, 1:]
                    # itime is actually ielement
                    # we grab the element id from the ints for all times
                    if op2._analysis_code_fmt == b'i' and obj.itime == 0:
                        # NOTE(review): unreachable - the enclosing elif already
                        # requires _analysis_code_fmt == b'f'; the bare name
                        # 'aaa' below would raise NameError if ever hit
                        #print('analysis_code ', op2.analysis_code, op2._analysis_code_fmt)
                        ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 9)
                        eids = ints[:, 0] // 10
                        #nids = np.zeros(len(eids), dtype='int32')
                        #print(eids)
                        #eids = np.vstack([eids, nids]).T.ravel()
                        #print(eids.shape)
                        #print(eids)
                        #print(obj.element)
                        #assert eids.min() > 0, eids.min()
                        #obj.element[itotal:itotal2, 0] = eids
                        obj._times[itotal:itotal2] = eids
                        aaa
                    elif op2._analysis_code_fmt == b'f' and obj.itime == 0:
                        #print(floats[:, 0])
                        #print(floats[:, 0].shape, obj._times.shape)
                        obj._times[itotal:itotal2] = floats[:, 0]
                    floats1 = floats[:, 1:]
                    #print(floats1)
                    #print(floats1.shape)
                    #fd, sx, sy, txy,
                    floats2 = floats1.reshape(nelements * nnodes_expected, 8)
                    nf2 = floats2.shape[0]
                    # reshape it into 2 layers
                    floats3 = floats2.reshape(nf2*2, 4)
                    # we only need to grab the first two fiber/curvature values
                    # as they're duplicated many times for the same element
                    obj.fiber_curvature[2*obj.itime:2*obj.itime+2] = floats3[:2, 0].copy()
                    # we apply the data across 2 rows because we have 2 layers
                    obj.data[:, ie_upper, :] = floats3[::2, 1:].copy()
                    obj.data[:, ie_lower, :] = floats3[1::2, 1:].copy()
                else:
                    raise NotImplementedError(op2.code_information())
                obj.itotal = itotal2
                #obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug(f'vectorize CQUAD4-33 random numwide=9 SORT{sort_method}')
                n = oes_cquad4_33_random_9(op2, data, obj, nelements, ntotal)
        elif result_type in [1, 2] and op2.num_wide == 11: # random
            # random with von Mises; per-word layout:
            #2 FD1 RS Z1 = Fibre Distance
            #3 SX1 RS Normal in x at Z1
            #4 SY1 RS Normal in y at Z1
            #5 TXY1 RS Shear in xy at Z1
            #6 RMSVM1 RS RMS von Mises at Z1
            #7 FD2 RS Z2 = Fibre Distance
            #8 SX2 RS Normal in x at Z2
            #9 SY2 RS Normal in y at Z2
            #10 TXY2 RS Shear in xy at Z2
            #11 RMSVM2 RS RMS von Mises at Z2
            element_id = op2.nonlinear_factor
            if op2.is_stress:
                obj_vector_random = RandomPlateVMStressArray
            else:
                obj_vector_random = RandomPlateVMStrainArray
            op2.data_code['nonlinear_factor'] = element_id
            if op2._results.is_not_saved(result_name):
                return ndata, None, None
            op2._results._found_result(result_name)
            slot = op2.get_result(result_name)
            ntotal = 44 * factor # 4*11
            nelements = ndata // ntotal
            nlayers = nelements * 2
            nnodes_expected = 1
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_random)
            if auto_return:
                op2._data_factor = 2
                return nelements * ntotal, None, None
            obj = op2.obj
            # NOTE(review): dead branch ('and 0'); it also reshapes the ints to
            # 9 words, which is inconsistent with numwide=11 - verify before
            # ever re-enabling
            if op2.use_vector and is_vectorized and 0: # pragma: no cover
                n = nelements * 4 * op2.num_wide
                ielement = obj.ielement
                ielement2 = ielement + nelements
                itotal = obj.itotal
                itotal2 = itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype)
                    ints1 = ints.reshape(nelements, 9)
                    eids = ints1[:, 0] // 10
                    print(eids)
                    eids = np.vstack([eids, eids]).T.ravel()
                    print(eids.shape)
                    print(eids)
                    print(obj.element)
                    assert eids.min() > 0, eids.min()
                    obj.element[itotal:itotal2, 0] = eids
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 11)[:, 1:]
                print(floats.shape)
                #fd, sx, sy, txy,
                floats1 = floats.reshape(nelements * nnodes_expected, 10)
                obj.data[obj.itime, itotal:itotal2, :] = floats1.copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                n = oes_cquad4_33_random_vm_11(op2, data, obj, nelements, ntotal)
        elif result_type == 1 and op2.num_wide == 17 and op2.table_name in [b'OESVM1', b'OESVM2', b'OSTRVM1', b'OSTRVM2']: # freq
            # Table of element stresses for frequency response analysis that includes
            # von Mises stress output in SORT1 format.
            element_id = op2.nonlinear_factor
            if op2.is_stress: # TODO: add new complex type
                obj_vector_complex = ComplexPlateVMStressArray
            else:
                obj_vector_complex = ComplexPlateVMStrainArray
            op2.data_code['nonlinear_factor'] = element_id
            if op2._results.is_not_saved(result_name):
                return ndata, None, None
            op2._results._found_result(result_name)
            slot = op2.get_result(result_name)
            ntotal = 68 * self.factor # 4*17
            nelements = ndata // ntotal
            nlayers = nelements * 2
            nnodes_expected = 1
            #op2.log.info(f'CQUAD4-33: len(data)={ndata} numwide={op2.num_wide} nelements={nelements} nlayers={nlayers}')
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_complex)
            if auto_return:
                op2._data_factor = 2
                return nelements * ntotal, None, None
            #self.show_data(data)
            #             ELEMENT           FIBER                                     - STRESSES IN ELEMENT COORDINATE SYSTEM -
            #                ID.           DISTANCE                 NORMAL-X                       NORMAL-Y                      SHEAR-XY             VON MISES
            #0          101  -5.000000E-01  -8.152692E-01 /  0.0           -1.321875E+00 /  0.0           -3.158517E+00 /  0.0            5.591334E+00
            #                 5.000000E-01   1.728573E+00 /  0.0           -7.103837E+00 /  0.0            2.856040E+00 /  0.0            9.497519E+00
            #floats  = (1011,
            #           -0.5, -0.8152692317962646, 0.0, -1.321874737739563, 0.0, -3.1585168838500977, 0.0, 5.591334342956543,
            #           0.5, 1.7285730838775635, 0.0, -7.103837490081787, 0.0, 2.8560397624969482, 0.0, 9.497518539428711)
            obj = op2.obj
            if is_vectorized and op2.use_vector and op2.sort_method == 1 and 0: # pragma: no cover
                raise NotImplementedError(op2.table_name_str)
            else:
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug('vectorize CQUAD4-33 complex '
                                  f'{op2.table_name_str} SORT{op2.sort_method}')
                n = oes_cquad4_33_complex_vm_17(op2, data, obj, nelements, ntotal,
                                                is_magnitude_phase)
        else: # pragma: no cover
            raise RuntimeError(op2.code_information())
        # sanity check: the result object must match the element being parsed
        assert op2.obj.element_name == op2.element_name, op2.obj
        assert n > 0
        return n, nelements, ntotal
    def _oes_ctria3(self, data, ndata: int, dt, is_magnitude_phase: bool,
                    result_type: int, prefix: str, postfix: str) -> Tuple[int, Any, Any]:
        """
        Reads stress/strain for centroidal triangular shell element types:
         - 74 : CTRIA3-centroidal
         - 83 : CTRIA3-centroidal (NASA 95)
         - 227: TRIAR-centroidal

        The branch taken below is selected by (op2.format_code, op2.num_wide):
         - format_code in [1, 3], numwide=17 : real
         - format_code in [2, 3], numwide=15 : complex (real/imag or mag/phase)
         - format_code in [2, 3], numwide=17, OESVM/OSTRVM tables :
           complex + von Mises
         - format_code in [1, 2, 3], numwide=11 : random with von Mises
         - format_code in [1, 2], numwide=9 : random (MSC; no von Mises)

        Parameters
        ----------
        data : bytes
            the chunk of the OP2 record to parse
        ndata : int
            length of ``data`` in bytes
        dt : varies
            the transient value (time/freq/load step) for this subtable;
            None for static results
        is_magnitude_phase : bool
            True: complex pairs are (magnitude, phase);
            False: complex pairs are (real, imag)
        result_type : int
            0=real, 1=complex, 2=random (unused here; the dispatch is on
            format_code/num_wide)
        prefix, postfix : str
            wrapped around the base result name (e.g. 'ctria3_stress')

        Returns
        -------
        n : int
            number of bytes consumed
        nelements : int / None
            number of elements parsed (None when the result is skipped)
        ntotal : int / None
            bytes per element entry (None when skipped)
        """
        op2 = self.op2
        #print('_oes_ctria3')
        n = 0
        etype_map = {
            #element_type : (element_base, element_name)
            74 : ('ctria3', 'CTRIA3'),
            83 : ('ctria3', 'CTRIA3'), # NASA-95
            227 : ('ctriar', 'CTRIAR'),
        }
        # pick the result container classes based on stress vs. strain
        if op2.is_stress:
            stress_strain = 'stress'
            obj_vector_real = RealPlateStressArray
            obj_vector_complex = ComplexPlateStressArray
        else:
            stress_strain = 'strain'
            obj_vector_real = RealPlateStrainArray
            obj_vector_complex = ComplexPlateStrainArray
        #if prefix == '' and postfix == '':
            #prefix = stress_strain + '.'
        element_base, element_name = etype_map[op2.element_type]
        # stress.ctria3_stress
        result_name = prefix + f'{element_base}_{stress_strain}' + postfix
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        #print(op2.element_name, result_name, op2.format_code, op2.num_wide)
        #table_names = [
        #    b'OES1', b'OES1X', b'OES1X1', b'OSTR1X',
        #    b'OES2', b'OSTR2',
        #    b'OESVM1', b'OESVM2', b'OSTRVM1', b'OSTRVM2',
        #    b'OESPSD1', b'OESRMS1',
        #    b'OESPSD2', b'OESATO2', b'OESCRM2', b'OESNO1', b'OESXRMS1', b'OESXNO1',
        #    b'OSTRPSD2', b'OSTRATO2', b'OSTRCRM2', b'OSTRNO1', b'OSTRRMS1',
        #]
        #assert op2.table_name in table_names, op2.table_name
        sort_method = op2.sort_method
        element_name_type = f'{op2.element_name}-{op2.element_type}'
        if op2.format_code in [1, 3] and op2.num_wide == 17: # real
            ntotal = 68 * self.factor # 4*17
            nelements = ndata // ntotal
            nlayers = nelements * 2 # 2 layers per node
            #if self.code[0] == 100:
                #print(f'\nnelements={nelements} nlayers={nlayers} {self.code in slot}')
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_real)
            if auto_return:
                # sizing pass: just report the bytes consumed
                op2._data_factor = 2
                return nelements * ntotal, None, None
            if op2.is_debug_file:
                op2.binary_debug.write('  [cap, element1, element2, ..., cap]\n')
                op2.binary_debug.write('  cap = %i  # assume 1 cap when there could have been multiple\n' % ndata)
                op2.binary_debug.write('  #elementi = [eid_device, fd1, sx1, sy1, txy1, angle1, major1, minor1, vm1,\n')
                op2.binary_debug.write('  #                        fd2, sx2, sy2, txy2, angle2, major2, minor2, vm2,]\n')
                op2.binary_debug.write('  nelements=%i; nnodes=1 # centroid\n' % nelements)
            obj = op2.obj
            if op2.use_vector and is_vectorized and sort_method == 1:
                # vectorized path: one frombuffer/reshape per record
                nfields = 17 * nelements
                nbytes = nfields * 4
                itotal = obj.itotal
                iend = obj.itotal + nlayers
                itime = obj.itime
                if itime == 0:
                    # decode eids once; eid_device = 10*eid + device_code
                    ints = frombuffer(data, dtype=op2.idtype8).reshape(nelements, 17)
                    eids = ints[:, 0] // 10
                    #ilayers = ints[:, 1]
                    #ints2 = ints[:, 1:].reshape(nlayers, 8)
                    assert eids.min() > 0, eids
                    obj._times[obj.itime] = dt
                    # interleave: even rows = layer 1, odd rows = layer 2
                    obj.element_node[itotal:iend:2, 0] = eids
                    obj.element_node[itotal+1:iend+1:2, 0] = eids
                    #obj.element_node[itotal:iend, 1] = 0
                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 17)
                floats1 = floats[:, 1:].reshape(nlayers, 8).copy()
                obj.data[obj.itime, itotal:iend, :] = floats1
                obj._times[obj.itime] = dt
                obj.itotal += nlayers
                n = nbytes
            else:
                # scalar fallback (also handles SORT2)
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug(f'vectorize centroidal tri: {element_name_type} real SORT{sort_method}')
                n = oes_ctria3_real_17(op2, data, obj,
                                       ntotal, nelements, dt)
            if op2.is_sort1:
                assert obj.element_node[:, 0].min() > 0, obj.element_node[:, 0]
        elif op2.format_code in [2, 3] and op2.num_wide == 15: # imag
            ntotal = 60 * self.factor # 4*15
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_complex)
            if auto_return:
                op2._data_factor = 2
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj
            if op2.use_vector and is_vectorized and sort_method == 1:
                n = nelements * ntotal
                itotal = obj.itotal
                itotal2 = itotal + nelements * 2
                ielement = obj.ielement
                ielement2 = ielement + nelements
                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 15)
                # drop the eid_device word, then split into 2 layers of 7 words
                floats1 = floats[:, 1:].reshape(nelements * 2, 7).copy()
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype8).reshape(nelements, 15).copy()
                    eids = ints[:, 0] // 10
                    ints[:, 0] = 0
                    unused_ints1 = ints.reshape(nelements, 15)
                    # nids are all 0 here (column 0 was just zeroed) -> centroid
                    nids = ints[:, 0]
                    assert eids.min() > 0, eids.min()
                    eids2 = np.vstack([eids, eids]).T.ravel()
                    nids2 = np.vstack([nids, nids]).T.ravel()
                    obj.element_node[itotal:itotal2, 0] = eids2
                    obj.element_node[itotal:itotal2, 1] = nids2
                #[fd, sxr, sxi, syr, syi, txyr, txyi]
                # isave1/isave2 are the (real, imag) or (mag, phase) column pairs
                isave1 = [1, 3, 5]
                isave2 = [2, 4, 6]
                real_imag = apply_mag_phase(floats1, is_magnitude_phase, isave1, isave2)
                obj.fiber_curvature[itotal:itotal2] = floats1[:, 0]
                obj.data[obj.itime, itotal:itotal2, :] = real_imag
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug(f'vectorize CTRIA3 imag SORT{sort_method}')
                # scalar unpack: eid_device + 14 floats per element
                struct1 = Struct(op2._endian + mapfmt(op2._analysis_code_fmt + b'14f', self.size))
                cen = 0 # CEN/3
                for unused_i in range(nelements):
                    edata = data[n:n + ntotal]
                    out = struct1.unpack(edata)
                    (eid_device,
                     fd1, sx1r, sx1i, sy1r, sy1i, txy1r, txy1i,
                     fd2, sx2r, sx2i, sy2r, sy2i, txy2r, txy2i,) = out
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)
                    if op2.is_debug_file:
                        op2.binary_debug.write('  OESC %s - eid=%i; C=[%s]\n' % (
                            element_name_type, eid,
                            ', '.join(['%r' % di for di in out])))
                    # rebuild complex values from the paired words
                    if is_magnitude_phase:
                        sx1 = polar_to_real_imag(sx1r, sx1i)
                        sy1 = polar_to_real_imag(sy1r, sy1i)
                        sx2 = polar_to_real_imag(sx2r, sx2i)
                        sy2 = polar_to_real_imag(sy2r, sy2i)
                        txy1 = polar_to_real_imag(txy1r, txy1i)
                        txy2 = polar_to_real_imag(txy2r, txy2i)
                    else:
                        sx1 = complex(sx1r, sx1i)
                        sy1 = complex(sy1r, sy1i)
                        sx2 = complex(sx2r, sx2i)
                        sy2 = complex(sy2r, sy2i)
                        txy1 = complex(txy1r, txy1i)
                        txy2 = complex(txy2r, txy2i)
                    obj.add_sort1(dt, eid, cen,
                                  fd1, sx1, sy1, txy1,
                                  fd2, sx2, sy2, txy2)
                    n += ntotal
        #elif op2.format_code == 1 and op2.num_wide == 9: # random?
            #msg = op2.code_information()
            #return op2._not_implemented_or_skip(data, ndata, msg), None, None
        elif op2.format_code in [2, 3] and op2.num_wide == 17 and op2.table_name in [b'OESVM1', b'OESVM2', b'OSTRVM1', b'OSTRVM2']:
            # freq:
            # # random; CTRIA3
            assert op2.table_name in [b'OESVM1', b'OESVM2', b'OSTRVM1', b'OSTRVM2'], op2.code_information()
            element_id = op2.nonlinear_factor
            if op2.is_stress: # TODO: add new complex type
                obj_vector_complex = ComplexPlateVMStressArray
            else:
                obj_vector_complex = ComplexPlateVMStrainArray
            op2.data_code['nonlinear_factor'] = element_id
            if op2._results.is_not_saved(result_name):
                return ndata, None, None
            op2._results._found_result(result_name)
            slot = op2.get_result(result_name)
            ntotal = 68 * self.factor  # 17*4
            nelements = ndata // ntotal
            nlayers = nelements * 2
            nnodes_expected = 1
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_complex)
            if auto_return:
                op2._data_factor = 2
                return nelements * ntotal, None, None
            obj = op2.obj
            # C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\shlthk14.op2
            nelements = ndata // ntotal
            assert ndata % ntotal == 0
            #                C O M P L E X   S T R E S S E S   I N   T R I A N G U L A R   E L E M E N T S   ( T R I A 3 )
            #                                                          (REAL/IMAGINARY)
            #
            #                ELEMENT       FIBER                                     - STRESSES IN ELEMENT  COORDINATE SYSTEM -
            #                  ID.        DISTANCE              NORMAL-X                     NORMAL-Y                    SHEAR-XY             VON MISES
            #0        1       -4.359080E+00  -1.391918E+00 /  2.474756E-03  -1.423926E+00 /  2.530494E-03   2.655153E-02 / -5.158625E-05   1.408948E+00
            #                  4.359080E+00   1.391918E+00 / -2.474756E-03   1.423926E+00 / -2.530494E-03  -2.655153E-02 /  5.158625E-05   1.408948E+00
            n = oes_ctria3_complex_vm_17(op2, data, obj, nelements, ntotal, dt,
                                         is_magnitude_phase)
            assert n is not None, n
        elif op2.format_code in [1, 2, 3] and op2.num_wide == 11: # random; CTRIA3
            # random with von Mises; per-word layout:
            #2 FD1 RS Z1 = Fibre Distance
            #3 SX1 RS Normal in x at Z1
            #4 SY1 RS Normal in y at Z1
            #5 TXY1 RS Shear in xy at Z1
            #6 RMSVM1 RS RMS von Mises at Z1
            #7 FD2 RS Z2 = Fibre Distance
            #8 SX2 RS Normal in x at Z2
            #9 SY2 RS Normal in y at Z2
            #10 TXY2 RS Shear in xy at Z2
            #11 RMSVM2 RS RMS von Mises at Z2
            element_id = op2.nonlinear_factor
            if op2.is_stress:
                obj_vector_random = RandomPlateVMStressArray
            else:
                obj_vector_random = RandomPlateVMStrainArray
            op2.data_code['nonlinear_factor'] = element_id
            if op2._results.is_not_saved(result_name):
                return ndata, None, None
            op2._results._found_result(result_name)
            slot = op2.get_result(result_name)
            ntotal = 44 * self.factor # 4*11
            nelements = ndata // ntotal
            nlayers = nelements * 2
            nnodes_expected = 1
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_random)
            if auto_return:
                op2._data_factor = 2
                return nelements * ntotal, None, None
            obj = op2.obj
            # NOTE(review): dead branch ('and 0'); it also reshapes the ints to
            # 9 words, which is inconsistent with numwide=11 - verify before
            # ever re-enabling
            if op2.use_vector and is_vectorized and 0: # pragma: no cover
                n = nelements * 4 * op2.num_wide
                ielement = obj.ielement
                ielement2 = ielement + nelements
                itotal = obj.itotal
                itotal2 = itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype)
                    ints1 = ints.reshape(nelements, 9)
                    eids = ints1[:, 0] // 10
                    print(eids)
                    eids = np.vstack([eids, eids]).T.ravel()
                    print(eids.shape)
                    print(eids)
                    print(obj.element)
                    assert eids.min() > 0, eids.min()
                    obj.element[itotal:itotal2, 0] = eids
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 11)[:, 1:]
                print(floats.shape)
                #fd, sx, sy, txy,
                floats1 = floats.reshape(nelements * nnodes_expected, 10)
                obj.data[obj.itime, itotal:itotal2, :] = floats1.copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector and obj.itime == 0: # pragma: no cover
                    op2.log.debug(f'vectorize {element_name_type} random numwide=11 SORT{sort_method}')
                n = oes_ctria3_random_vm_11(op2, data, obj, nelements, ntotal)
        elif op2.format_code in [1, 2] and op2.num_wide == 9: # random MSC stress/strain; CTRIA3
            # _oes_cquad4 is the same as _oes_ctria3
            element_id = op2.nonlinear_factor
            if op2.is_stress:
                obj_vector_random = RandomPlateStressArray
                #result_name = prefix + 'ctria3_stress' + postfix
            else:
                obj_vector_random = RandomPlateStrainArray
                #result_name = prefix + 'ctria3_strain' + postfix
            op2.data_code['nonlinear_factor'] = element_id
            if op2._results.is_not_saved(result_name):
                return ndata, None, None
            op2._results._found_result(result_name)
            slot = op2.get_result(result_name)
            ntotal = 36 * self.factor # 4*9
            nelements = ndata // ntotal
            nlayers = nelements * 2
            nnodes_expected = 1
            auto_return, is_vectorized = op2._create_oes_object4(
                nlayers, result_name, slot, obj_vector_random)
            if auto_return:
                op2._data_factor = 2
                return nelements * ntotal, None, None
            obj = op2.obj
            # NOTE(review): dead branch ('and 0'); kept for reference only
            if op2.use_vector and is_vectorized and 0: # pragma: no cover
                n = nelements * 4 * op2.num_wide
                ielement = obj.ielement
                ielement2 = ielement + nelements
                itotal = obj.itotal
                itotal2 = itotal + nelements * nnodes_expected
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype)
                    ints1 = ints.reshape(nelements, 9)
                    eids = ints1[:, 0] // 10
                    print(eids)
                    eids = np.vstack([eids, eids]).T.ravel()
                    print(eids.shape)
                    print(eids)
                    print(obj.element)
                    assert eids.min() > 0, eids.min()
                    obj.element[itotal:itotal2, 0] = eids
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 9)[:, 1:]
                print(floats.shape)
                #fd, sx, sy, txy,
                floats1 = floats.reshape(nelements * nnodes_expected, 8)
                obj.data[obj.itime, itotal:itotal2, :] = floats1.copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                if is_vectorized and op2.use_vector: # pragma: no cover
                    op2.log.debug(f'vectorize {element_name_type} random2 SORT{sort_method}')
                n = oes_ctria3_random_9(op2, data, obj, nelements, ntotal)
        else: # pragma: no cover
            raise RuntimeError(op2.code_information())
        assert n is not None, op2.code_information()
        return n, nelements, ntotal
def _oes_cquad4_144(self, data: bytes, ndata: int, dt, is_magnitude_phase: bool,
result_type: int, prefix: str, postfix: str) -> Tuple[int, Any, Any]:
"""
reads stress/strain for element type:
- 64 : CQUAD8
- 70 : CTRIAR
- 75 : CTRIA6
- 82 : CQUADR
- 144 : CQUAD4-bilinear
"""
op2 = self.op2
n = 0
size = self.size
etype_map = {
#element_type : (element_base, nnodes_expected, element_name)
64 : ('cquad8', 4, 'CQUAD8'),
70 : ('ctriar', 3, 'CTRIAR'),
75 : ('ctria6', 3, 'CTRIA6'),
82 : ('cquadr', 4, 'CQUADR'),
144 : ('cquad4', 4, 'CQUAD4-bilinear'),
}
if op2.is_stress:
stress_strain = 'stress'
obj_vector_real = RealPlateStressArray
obj_vector_complex = ComplexPlateStressArray
obj_vector_random = RandomPlateStressArray
else:
stress_strain = 'strain'
obj_vector_real = RealPlateStrainArray
obj_vector_complex = ComplexPlateStrainArray
obj_vector_random = RandomPlateStrainArray
# centroid not incldued in nnodes
element_base, nnodes, element_name = etype_map[op2.element_type]
#if prefix == '' and postfix == '':
#prefix = stress_strain + '.'
# stress.cquad4_stress
result_name = prefix + f'{element_base}_{stress_strain}' + postfix
if op2._results.is_not_saved(result_name):
return ndata, None, None
op2._results._found_result(result_name)
log = op2.log
nnodes_all = nnodes + 1 # adding the centroid
slot = op2.get_result(result_name)
numwide_real = 2 + 17 * nnodes_all
numwide_imag = 2 + 15 * nnodes_all
numwide_random = 2 + 9 * nnodes_all
#numwide_imag2 = 2 + 16 * nnodes_all
#print('%s real=%s imag=%s imag2=%s random=%s' % (
#op2.element_name, numwide_real, numwide_imag, numwide_imag2, numwide_random
#))
#etype = op2.element_name
#grid_center = 'CEN/%i' % nnodes
# OESVM1/2 (complex)
# 87 - CQUAD8
# OSTRNO1
# 47 - CQUAD4
# CQUAD8 real=87 imag=77 imag2=82 random=47
# CQUAD4 ???=
sort_method = op2.sort_method
element_name_type = f'{op2.element_name}-{op2.element_type}'
#print(op2.code_information())
if result_type == 0 and op2.num_wide == numwide_real: # real
ntotal = 4 * (2 + 17 * nnodes_all) * self.factor
nelements = ndata // ntotal
assert ndata % ntotal == 0
nlayers = 2 * nelements * nnodes_all # 2 layers per node
auto_return, is_vectorized = op2._create_oes_object4(
nlayers, result_name, slot, obj_vector_real)
if auto_return:
op2._data_factor = 2 * nnodes_all # number of "layers" for an element
return nelements * ntotal, None, None
obj = op2.obj
#print('dt=%s, itime=%s' % (obj.itime, dt))
if op2.use_vector and is_vectorized and sort_method == 1:
# self.itime = 0
# self.ielement = 0
# self.itotal = 0
#self.ntimes = 0
#self.nelements = 0
n = nelements * ntotal
istart = obj.itotal
iend = istart + nlayers
obj._times[obj.itime] = dt
if obj.itime == 0:
ints = frombuffer(data, dtype=op2.idtype8).reshape(nelements, numwide_real)
ints1 = ints[:, 2:].reshape(nlayers//2, 17)[:, 0].reshape(nelements, nnodes_all).copy()
ints1[:, 0] = 0.
nids = ints1.ravel()
eids = ints[:, 0] // 10
eids2 = array([eids] * (nnodes_all * 2), dtype=op2.idtype8).T.ravel()
nids2 = vstack([nids, nids]).T.ravel()
obj.element_node[istart:iend, 0] = eids2
obj.element_node[istart:iend, 1] = nids2
#assert obj.element_node[:iend, 0].min() > 0, eids2
if obj.nonlinear_factor is not None:
float_mask = np.arange(nelements * numwide_real, dtype=np.int32).reshape(nelements, numwide_real)
float_mask1 = float_mask[:, 2:].reshape(nlayers // 2, 17)[:, 1:].reshape(nlayers, 8)
obj.float_mask = float_mask1
if obj.nonlinear_factor is not None:
results = frombuffer(data, dtype=op2.fdtype8)[obj.float_mask].copy()
else:
floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, numwide_real)
floats1 = floats[:, 2:].reshape(nlayers // 2, 17)
results = floats1[:, 1:].reshape(nlayers, 8).copy()
#[fiber_dist, oxx, oyy, txy, angle, majorP, minorP, ovm]
obj.data[obj.itime, istart:iend, :] = results
assert obj.element_node[:, 0].min() > 0, obj.element_node[:, 0]
else:
if is_vectorized and op2.use_vector: # pragma: no cover
log.debug(f'vectorize nodal shell: {element_name_type}... real SORT{sort_method}')
n = oes_cquad4_144_real(op2, data, ndata, obj,
nelements, nnodes, dt)
if op2.is_sort1:
assert obj.element_node[:, 0].min() > 0, obj.element_node[:, 0]
elif result_type == 1 and op2.num_wide == numwide_imag: # complex
ntotal = numwide_imag * 4 * self.factor
#assert op2.num_wide * 4 == ntotal, 'numwide*4=%s ntotal=%s' % (op2.num_wide*4, ntotal)
nelements = ndata // ntotal
nlayers = nelements * 2 * nnodes_all
#print(element_name_type)
#print('ndata', ndata)
#print('ntotal', ntotal)
#print('nelements', nelements)
#print('nlayers', nlayers)
auto_return, is_vectorized = op2._create_oes_object4(
nlayers, result_name, slot, obj_vector_complex)
if auto_return:
op2._data_factor = 2 * nnodes_all
#if op2.num_wide == 77:
#print('ntotal =', ndata, ntotal, op2.num_wide)
#print('nelements * ntotal =', nelements * ntotal)
return nelements * ntotal, None, None
obj = op2.obj
if op2.use_vector and is_vectorized and sort_method == 1:
n = nelements * ntotal
itotal = obj.itotal
itotal2 = itotal + nelements * (nnodes_all * 2)
ielement = obj.ielement
ielement2 = ielement + nelements
floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, numwide_imag)
floats1 = floats[:, 2:].reshape(nelements * nnodes_all, 15)
floats2 = floats1[:, 1:].reshape(nelements * nnodes_all * 2, 7).copy()
obj._times[obj.itime] = dt
if obj.itime == 0:
ints = frombuffer(data, dtype=op2.idtype8).reshape(nelements, numwide_imag).copy()
ints[:, 2] = 0 # set center node to 0
ints1 = ints[:, 2:].reshape(nelements * nnodes_all, 15)
eids = ints[:, 0] // 10
nids = ints1[:, 0]
eids2 = np.vstack([eids] * (nnodes_all * 2)).T.ravel()
nids2 = np.vstack([nids, nids]).T.ravel()
assert eids.min() > 0, eids.min()
obj.element_node[itotal:itotal2, 0] = eids2
obj.element_node[itotal:itotal2, 1] = nids2
#[fd, sxr, sxi, syr, syi, txyr, txyi]
isave1 = [1, 3, 5]
isave2 = [2, 4, 6]
real_imag = apply_mag_phase(floats2, is_magnitude_phase, isave1, isave2)
obj.fiber_curvature[itotal:itotal2] = floats2[:, 0]
obj.data[obj.itime, itotal:itotal2, :] = real_imag
obj.itotal = itotal2
obj.ielement = ielement2
else:
if is_vectorized and op2.use_vector: # pragma: no cover
log.debug(f'vectorize CQUAD4-144/{element_name_type}... imag SORT{sort_method}')
# nnodes_cquad4 = 5
# nelements = 3
# nlayers = nelements * nodes_cquad4 * 2 = 3*5*2 = 30
# ntotal = nlayers
n = oes_cquad4_144_complex_77(op2, data, obj,
nelements, nnodes,
dt, is_magnitude_phase)
#elif op2.format_code == 1 and op2.num_wide == numwide_random: # random
#msg = op2.code_information()
#msg += ' numwide=%s numwide_real=%s numwide_imag=%s numwide_random=%s' % (
#op2.num_wide, numwide_real, numwide_imag, numwide_random)
#return op2._not_implemented_or_skip(data, ndata, msg), None, None
elif result_type == 2 and op2.num_wide == numwide_random: # random
# 47 - CQUAD8-64
# 38 - CTRIAR-70
ntotal = op2.num_wide * 4 * self.factor
nelements = ndata // ntotal
assert ndata % ntotal == 0
nlayers = 2 * nelements * nnodes_all # 2 layers per node
#op2.log.info(f'random quad-144 ntotal={ntotal} ndata={ndata} ntotal={ntotal} numwide={op2.num_wide} -> nelements={nelements}')
#if op2.read_mode == 1:
#msg = ''
#return op2._not_implemented_or_skip(data, ndata, msg), None, None
auto_return, is_vectorized = op2._create_oes_object4(
nlayers, result_name, slot, obj_vector_random)
if auto_return:
op2._data_factor = 2 * nnodes_all # number of "layers" for an element
return nelements * ntotal, None, None
obj = op2.obj
#print('dt=%s, itime=%s' % (obj.itime, dt))
if op2.use_vector and is_vectorized and 0:
# self.itime = 0
# self.ielement = 0
# self.itotal = 0
#self.ntimes = 0
#self.nelements = 0
n = nelements * ntotal
istart = obj.itotal
iend = istart + nlayers
obj._times[obj.itime] = dt
if obj.itime == 0:
ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, numwide_real)
ints1 = ints[:, 2:].reshape(nlayers//2, 17)[:, 0].reshape(nelements, nnodes_all).copy()
ints1[:, 0] = 0.
nids = ints1.ravel()
eids = ints[:, 0] // 10
eids2 = array([eids] * (nnodes_all * 2), dtype='int32').T.ravel()
nids2 = vstack([nids, nids]).T.ravel()
obj.element_node[istart:iend, 0] = eids2
obj.element_node[istart:iend, 1] = nids2
#assert obj.element_node[:iend, 0].min() > 0, eids2
if obj.nonlinear_factor is not None:
float_mask = np.arange(nelements * numwide_real, dtype=np.int32).reshape(nelements, numwide_real)
float_mask1 = float_mask[:, 2:].reshape(nlayers // 2, 17)[:, 1:].reshape(nlayers, 8)
obj.float_mask = float_mask1
if obj.nonlinear_factor is not None:
results = frombuffer(data, dtype=op2.fdtype)[obj.float_mask].copy()
else:
floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, numwide_real)
floats1 = floats[:, 2:].reshape(nlayers // 2, 17)
results = floats1[:, 1:].reshape(nlayers, 8).copy()
#[fiber_dist, oxx, oyy, txy, angle, majorP, minorP, ovm]
obj.data[obj.itime, istart:iend, :] = results
else:
if is_vectorized and op2.use_vector: # pragma: no cover
log.debug(f'vectorize CQUAD4-144/{element_name_type}... random SORT{sort_method}')
#numwide_random = 2 + 9 * nnodes_all
n = oes_cquad4_144_random(op2, data, obj, nelements, nnodes, ndata)
#if op2.read_mode == 1:
#msg = ''
#return op2._not_implemented_or_skip(data, ndata, msg), None, None
##self.show_data(data[:44])
#ntotal = 44
#struct1 = Struct(op2._endian + b'i4si 8f')
#for i in ra
#for i in range(20):
#edata = data[n:n+ntotal]
#out = struct1.unpack(edata)
#self.show_data(edata)
#print(out)
#n += ntotal
## 47 - CQUAD8-64
##msg = op2.code_information()
#msg = '%s-CQUAD4-numwide=%s format_code=%s;\n numwide_real=%s numwide_imag=%s numwide_random=%s' % (
#op2.table_name_str, op2.num_wide, op2.format_code,
#numwide_real, numwide_imag, numwide_random)
#return op2._not_implemented_or_skip(data, ndata, msg), None, None
elif op2.table_name in [b'OESVM1', b'OESVM2', b'OSTRVM1', b'OSTRVM2']:
# 82 CQUADR -> 87
# CQUAD8 sort_method=2 ntotal=348 nelements=3
# CTRIA6 sort_method=2 ntotal=348 nelements=2
msg = op2.code_information()
if result_type == 1: # complex
# ndata = 3828
# ???
#
# ndata=1044
assert op2.num_wide in [70, 87], op2.code_information()
ntotal = op2.num_wide * self.size # 87*4
nelements = ndata // ntotal
nlayers = nelements * 2
assert ndata % ntotal == 0
if op2.is_stress:
obj_vector_complex = ComplexPlateVMStressArray
else:
obj_vector_complex = ComplexPlateVMStrainArray
auto_return, is_vectorized = op2._create_oes_object4(
nlayers, result_name, slot, obj_vector_complex)
if auto_return:
op2._data_factor = 2 * nnodes_all # number of "layers" for an element
#op2._data_factor = 2
return nelements * ntotal, None, None
#if op2.read_mode == 1:
#return op2._not_implemented_or_skip(data, ndata, msg), None, None
#self.show_data(data)
#print(ndata, ntotal)
obj = op2.obj
n = oes_cquad4_complex_vm_87(op2, data, obj, nelements, nnodes_all,
is_magnitude_phase)
#if result_type == 1 and op2.num_wide in [70, 87]:
# 70 - CTRIA6-75
# 87 - CQUAD4-144
#pass
else:
#msg = (f'skipping {op2.table_name_str}-{op2.element_name}: numwide={op2.num_wide} '
#f'result_type={op2.result_type} (complex);\n numwide_real={numwide_real} '
#f'numwide_imag={numwide_imag} numwide_random={numwide_random}')
#print(msg)
raise NotImplementedError(msg)
#return op2._not_implemented_or_skip(data, ndata, msg), None, None
#elif op2.format_code in [2, 3] and op2.num_wide == 70:
## 87 - CQUAD4-144
##msg = op2.code_information()
#msg = '%s-CTRIA6-numwide=%s numwide_real=%s numwide_imag=%s numwide_random=%s' % (
#op2.table_name_str, op2.num_wide, numwide_real, numwide_imag, numwide_random)
#return op2._not_implemented_or_skip(data, ndata, msg), None, None
elif result_type == 2 and op2.table_name in [b'OESXRMS1', b'OESXNO1']:
# CTRIA6-75 numwide=46 C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\tr1081x.op2
# CQUAD8-64 numwide=64 C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\tr1081x.op2
# corners + centroid
if op2.element_type == 75: # CTRIA6 (numwide=46)
# C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\tr1081x.op2
nnodes = 4
elif op2.element_type in [64, 144]:
# CQUAD8 (numwide=57) C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\tr1081x.op2
# CQUAD4-144 (numwide=57) C:\MSC.Software\simcenter_nastran_2019.2\tpl_post2\plate_111o.op2
nnodes = 5
else:
raise RuntimeError(op2.code_information())
assert 2 + 11 * nnodes == op2.num_wide, op2.code_information()
if op2.is_stress:
obj_vector_random = RandomPlateVMStressArray
else:
obj_vector_random = RandomPlateVMStrainArray
ntotal = op2.num_wide * size
nelements = ndata // ntotal
nlayers = nelements * nnodes_all * 2
assert ndata % ntotal == 0
#print(f'selement_name={op2.element_name} sort_method={op2.sort_method} ntotal={ntotal} num_wide={op2.num_wide} nelements={nelements} ndata={ndata} nlayers={nlayers}')
#print('nnodes_all =', nnodes_all)
# 57 = 2 + 11 * 5
#ints = (64011, 'CEN/',
# 5, -1127428915, 1168048724, 1174717287, 1170166698, 1179261635, 0.025, 1167390785, 1175199013, 1169871798, 1179273875,
# 80000, -1127428915, 1144450062, 1161227278, 1155248064, 1165738662, 0.025, 1149212546, 1165989762, 1154256422, 1167150968,
# 80002, -1127428915, 1182003448, 1198780664, 1170939895, 1197489549, 0.025, 1182601858, 1199379074, 1169253917, 1197953979,
# 80202, -1127428915, 1171034189, 1187612615, 1180541053, 1191448609, 0.025, 1169771716, 1186411799, 1181780048, 1191501390,
# 80200, -1127428915, 1178323054, 1132616020, 1157628875, 1178722187, 0.025, 1176075105, 1152142782, 1150736003, 1177059240,
# 64021, 'CEN/',
# 5,
# -1127428915, 1180051065, 1181683390, 1180538582, 1189027141,
# 0.025, 1176941913, 1182077843, 1179498554, 1188072152,
# 80200, -1127428915, 1179821552, 1170149790, 1168886008, 1181456870, 0.025, 1178168847, 1171555762, 1167658184, 1179691422,
# 80202, -1127428915, 1180244047, 1197120002, 1184242951, 1198270294, 0.025, 1180604356, 1197449345, 1183498771, 1198177490,
# 80402, -1127428915, 1192586977, 1184669070, 1184219971, 1194803525, 0.025, 1193976276, 1184750091, 1182770117, 1194901195,
# 80400, -1127428915, 1199592178, 1184219800, 1179170385, 1198821390, 0.025, 1198216710, 1184079548, 1179988640, 1197735836,
# 64111, 'CEN/',
# 8, -1127428915, 1176450101, 1187579168, 1179109877, 1190548891, 0.025, 1176466945, 1187779876, 1179033188, 1190633996,
# 90000, -1127428915, 1163199692, 1169764570, 1159841955, 1171614247, 0.025, 1163848412, 1172543886, 1158998260, 1173065416,
# 90002, -1127428915, 1183839433, 1201603141, 1152572706, 1200652206, 0.025, 1184231034, 1201902140, 1154931831, 1200916849,
# 90202, -1127428915, 1156439069, 1187515149, 1193045713, 1200691352, 0.025, 1130954657, 1188796022, 1193252160, 1200929208,
# 90200, -1127428915, 1188552094, 1155874499, 1172127542, 1189476821, 0.025, 1187333567, 1120409103, 1169694799, 1188368246,
# 64121, 'CEN/',
# 8, -1127428915, 1188544334, 1178584960, 1188275834, 1196282134, 0.025, 1186800673, 1178575754, 1187794850, 1195571664,
# 90200, -1127428915, 1189840387, 1170594118, 1173995469, 1190183343, 0.025, 1188877086, 1165971753, 1173556714, 1189607807,
# 90202, -1127428915, 1140886249, 1189485297, 1193242194, 1200973710, 0.025, 1161620989, 1191030157, 1193534822, 1201299068,
# 90402, -1127428915, 1176972259, 1174260793, 1195749260, 1202470249, 0.025, 1185547735, 1170629093, 1194910619, 1201965268,
# 90400, -1127428915, 1202771906, 1185377334, 1175183109, 1201858966, 0.025, 1202359853, 1184055805, 1173072162, 1201506363)
#floats = (8.969851599989587e-41, 'CEN/',
# 5, -0.025, 5088.291015625, 8496.8505859375, 6122.4580078125, 12934.6904296875,
# 0.025, 4767.03173828125, 8967.2861328125, 5978.4638671875, 12946.6435546875,
# 1.1210387714598537e-40, -0.025, 731.6883544921875, 2926.75341796875, 1757.4921875, 4028.16552734375,
# 0.025, 1022.3673095703125, 4089.46923828125, 1636.442138671875, 4649.93359375,
# 1.1210667974291402e-40, -0.025, 15612.2421875, 62448.96875, 6499.99560546875, 57405.55078125,
# 0.025, 16196.626953125, 64786.5078125, 5676.76416015625, 59219.73046875,
# 1.1238693943577898e-40, -0.025, 6546.03759765625, 25795.888671875, 14184.1220703125, 33808.12890625,
# 0.025, 5929.595703125, 23450.544921875, 15394.078125, 34014.3046875,
# 1.1238413683885033e-40, -0.025, 12018.107421875, 260.6978759765625, 2048.237060546875, 12407.8857421875,
# 0.025, 9822.8447265625, 1378.429443359375, 1206.7034912109375, 10783.9140625,
# 8.971252898453911e-41, 'CEN/',
# 5, -0.025, 13705.6181640625, 15299.685546875, 14181.708984375, 28558.634765625,
# 0.025, 10669.3369140625, 15684.8935546875, 13166.056640625, 26693.421875,
# 1.1238413683885033e-40, -0.025, 13481.484375, 6114.2021484375, 5497.12109375, 15078.474609375,
# 0.025, 11867.5146484375, 6800.7119140625, 4897.59765625, 13354.404296875,
# 1.1238693943577898e-40, -0.025, 13894.0771484375, 55962.0078125, 19214.513671875, 60455.3359375,
# 0.025, 14245.94140625, 57248.50390625, 17761.037109375, 60092.8203125,
# 1.1266719912864394e-40, -0.025, 38254.87890625, 20046.77734375, 19169.630859375, 46913.26953125,
# 0.025, 43681.828125, 20205.021484375, 16360.9423828125, 47294.79296875,
# 1.126643965317153e-40, -0.025, 65701.890625, 19169.296875, 12845.5791015625, 62608.0546875,
# 0.025, 60246.0234375, 18895.3671875, 13644.65625, 58367.609375,
# 8.983864584632835e-41, 'CEN/',
# 8, -0.025, 10189.0517578125, 25730.5625, 12786.4892578125, 31530.802734375,
# 0.025, 10205.5009765625, 26122.5703125, 12711.59765625, 31697.0234375,
# 1.2611686178923354e-40, -0.025, 3408.2998046875, 5926.1064453125, 2588.539794921875, 6829.26904296875,
# 0.025, 3566.6787109375, 7283.1943359375, 2382.5595703125, 7537.84765625,
# 1.2611966438616219e-40, -0.025, 18426.392578125, 81412.5390625, 1430.910400390625, 73983.359375,
# 0.025, 19191.23828125, 83748.46875, 1718.8895263671875, 76050.8828125,
# 1.2639992407902715e-40, -0.025, 1902.8785400390625, 25605.525390625, 40046.81640625, 74289.1875,
# 0.025, 232.99855041503906, 28107.23046875, 40853.25, 76147.4375,
# 1.263971214820985e-40, -0.025, 27630.80859375, 1833.9613037109375, 7079.9013671875, 29436.916015625,
# 0.025, 25250.873046875, 100.04308319091797, 5892.03857421875, 27271.73046875,
# 8.98526588309716e-41, 'CEN/',
# 8, -0.025, 27615.65234375, 12273.875, 27091.23828125, 52689.0859375,
# 0.025, 24210.064453125, 12264.884765625, 26151.81640625, 49913.8125,
# 1.263971214820985e-40, -0.025, 30147.005859375, 6331.1591796875, 7991.97509765625, 30816.841796875,
# 0.025, 28265.55859375, 4085.072509765625, 7777.7392578125, 29692.748046875,
# 1.2639992407902715e-40,-0.025, 514.1704711914062, 29453.470703125, 40814.3203125, 76495.109375,
# 0.025, 3022.874267578125, 32470.775390625, 41957.3984375, 79036.96875,
# 1.2668018377189211e-40,-0.025, 10698.9716796875, 8121.52783203125, 50607.546875, 88186.8203125,
# 0.025, 21762.919921875, 6348.23681640625, 47331.60546875, 84241.65625,
# 1.2667738117496346e-40,-0.025, 90543.515625, 21430.10546875, 8951.7548828125, 83411.171875,
# 0.025, 87324.3515625, 18848.994140625, 7541.1416015625, 80656.4609375)
#if op2.read_mode == 2:
#self.show_data(data)
#ddd
auto_return, is_vectorized = op2._create_oes_object4(
nlayers, result_name, slot, obj_vector_random)
if auto_return:
op2._data_factor = 2 * nnodes_all # number of "layers" for an element
return nelements * ntotal, None, None
obj = op2.obj
n = oes_cquad4_random_vm_57(op2, data, op2.obj, nelements, ntotal, nnodes,
dt)
else: # pragma: no cover
raise RuntimeError(op2.code_information())
return n, nelements, ntotal
    def _oes_shells_nonlinear(self, data: bytes, ndata: int, dt, is_magnitude_phase: bool,
                              result_type: int, prefix: str, postfix: str):
        """
        reads nonlinear stress/strain for element type:
        - 88 : CTRIA3NL
        - 90 : CQUAD4NL

        Parameters
        ----------
        data : bytes
            raw record payload for this subtable
        ndata : int
            length of ``data`` in bytes
        dt : int/float
            the load step / time / mode for this subtable
        is_magnitude_phase : bool
            complex-convention flag (not used in this method; only the
            real numwide=13/25 layouts are decoded here)
        result_type : int
            0=real, 1=complex, 2=random (not used here; dispatch is on
            ``format_code``/``num_wide`` instead)
        prefix, postfix : str
            pieces used to build ``result_name`` on the OP2 object

        Returns
        -------
        n, nelements, ntotal : int
            bytes consumed, elements read, bytes per element
        """
        op2 = self.op2
        n = 0
        # build the result attribute name for (stress/strain, element_type)
        if op2.is_stress:
            if op2.element_type == 88:
                # NOTE(review): postfix is commented out here but applied in the
                # other three branches -- looks inconsistent; confirm intent
                result_name = prefix + 'ctria3_stress' # + postfix nonlinear_
            elif op2.element_type == 90:
                result_name = prefix + 'cquad4_stress' + postfix # nonlinear_
            else:
                raise RuntimeError(op2.element_type)
        else:
            if op2.element_type == 88:
                result_name = prefix + 'ctria3_strain' + postfix # nonlinear_
            elif op2.element_type == 90:
                result_name = prefix + 'cquad4_strain' + postfix # nonlinear_
            else:
                raise RuntimeError(op2.element_type)

        slot = op2.get_result(result_name)
        op2._results._found_result(result_name)
        #print(op2.code_information())
        log = op2.log
        if op2.format_code == 1 and op2.num_wide == 13 and op2.element_type in [88, 90]: # real
            # single layered hyperelastic (???) ctria3, cquad4
            ntotal = 52 * self.factor  # 4*13
            nelements = ndata // ntotal

            obj_vector_real = RealNonlinearPlateArray
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                # read_mode=1 sizing pass: just report bytes consumed
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                # vectorized path: interpret the whole record as an
                # (nelements, 13) float table; column 0 is eid_device
                n = nelements * op2.num_wide * 4

                ielement = obj.ielement
                ielement2 = ielement + nelements
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, ielement, ielement2, data, nelements)

                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 13).copy()
                #[fiber_distance, oxx, oyy, ozz, txy, exx, eyy, ezz, exy, es, eps, ecs]
                # NOTE(review): this zeroes column 1 (the first *stored* field,
                # fiber_distance) before the [:, 1:] slice below -- confirm this
                # is intended and not meant to be column 0 (eid_device)
                floats[:, 1] = 0
                obj.data[obj.itime, ielement:ielement2, :] = floats[:, 1:]
                obj.ielement = ielement2
                obj.itotal = ielement2
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    log.debug('vectorize CTRIA3/CQUAD4_NL real SORT%s' % op2.sort_method)
                # scalar path: unpack one 13-word element at a time
                struct1 = Struct(op2._endian + op2._analysis_code_fmt + b'12f')  # 1+12=13
                for unused_i in range(nelements):
                    edata = data[n:n + ntotal]
                    out = struct1.unpack(edata)
                    if op2.is_debug_file:
                        op2.binary_debug.write('CQUADNL-90 - %s\n' % str(out))
                    (eid_device, fd1,
                     sx1, sy1, sz1, txy1, es1, eps1, ecs1,
                     ex1, ey1, ez1, exy1) = out
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)
                    obj.add_new_eid_sort1(
                        dt, eid, op2.element_type, fd1,
                        sx1, sy1, sz1, txy1, es1, eps1, ecs1,
                        ex1, ey1, ez1, exy1)
                    n += ntotal
        elif op2.format_code == 1 and op2.num_wide == 25 and op2.element_type in [88, 90]:
            # TODO: vectorize
            # two fiber locations per element (top/bottom); 25 words = 1 + 2*12
            # ELEMENT      FIBER                        STRESSES/ TOTAL STRAINS                     EQUIVALENT    EFF. STRAIN     EFF. CREEP
            #    ID      DISTANCE           X              Y             Z               XY           STRESS    PLASTIC/NLELAST     STRAIN
            # 0     721  -7.500000E+00   5.262707E+02   2.589492E+02   0.000000E+00  -2.014457E-14   4.557830E+02   5.240113E-02   0.0
            #              4.775555E-02  -2.775558E-17  -4.625990E-02  -7.197441E-18
            #             7.500000E+00   5.262707E+02   2.589492E+02   0.000000E+00   1.308169E-14   4.557830E+02   5.240113E-02   0.0
            #              4.775555E-02  -1.387779E-17  -4.625990E-02   4.673947E-18
            # 0     722  -7.500000E+00   5.262707E+02   2.589492E+02   0.000000E+00   2.402297E-13   4.557830E+02   5.240113E-02   0.0
            #              4.775555E-02  -2.081668E-17  -4.625990E-02   8.583152E-17
            #             7.500000E+00   5.262707E+02   2.589492E+02   0.000000E+00   2.665485E-14   4.557830E+02   5.240113E-02   0.0
            #              4.775555E-02  -2.081668E-17  -4.625990E-02   9.523495E-18
            #
            ntotal = 100 * self.factor  # 4*25
            nelements = ndata // ntotal
            obj_vector_real = RealNonlinearPlateArray

            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                op2._data_factor = 2  # 2 fiber locations per element
                return nelements * ntotal, None, None

            #return nelements * op2.num_wide * 4
            obj = op2.obj
            # NOTE(review): this forcibly disables the vectorized branch below,
            # making it dead code; the scalar loop is always taken
            is_vectorized = False
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * op2.num_wide * 4

                ielement = obj.ielement
                ielement2 = ielement + nelements
                itotal = obj.itotal
                itotal2 = itotal + nelements * 2
                obj._times[obj.itime] = dt
                #print('ielement=%s:%s' % (ielement, ielement2))
                #print('itotal=%s:%s' % (itotal, itotal2))
                if obj.itime == 0:
                    # NOTE(review): if the ValueError fires, ``ints`` is never
                    # bound and the ``eids`` line below raises NameError; also
                    # np.fromstring is deprecated in favor of np.frombuffer
                    try:
                        ints = fromstring(data, dtype=op2.idtype).reshape(nelements, 25)
                    except ValueError:
                        unused_values = fromstring(data, dtype=op2.idtype)

                    eids = ints[:, 0] // 10
                    #eids2 = vstack([eids, eids]).T.ravel()
                    #print(eids.tolist())
                    obj.element[ielement:ielement2] = eids  # 150
                    #print(obj.element_node[:10, :])

                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 25)[:, 1:]
                #[fiber_distance, oxx, oyy, ozz, txy, exx, eyy, ezz, exy, es, eps, ecs]
                #floats[:, 1] = 0
                obj.data[obj.itime, itotal:itotal2, :] = floats.reshape(nelements * 2, 12).copy()
                #obj.data[obj.itime, ielement:ielement2, :] = floats[:, 1:]
                obj.ielement = ielement2
                obj.itotal = itotal2
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    log.debug('vectorize CTRIA3/CQUAD4_NL imag SORT%s' % op2.sort_method)

                etype = op2.element_type
                # 1 eid_device + 24 floats (2 fiber locations x 12 values) = 25
                struct1 = Struct(op2._endian + mapfmt(op2._analysis_code_fmt + b'24f', self.size))  # 1+24=25
                for unused_i in range(nelements):
                    edata = data[n:n + ntotal]
                    out = struct1.unpack(edata)
                    if op2.is_debug_file:
                        eid = out[0] // 10
                        op2.binary_debug.write('CQUADNL-90 - %s : %s\n' % (eid, str(out)))
                    (eid_device,
                     fd1, sx1, sy1, undef1, txy1, es1, eps1, ecs1, ex1, ey1, undef2, etxy1,
                     fd2, sx2, sy2, undef3, txy2, es2, eps2, ecs2, ex2, ey2, undef4, etxy2) = out
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)
                    obj.add_new_eid_sort1(
                        dt, eid, etype,
                        fd1, sx1, sy1, undef1, txy1, es1, eps1, ecs1, ex1, ey1, undef2, etxy1)
                    obj.add_sort1(
                        dt, eid, etype,
                        fd2, sx2, sy2, undef3, txy2, es2, eps2, ecs2, ex2, ey2, undef4, etxy2)
                    n += ntotal
        elif op2.format_code == 1 and op2.num_wide == 0:  # random
            msg = op2.code_information()
            return op2._not_implemented_or_skip(data, ndata, msg)
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
        return n, nelements, ntotal
    def _oes_shells_composite(self, data: bytes, ndata: int, dt, is_magnitude_phase: bool,
                              result_type: int, prefix: str, postfix: str) -> Tuple[int, Any, Any]:
        """
        reads composite stress/strain for element type:
        - 95 : CQUAD4
        - 96 : CQUAD8
        - 97 : CTRIA3
        - 98 : CTRIA6 (composite)
        - 232 : QUADRLC (CQUADR-composite)
        - 233 : TRIARLC (CTRIAR-composite)

        Parameters
        ----------
        data : bytes
            raw record payload for this subtable
        ndata : int
            length of ``data`` in bytes
        dt : int/float
            the load step / time / frequency for this subtable
        is_magnitude_phase : bool
            complex-convention flag (forwarded to the complex helpers)
        result_type : int
            0=real, 1=complex, 2=random
        prefix, postfix : str
            pieces used to build ``result_name`` on the OP2 object

        Returns
        -------
        n, nelements, ntotal : int
            bytes consumed, elements read, bytes per element
        """
        op2 = self.op2
        table_name = op2.table_name
        assert isinstance(table_name, bytes), table_name
        n = 0

        # element_type -> name fragment used to build the result attribute
        composite_element_name_map = {
            95: 'cquad4',
            96: 'cquad8',
            97: 'ctria3',
            98: 'ctria6',
            232: 'cquadr',
            233: 'ctriar',
        }
        try:
            element_name = composite_element_name_map[op2.element_type]
        except KeyError:  # pragma: no cover
            raise KeyError(op2.code_information())

        # pick the storage classes for this stress/strain flavor
        if op2.is_stress:
            stress_strain = 'stress'
            obj_vector_real = RealCompositePlateStressArray
            obj_vector_strength = RealCompositePlateStressStrengthRatioArray
            #obj_vector_complex = ComplexCompositePlateStressArray
            obj_vector_random = RandomCompositePlateStressArray
        else:
            stress_strain = 'strain'
            obj_vector_real = RealCompositePlateStrainArray
            # NOTE(review): no strain strength-ratio class is wired up, so the
            # OESRT branch below would pass None for strain results -- confirm
            obj_vector_strength = None  # RealCompositePlateStrainStrengthRatioArray
            #obj_vector_complex = ComplexCompositePlateStrainArray
            obj_vector_random = RandomCompositePlateStrainArray

        result_name = prefix + f'{element_name}_composite_{stress_strain}' + postfix
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)

        etype = op2.element_name
        sort_method = op2.sort_method
        if result_type == 0 and op2.num_wide == 11:  # real
            #                    S T R E S S E S   I N   L A Y E R E D   C O M P O S I T E   E L E M E N T S   ( T R I A R )
            #   ELEMENT  PLY  STRESSES IN FIBER AND MATRIX DIRECTIONS    INTER-LAMINAR  STRESSES  PRINCIPAL STRESSES (ZERO SHEAR)      MAX
            #     ID      ID    NORMAL-1     NORMAL-2     SHEAR-12     SHEAR XZ-MAT  SHEAR YZ-MAT   ANGLE    MAJOR        MINOR        SHEAR
            #      7070        1  7.50000E-01  3.00000E+00  9.86167E-08   -6.58903E-08  3.00000E+00   90.00  3.00000E+00  7.50000E-01  1.12500E+00
            #      7070        2 -7.50000E-01 -3.00000E+00 -9.86167E-08    0.0          0.0           -0.00 -7.50000E-01 -3.00000E+00  1.12500E+00
            ntotal = 44 * self.factor  # 11 words
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.is_debug_file:
                op2.binary_debug.write('  [cap, element1, element2, ..., cap]\n')
                op2.binary_debug.write('  cap = %i  # assume 1 cap when there could have been multiple\n' % ndata)
                op2.binary_debug.write('  element1 = [eid_device, layer, o1, o2, t12, t1z, t2z, angle, major, minor, ovm)]\n')
                op2.binary_debug.write('  nelements=%i; nnodes=1 # centroid\n' % nelements)

            if op2.use_vector and is_vectorized and sort_method == 1:
                # vectorized path: one (eid, ply) row per record entry
                n = nelements * op2.num_wide * 4

                istart = obj.itotal
                iend = istart + nelements
                obj._times[obj.itime] = dt

                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype8).reshape(nelements, 11).copy()
                    eids = ints[:, 0] // 10
                    nids = ints[:, 1]  # ply/layer id, not a node id
                    obj.element_layer[istart:iend, 0] = eids
                    obj.element_layer[istart:iend, 1] = nids

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 11)
                #[o1, o2, t12, t1z, t2z, angle, major, minor, ovm]
                obj.data[obj.itime, istart:iend, :] = floats[:, 2:].copy()
            else:
                if is_vectorized and op2.use_vector:  # pragma: no cover
                    op2.log.debug(f'vectorize COMP_SHELL real SORT{sort_method}')
                n = oes_comp_shell_real_11(op2, data, ndata, obj,
                                           ntotal, nelements, etype, dt)

        #elif result_type == 1 and op2.num_wide == 9:  # TODO: imag? - not done...
            # TODO: vectorize
            #raise NotImplementedError('imaginary composite stress?')
            #msg = op2.code_information()
            #nelements = ndata // ntotal
            #obj_vector_complex = None
            #auto_return, is_vectorized = op2._create_oes_object4(
                #nelements, result_name, slot, obj_vector_complex)
            #if auto_return:
                #assert ntotal == op2.num_wide * 4
                #return nelements * ntotal, None, None

            ## TODO: this is an OEF result???
            ##    furthermore the actual table is calle dout as
            ##    'i8si4f4s', not 'i8si3fi4s'
            #ntotal = 36
            #nelements = ndata // ntotal
            #s = self.struct_i
            #s2 = Struct(op2._endian + b'8si3fi4s')
            #s3 = Struct(op2._endian + b'8si4f4s')
            #for i in range(nelements):
                ##out = s.unpack(data[n:n + ntotal])
                #eid_device, = s.unpack(data[n:n+4])
                ##t, = s.unpack(data[n:n+4])

                #if eid_device > 0:
                    #out = s2.unpack(data[n+4:n+ntotal])
                #else:
                    #unused_out1 = s2.unpack(data[n+4:n+ntotal])
                    #out = s3.unpack(data[n+4:n+ntotal])
                #(theory, lamid, fp, fm, fb, fmax, fflag) = out

                #if op2.is_debug_file:
                    #op2.binary_debug.write('%s-%s - (%s) + %s\n' % (op2.element_name, op2.element_type, eid_device, str(out)))
                #obj.add_new_eid_sort1(dt, eid, theory, lamid, fp, fm, fb, fmax, fflag)
                #n += ntotal
            #raise NotImplementedError('this is a really weird case...')
        elif result_type == 1 and op2.num_wide == 11 and table_name in [b'OESCP', b'OESTRCP']:  # complex
            # OESCP - STRAINS IN LAYERED COMPOSITE ELEMENTS (QUAD4)
            ntotal = 44 * self.factor
            nelements = ndata // ntotal
            op2.log.warning(f'skipping complex {op2.table_name_str}-PCOMP')
            # NOTE(review): this early return makes everything below in this
            # branch unreachable -- the complex PCOMP decode is disabled
            return nelements * ntotal, None, None
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, ComplexLayeredCompositesArray)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            n = oes_shell_composite_complex_11(op2, data, obj,
                                               ntotal, nelements, sort_method,
                                               dt, is_magnitude_phase)
            return nelements * ntotal, None, None

        elif result_type == 0 and op2.num_wide == 9 and table_name == b'OESRT':  # real
            # strength_ratio.cquad4_composite_stress
            ntotal = 36 * self.factor
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_strength)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.is_debug_file:
                op2.binary_debug.write('  [cap, element1, element2, ..., cap]\n')
                op2.binary_debug.write('  cap = %i  # assume 1 cap when there could have been multiple\n' % ndata)
                op2.binary_debug.write('  element1 = [eid_device, failure_theory, ply_id, strength_ratio_ply, failure_index_bonding, strength_ratio_bonding, flag, flag2)]\n')
                op2.binary_debug.write('  nelements=%i; nnodes=1 # centroid\n' % nelements)

            # the trailing ``and 0`` keeps this branch disabled, so the ``asdf``
            # placeholder (which would NameError) is never reached
            if op2.use_vector and is_vectorized and sort_method == 1 and 0:
                n = nelements * op2.num_wide * 4
                asdf
            else:
                op2.log.warning(f'need to vectorize oes_shell_composite; {op2.element_name}-{op2.element_type} '
                                f'(numwide={op2.num_wide}) {op2.table_name_str}')
                n = oesrt_comp_shell_real_9(op2, data, ndata, obj,
                                            ntotal, nelements, etype, dt)

        elif result_type == 1 and op2.num_wide == 13 and table_name in [b'OESVM1C', b'OSTRVM1C']:  # complex
            op2.log.warning(f'skipping complex {op2.table_name_str}-PCOMP (numwide=13)')
            ntotal = 52 * self.factor
            nelements = ndata // ntotal
            # NOTE(review): this early return makes everything below in this
            # branch unreachable -- the numwide=13 complex decode is disabled
            return nelements * ntotal, None, None
            op2.table_name = table_name
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, ComplexLayeredCompositesArray)
            if auto_return:
                return nelements * ntotal, None, None

            if is_vectorized and op2.use_vector:  # pragma: no cover
                op2.log.debug(f'vectorize COMP_SHELL random SORT{sort_method} (numwide=13)')
            obj = op2.obj
            n = oes_shell_composite_complex_13(op2, data, obj,
                                               ntotal, nelements, sort_method,
                                               dt, is_magnitude_phase)
            #return nelements * ntotal, None, None
        elif result_type == 2 and op2.num_wide == 7:
            # TCODE,7 =0 Real
            # 2 PLY I Lamina Number
            # 3 EX1 RS Normal-1
            # 4 EY1 RS Normal-2
            # 5 ET1 RS Shear-12
            # 6 EL1 RS Shear-1Z
            # 7 EL2 RS Shear-2Z
            # 8 A1 RS Shear angle
            # 9 EMJRP1 RS Major Principal
            # 10 EMNRP1 RS Minor Principal
            # 11 ETMAX1 RS von Mises or Maximum shear
            #
            # TCODE,7 =1 Real/imaginary
            # 2 PLY I Lamina Number
            # 3 EX1 RS Normal-1
            # 4 EY1 RS Normal-2
            # 5 ET1 RS Shear-12
            # 6 EL1 RS Shear-1Z
            # 7 EL2 RS Shear-2Z
            # 8 EX1I RS Normal-1
            # 9 EY1I RS Normal-2
            # 10 ET1I RS Shear-12
            # 11 EL1I RS Shear-1Z
            # 12 EL2I RS Shear-2Z
            #
            # TCODE,7 =2 Random Response
            # 2 PLY I Lamina Number
            # 3 EX1 RS Normal-1
            # 4 EY1 RS Normal-2
            # 5 ET1 RS Shear-12
            # 6 EL1 RS Shear-1Z
            # 7 EL2 RS Shear-2Z
            ntotal = 28 * self.factor
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_random)
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj

            # 7 words: eid_device, ply_id, then 5 stress/strain components
            struct1 = Struct(op2._endian + op2._analysis_code_fmt + b'i5f')
            for unused_i in range(nelements):
                edata = data[n:n+ntotal]
                out = struct1.unpack(edata)
                (eid_device, ply_id, oxx, oyy, txy, txz, tyz) = out
                eid, dt = get_eid_dt_from_eid_device(
                    eid_device, op2.nonlinear_factor, sort_method)
                #print(eid, out)

                #if op2.is_debug_file:
                    #op2.binary_debug.write('%s-%s - (%s) + %s\n' % (op2.element_name, op2.element_type, eid_device, str(out)))
                #print(obj)
                obj.add_sort1_7words(dt, eid, ply_id, oxx, oyy, txy, txz, tyz)
                n += ntotal
        elif result_type == 2 and op2.num_wide == 8:
            # analysis_code = 5   Frequency
            # table_code    = 805 OESXRM1C-OESXRMS1 - element RMS stresses for random analysis that includes von Mises stress output.
            # format_code   = 1   Real
            # result_type   = 2   Random
            # sort_method   = 1
            # sort_code     = 4
            #     sort_bits   = (0, 0, 1)
            #     data_format = 0   Real
            #     sort_type   = 0   Sort1
            #     is_random   = 1   Random Responses
            # random_code   = 0
            # element_type  = 95  QUAD4-nonlinear
            # num_wide      = 8
            # freq          = 0.0
            # NX Nastran
            # NOTE(review): this first msg assignment is immediately overwritten
            msg = op2.code_information()
            msg = (f'etype={op2.element_name} ({op2.element_type}) '
                   f'{op2.table_name_str}-COMP-random-numwide={op2.num_wide} '
                   f'numwide_real=11 numwide_imag=9 result_type={result_type}')
            return op2._not_implemented_or_skip(data, ndata, msg), None, None
        elif result_type == 1 and op2.num_wide in [11, 12]:
            # analysis_code = 9   Complex eigenvalues
            # table_code    = 5   OESCP-OES - Element Stress
            # format_code   = 2   Real/Imaginary
            # result_type   = 1   Complex
            # sort_method   = 1
            # sort_code     = 0
            #     sort_bits   = (1, 0, 0)
            #     data_format = 1   Real/Imaginary
            #     sort_type   = 0   Sort1
            #     is_random   = 0   Sorted Responses
            # random_code   = 0
            # element_type  = 95  QUAD4-nonlinear
            # num_wide      = 11
            # mode  = 0
            # eigr  = 0.0
            # eigi  = 0.0
            # NX Nastran
            # NOTE(review): this first msg assignment is immediately overwritten
            msg = op2.code_information()
            msg = (f'etype={op2.element_name} ({op2.element_type}) '
                   f'{op2.table_name_str}-COMP-random-numwide={op2.num_wide} '
                   f'numwide_real=11 numwide_imag=9 result_type={result_type}')
            return op2._not_implemented_or_skip(data, ndata, msg), None, None
        else:
            raise RuntimeError(op2.code_information())
            #msg = op2.code_information()
            #msg = (f'etype={op2.element_name} ({op2.element_type}) '
                   #f'{op2.table_name_str}-COMP-random-numwide={op2.num_wide} '
                   #f'numwide_real=11 numwide_imag=9 result_type={result_type}')
            #return op2._not_implemented_or_skip(data, ndata, msg), None, None
        return n, nelements, ntotal
    def _oes_ctriax6(self, data, ndata, dt, is_magnitude_phase,
                     result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 53 : CTRIAX6 (axisymmetric triangle)

        Each element record carries 4 "node" rows (centroid + 3 corners),
        so ``op2._data_factor = 4`` on the sizing (read_mode=1) pass.

        Returns (nbytes_read, nelements, ntotal); returns
        (ndata, None, None) when the result is not requested/saved.
        """
        op2 = self.op2
        n = 0
        if op2.is_stress:
            result_name = prefix + 'ctriax_stress' + postfix
        else:
            result_name = prefix + 'ctriax_strain' + postfix
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        if result_type == 0 and op2.num_wide == 33: # real
            if op2.is_stress:
                obj_vector_real = RealTriaxStressArray
            else:
                obj_vector_real = RealTriaxStrainArray
            ntotal = 132 * self.factor # (1+8*4)*4 = 33*4 = 132
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                # 4 rows (centroid + 3 corners) per element
                op2._data_factor = 4
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj
            nnodes_all = 4
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * op2.num_wide * 4
                itotal = obj.itotal
                itotal2 = itotal + nelements * nnodes_all
                ielement = obj.ielement
                ielement2 = ielement + nelements
                # 33 words/element; drop the eid word -> 4 rows x 8 values
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 33)
                floats1 = floats[:, 1:].reshape(nelements * nnodes_all, 8).copy()
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 33).copy()
                    ints1 = ints[:, 1:].reshape(nelements * nnodes_all, 8)
                    eids = ints[:, 0] // 10
                    ints[:, 0] = 0
                    nids = ints1[:, 0]
                    # repeat each eid for its 4 node rows
                    eids2 = np.vstack([eids] * nnodes_all).T.ravel()
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = eids2
                    obj.element_node[itotal:itotal2, 1] = nids
                #[loc, rs, azs, As, ss, maxp, tmax, octs]
                obj.data[obj.itime, itotal:itotal2, :] = floats1[:, 1:]
                obj.ielement = ielement2
                obj.itotal = itotal2
            else:
                n = oes_ctriax6_real_33(op2, data, obj,
                                        nelements, ntotal, dt)
        elif result_type == 1 and op2.num_wide == 37:  # imag
            # TODO: vectorize object
            if op2.is_stress:
                #print('op2.element_type', op2.element_type)
                #print('op2.element_name', op2.element_name)
                #raise NotImplementedError('ComplexTriaxStressArray')
                obj_vector_complex = ComplexTriaxStressArray
            else:
                raise NotImplementedError('ComplexTriaxStrainArray')
                #obj_vector_complex = ComplexTriaxStrainArray
            num_wide = 1 + 4 * 9
            ntotal = num_wide * 4 * self.factor
            assert num_wide == op2.num_wide, num_wide
            nelements = ndata // ntotal  # (1+4*9)*4 = 37*4 = 148
            leftover = ndata % ntotal
            assert leftover == 0, 'ntotal=%s nelements=%s leftover=%s' % (ntotal, nelements, leftover)
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_complex)
            if data is None:
                return ndata, None, None
            # NOTE(review): auto_return/is_vectorized are forced off here, so
            # the scalar reader below always runs for complex CTRIAX6 and the
            # read_mode=1 fast-skip never triggers — looks like debug leftover;
            # confirm before re-enabling the vectorized path.
            auto_return = False
            is_vectorized = False
            if auto_return:
                op2._data_factor = 4
                return nelements * ntotal, None, None
            obj = op2.obj
            nnodes_all = 4
            # 'and 0' permanently disables this vectorized branch (dead code)
            if op2.use_vector and is_vectorized and 0:
                n = nelements * ntotal
                itotal = obj.itotal
                itotal2 = itotal + nelements * nnodes_all
                ielement = obj.ielement
                ielement2 = ielement + nelements
                numwide_imag = 37
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, numwide_imag)
                floats1 = floats[:, 1:].reshape(nelements * nnodes_all, 9).copy()
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, numwide_imag)
                    ints1 = ints[:, 1:].reshape(nelements * nnodes_all, 9).copy()
                    eids = ints[:, 0] // 10
                    ints[:, 0] = 0
                    nids = ints1[:, 0]
                    eids2 = np.vstack([eids] * nnodes_all).T.ravel()
                    assert eids.min() > 0, eids.min()
                    obj.element_node[itotal:itotal2, 0] = eids2
                    obj.element_node[itotal:itotal2, 1] = nids
                # [loc, rsr, rsi, azsr, azsi, Asr, Asi, ssr, ssi]
                # NOTE(review): isave2 has a 9 where the real/imag pairing
                # pattern suggests 8; unreachable (branch is dead) — verify
                # before reviving this path.
                isave1 = [1, 3, 5, 7]
                isave2 = [2, 4, 6, 9]
                real_imag = apply_mag_phase(floats1, is_magnitude_phase, isave1, isave2)
                obj.data[obj.itime, itotal:itotal2, :] = real_imag
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                n = oes_ctriax_complex_37(op2, data, obj,
                                          nelements,
                                          is_magnitude_phase)
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
            #msg = op2.code_information()
            #raise NotImplementedError(msg)
            #return op2._not_implemented_or_skip(data, ndata, msg)
        return n, nelements, ntotal
    def _oes_cbush(self, data, ndata, dt, is_magnitude_phase,
                   result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 102 : CBUSH

        Real/random records are 7 words (eid + tx,ty,tz,rx,ry,rz);
        complex records are 13 words (eid + 6 real + 6 imag).

        Returns (nbytes_read, nelements, ntotal); returns
        (ndata, None, None) when the result is not requested/saved.
        """
        op2 = self.op2
        n = 0
        if op2.is_stress:
            result_name = prefix + 'cbush_stress' + postfix
        else:
            result_name = prefix + 'cbush_strain' + postfix
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        if result_type in [0, 2] and op2.num_wide == 7: # real, random
            if op2.is_stress:
                obj_vector_real = RealBushStressArray
            else:
                obj_vector_real = RealBushStrainArray
            assert op2.num_wide == 7, "num_wide=%s not 7" % op2.num_wide
            ntotal = 28 * self.factor # 4*7
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                return nelements * ntotal, None, None
            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * ntotal
                istart = obj.ielement
                iend = istart + nelements
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, istart, iend, data, nelements)
                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 7)
                #[tx, ty, tz, rx, ry, rz]
                obj.data[obj.itime, istart:iend, :] = floats[:, 1:].copy()
            else:
                n = oes_cbush_real_7(op2, data, obj,
                                     nelements, ntotal, dt)
        elif result_type == 1 and op2.num_wide == 13:  # imag
            if op2.is_stress:
                obj_complex = ComplexCBushStressArray
            else:
                obj_complex = ComplexCBushStrainArray
            ntotal = 52 * self.factor  # 4*13
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_complex)
            if auto_return:
                return nelements * ntotal, None, None
            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * ntotal
                # one row per element, so itotal/ielement advance together
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2
                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 13).copy()
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, itotal, itotal2, data, nelements)
                # real columns 1-6, imag columns 7-12
                isave1 = [1, 2, 3, 4, 5, 6]
                isave2 = [7, 8, 9, 10, 11, 12]
                real_imag = apply_mag_phase(floats, is_magnitude_phase, isave1, isave2)
                obj.data[obj.itime, itotal:itotal2, :] = real_imag
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                n = oes_cbush_complex_13(op2, data, obj,
                                         nelements, ntotal,
                                         is_magnitude_phase)
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
            #msg = op2.code_information()
            #raise NotImplementedError(msg)
            #return op2._not_implemented_or_skip(data, ndata, msg)
        return n, nelements, ntotal
def _oes_cbush1d(self, data, ndata, dt, is_magnitude_phase,
result_type, prefix, postfix):
"""
reads stress/strain for element type:
- 40 : CBUSH1D
"""
op2 = self.op2
n = 0
if op2.is_stress:
result_name = prefix + 'cbush1d_stress_strain' + postfix
else:
result_name = prefix + 'cbush1d_stress_strain' + postfix
if op2._results.is_not_saved(result_name):
return ndata, None, None
op2._results._found_result(result_name)
slot = op2.get_result(result_name)
if result_type == 0 and op2.num_wide == 8: # real
if op2.is_stress:
obj_vector_real = RealBush1DStressArray
else:
#op2.create_transient_object(self.cbush1d_stress_strain, Bush1DStrain) # undefined
raise NotImplementedError('cbush1d_stress_strain; numwide=8')
ntotal = 32 * self.factor # 4*8
nelements = ndata // ntotal
auto_return, is_vectorized = op2._create_oes_object4(
nelements, result_name, slot, obj_vector_real)
if auto_return:
return nelements * ntotal, None, None
obj = op2.obj
if op2.use_vector and is_vectorized and op2.sort_method == 1:
n = nelements * ntotal
itotal = obj.itotal
itotal2 = itotal + nelements
itime = obj.itime
obj._times[itime] = dt
if 1: #obj.itime == 0:
ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 8).copy()
eids = ints[:, 0] // 10
fail = ints[:, 7]
obj.element[itotal:itotal2] = eids
obj.is_failed[itime, itotal:itotal2, 0] = fail
floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 8)
#[xxx, fe, ue, ve, ao, ae, ep, xxx]
obj.data[itime, itotal:itotal2, :] = floats[:, 1:7].copy()
obj.ielement = itotal2
obj.itotal = itotal2
else:
struct1 = Struct(op2._endian + op2._analysis_code_fmt + b'6fi')
for unused_i in range(nelements):
edata = data[n:n + ntotal]
out = struct1.unpack(edata) # num_wide=25
if op2.is_debug_file:
op2.binary_debug.write('CBUSH1D-40 - %s\n' % (str(out)))
(eid_device, fe, ue, ve, ao, ae, ep, fail) = out
eid, dt = get_eid_dt_from_eid_device(
eid_device, op2.nonlinear_factor, op2.sort_method)
# axial_force, axial_displacement, axial_velocity, axial_stress,
# axial_strain, plastic_strain, is_failed
obj.add_sort1(dt, eid, fe, ue, ve, ao, ae, ep, fail)
n += ntotal
elif result_type == 1 and op2.num_wide == 9: # imag
# TODO: vectorize object
ntotal = 36 * self.factor # 4*9
nelements = ndata // ntotal
if op2.is_stress:
auto_return, is_vectorized = op2._create_oes_object4(
nelements, result_name, slot, ComplexCBush1DStressArray)
else:
raise NotImplementedError('self.cbush1d_stress_strain; complex strain')
if auto_return:
assert ntotal == op2.num_wide * 4
return nelements * ntotal, None, None
obj = op2.obj
if op2.use_vector and is_vectorized and op2.sort_method == 1:
n = nelements * op2.num_wide * 4
itotal = obj.itotal
itotal2 = itotal + nelements
itime = obj.itime
obj._times[itime] = dt
self.obj_set_element(obj, itotal, itotal2, data, nelements)
floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 9).copy()
#[fer, uer, aor, aer,
# fei, uei, aoi, aei]
isave1 = [1, 3, 5, 7]
isave2 = [2, 4, 6, 8]
real_imag = apply_mag_phase(floats, is_magnitude_phase, isave1, isave2)
obj.data[obj.itime, itotal:itotal2, :] = real_imag
obj.ielement = itotal2
obj.itotal = itotal2
else:
struct1 = Struct(op2._endian + op2._analysis_code_fmt + b'8f')
for unused_i in range(nelements):
edata = data[n:n+ntotal]
out = struct1.unpack(edata) # num_wide=25
(eid_device,
fer, uer, aor, aer,
fei, uei, aoi, aei) = out
eid, dt = get_eid_dt_from_eid_device(
eid_device, op2.nonlinear_factor, op2.sort_method)
if is_magnitude_phase:
fe = polar_to_real_imag(fer, fei)
ue = polar_to_real_imag(uer, uei)
ao = polar_to_real_imag(aor, aoi)
ae = polar_to_real_imag(aer, aei)
else:
fe = complex(fer, fei)
ue = complex(uer, uei)
ao = complex(aor, aoi)
ae = complex(aer, aei)
obj.add_new_eid(op2.element_type, dt, eid, fe, ue, ao, ae)
else: # pragma: no cover
raise RuntimeError(op2.code_information())
#msg = op2.code_information()
#raise NotImplementedError(msg)
#return op2._not_implemented_or_skip(data, ndata, msg)
return n, nelements, ntotal
    def _oes_crod_nonlinear(self, data, ndata, dt, unused_is_magnitude_phase,
                            result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 87 : CTUBENL
         - 89 : RODNL
         - 92 : CONRODNL

        Real records are 7 words (eid + 6 nonlinear values).

        Returns (nbytes_read, nelements, ntotal); returns
        (ndata, None, None) when the result is not requested/saved.
        """
        op2 = self.op2
        n = 0
        #prefix = 'nonlinear_'
        if op2.is_stress:
            if op2.element_type == 87:
                result_name = prefix + 'ctube_stress' + postfix
                name = 'CTUBENL-87'
            elif op2.element_type == 89:
                result_name = prefix + 'crod_stress' + postfix
                name = 'RODNL-89'
            elif op2.element_type == 92:
                result_name = prefix + 'conrod_stress' + postfix
                name = 'CONRODNL-92'
            else:  # pragma: no cover
                raise RuntimeError(op2.code_information())
        else:
            if op2.element_type == 87:
                result_name = prefix + 'ctube_strain' + postfix
                name = 'CTUBENL-87'
            elif op2.element_type == 89:
                result_name = prefix + 'crod_strain' + postfix
                name = 'RODNL-89'
            elif op2.element_type == 92:
                result_name = prefix + 'conrod_strain' + postfix
                name = 'CONRODNL-92'
            else:  # pragma: no cover
                raise RuntimeError(op2.code_information())
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        if result_type == 0 and op2.num_wide == 7:  # real
            ntotal = 28 * self.factor #  7*4 = 28
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, RealNonlinearRodArray)
            if auto_return:
                return nelements * ntotal, None, None
            obj = op2.obj
            #if op2.is_debug_file:
                #op2.binary_debug.write('  [cap, element1, element2, ..., cap]\n')
                #op2.binary_debug.write('  cap = %i  # assume 1 cap when there could have been multiple\n' % ndata)
                #op2.binary_debug.write('  element1 = [eid_device, layer, o1, o2, t12, t1z, t2z, angle, major, minor, ovm)]\n')
                #op2.binary_debug.write('  nelements=%i; nnodes=1 # centroid\n' % nelements)
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * op2.num_wide * 4
                istart = obj.itotal
                iend = istart + nelements
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype8).reshape(nelements, 7).copy()
                    eids = ints[:, 0] // 10
                    obj.element[istart:iend] = eids
                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 7)
                #[axial_stress, equiv_stress, total_strain,
                # eff_plastic_creep_strain, eff_creep_strain, linear_torsional_stresss]
                obj.data[obj.itime, istart:iend, :] = floats[:, 1:].copy()
            else:
                struct1 = Struct(op2._endian + mapfmt(op2._analysis_code_fmt + b'6f', self.size))  # 1+6=7
                for unused_i in range(nelements):
                    edata = data[n:n+ntotal]
                    out = struct1.unpack(edata)

                    (eid_device, axial_stress, equiv_stress, total_strain,
                     eff_plastic_creep_strain, eff_creep_strain, linear_torsional_stresss) = out
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)
                    if op2.is_debug_file:
                        op2.binary_debug.write('%s - %s\n' % (name, str(out)))
                    obj.add_sort1(dt, eid, axial_stress, equiv_stress, total_strain,
                                  eff_plastic_creep_strain, eff_creep_strain, linear_torsional_stresss)
                    n += ntotal
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
        return n, nelements, ntotal
def _oes_celas_nonlinear(self, data, ndata, dt, unused_is_magnitude_phase,
result_type, prefix, postfix):
"""
reads stress/strain for element type:
- 224 : CELAS1
- 226 : CELAS3
"""
op2 = self.op2
# 224-CELAS1
# 225-CELAS3
# NonlinearSpringStress
n = 0
numwide_real = 3
if op2.is_stress:
if op2.element_type == 224:
result_name = prefix + 'celas1_stress' + postfix # nonlinear_
elif op2.element_type == 225:
result_name = prefix + 'celas3_stress' + postfix # nonlinear_
else:
raise NotImplementedError('NonlinearSpringStrain')
if op2._results.is_not_saved(result_name):
return ndata, None, None
op2._results._found_result(result_name)
slot = op2.get_result(result_name)
if result_type == 0 and op2.num_wide == numwide_real:
assert op2.num_wide == 3, "num_wide=%s not 3" % op2.num_wide
ntotal = 12 * self.factor # 4*3
nelements = ndata // ntotal
if op2.is_stress:
auto_return, is_vectorized = op2._create_oes_object4(
nelements, result_name, slot, RealNonlinearSpringStressArray)
else:
raise NotImplementedError('NonlinearSpringStrainArray') # undefined
if auto_return:
assert ntotal == op2.num_wide * 4
return nelements * ntotal, None, None
obj = op2.obj
if op2.use_vector and is_vectorized and op2.sort_method == 1:
n = nelements * 4 * op2.num_wide
unused_itotal = obj.ielement
ielement = obj.ielement
ielement2 = obj.ielement + nelements
obj._times[obj.itime] = dt
if obj.itime == 0:
ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, numwide_real).copy()
eids = ints[:, 0] // 10
assert eids.min() > 0, eids.min()
obj.element[ielement:ielement2] = eids
floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, numwide_real)
#[force, stress]
obj.data[obj.itime, ielement:ielement2, :] = floats[:, 1:].copy()
obj.itotal = ielement2
obj.ielement = ielement2
else:
struct1 = Struct(op2._endian + op2._analysis_code_fmt + b'2f')
for unused_i in range(nelements):
edata = data[n:n+ntotal]
out = struct1.unpack(edata) # num_wide=3
(eid_device, force, stress) = out
eid, dt = get_eid_dt_from_eid_device(
eid_device, op2.nonlinear_factor, op2.sort_method)
if op2.is_debug_file:
op2.binary_debug.write('%s-%s - %s\n' % (op2.element_name, op2.element_type, str(out)))
obj.add_sort1(dt, eid, force, stress)
n += ntotal
else: # pragma: no cover
raise RuntimeError(op2.code_information())
return n, nelements, ntotal
    def _oes_cbush_nonlinear(self, data, ndata, dt, unused_is_magnitude_phase,
                             result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 226 : CBUSHNL

        Real records are 19 words (eid + 18 force/stress/strain values).

        Returns (nbytes_read, nelements, ntotal); returns
        (ndata, None, None) when the result is not requested/saved.
        """
        op2 = self.op2
        n = 0
        if op2.is_stress:
            if op2.element_type == 226:
                result_name = prefix + 'cbush_force_stress_strain' + postfix
                name = 'CBUSHNL-226'
            else:  # pragma: no cover
                raise RuntimeError(op2.code_information())
        else:
            if op2.element_type == 226:
                result_name = prefix + 'nonlinear_cbush_strain' + postfix
                name = 'CBUSHNL-226'
            else:  # pragma: no cover
                raise RuntimeError(op2.code_information())
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        if result_type == 0 and op2.num_wide == 19:  # real
            ntotal = 76 * self.factor  # 19*4 = 76
            nelements = ndata // ntotal
            assert ndata % ntotal == 0
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, RealNonlinearBushArray)
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None
            obj = op2.obj
            #if op2.is_debug_file:
                #op2.binary_debug.write('  [cap, element1, element2, ..., cap]\n')
                #op2.binary_debug.write('  cap = %i  # assume 1 cap when there could have been multiple\n' % ndata)
                #op2.binary_debug.write('  element1 = [eid_device, layer, o1, o2, t12, t1z, t2z, angle, major, minor, ovm)]\n')
                #op2.binary_debug.write('  nelements=%i; nnodes=1 # centroid\n' % nelements)
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * op2.num_wide * 4
                istart = obj.itotal
                iend = istart + nelements
                obj._times[obj.itime] = dt
                if obj.itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 19).copy()
                    eids = ints[:, 0] // 10
                    obj.element[istart:iend] = eids
                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 19)
                #[fx, fy, fz, otx, oty, otz, etx, ety, etz,
                # mx, my, mz, orx, ory, orz, erx, ery, erz]
                obj.data[obj.itime, istart:iend, :] = floats[:, 1:].copy()
            else:
                #                      N O N L I N E A R   F O R C E S  A N D  S T R E S S E S  I N   B U S H   E L E M E N T S    ( C B U S H )
                #
                #                           F O R,C E                               S T R E S S                             S T R A I N
                # ELEMENT ID.   FORCE-X      FORCE-Y      FORCE-Z     STRESS-TX    STRESS-TY    STRESS-TZ    STRAIN-TX    STRAIN-TY    STRAIN-TZ
                #               MOMENT-X     MOMENT-Y     MOMENT-Z    STRESS-RX    STRESS-RY    STRESS-RZ    STRAIN-RX    STRAIN-RY    STRAIN-RZ
                #       6       0.0          0.0          0.0          0.0          0.0          0.0          0.0          0.0          0.0
                #               0.0          0.0          0.0          0.0          0.0          0.0          0.0          0.0          0.0
                struct1 = Struct(op2._endian + op2._analysis_code_fmt + b'18f')
                for unused_i in range(nelements):
                    edata = data[n:n+ntotal]
                    out = struct1.unpack(edata)
                    (eid_device, fx, fy, fz, otx, oty, otz, etx, ety, etz,
                     mx, my, mz, orx, ory, orz, erx, ery, erz) = out
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)
                    if op2.is_debug_file:
                        op2.binary_debug.write('%s - %s\n' % (name, str(out)))
                    obj.add_sort1(dt, eid, fx, fy, fz, otx, oty, otz, etx, ety, etz,
                                  mx, my, mz, orx, ory, orz, erx, ery, erz)
                    n += ntotal
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
        return n, nelements, ntotal
    def _oes_composite_solid_nx(self, data, ndata: int, dt, is_magnitude_phase: bool,
                                result_type: str, prefix: str, postfix: str):
        """
        269: Composite HEXA element (CHEXAL)
        270: Composite PENTA element (CPENTAL)

        NX PCOMPS (CHEXAL, CPENTAL)
        Linear
        Etype        Corner Center
        ======       ====== ======
        CHEXAL-269     11     43
        CPENTAL-???   11???  3+6*5???

        TCODE,7 =0 Real
        Q4CSTR=0 Center option
        2 PLY I Lamina number (11)
        3 FLOC CHAR4 Fiber location (BOT, MID, TOP)

        4 GRID I Edge grid ID (center=0)
        5 E11 RS Normal strain in the 1-direction
        6 E22 RS Normal strain in the 2-direction
        7 E33 RS Normal strain in the 3-direction
        8 E12 RS Shear strain in the 12-plane
        9 E23 RS Shear strain in the 23-plane
        10 E13 RS Shear strain in the 13-plane
        11 ETMAX1 RS von Mises strain

        Q4CSTR=1 Center and Corner option (3+8*5=43)
        2 PLY I Lamina number
        3 FLOC CHAR4 Fiber location (BOT, MID, TOP)

        4 GRID I Edge grid ID (center=0)
        5 E11 RS Normal strain in the 1-direction
        6 E22 RS Normal strain in the 2-direction
        7 E33 RS Normal strain in the 3-direction
        8 E12 RS Shear strain in the 12-plane
        9 E23 RS Shear strain in the 23-plane
        10 E13 RS Shear strain in the 13-plane
        11 ETMAX1 RS Von Mises strain
        For each fiber location requested (PLSLOC), words 4 through 11 repeat 5 times.

        Complex
        TCODE,7 =1 Real/imaginary
        Q4CSTR=0 Center option
        2 PLY I Lamina number
        3 FLOC CHAR4 Fiber location (BOT, MID, TOP)

        4 GRID I Edge grid ID (Center = 0)
        5 E11r RS Normal strain in the 1-direction, real part
        6 E22r RS Normal strain in the 2-direction, real part
        7 E33r RS Normal strain in the 3-direction, real part
        8 E12r RS Shear strain in the 12-plane, real part
        9 E23r RS Shear strain in the 23-plane, real part
        10 E13r RS Shear strain in the 13-plane, real part
        11 E11i RS Normal strain in the 1-direction, imaginary part
        12 E22i RS Normal strain in the 2-direction, imaginary part
        13 E33i RS Normal strain in the 3-direction, imaginary part
        14 E12i RS Shear strain in the 12-plane, imaginary part
        15 EL23i RS Shear strain in the 23-plane, imaginary part
        16 EL13i RS Shear strain in the 13-plane, imaginary part

        Q4CSTR=1 Center and Corner option
        2 PLY I Lamina number
        3 FLOC CHAR4 Fiber location (BOT, MID, TOP)

        4 GRID I Edge grid ID (Center = 0)
        5 E11r RS Normal strain in the 1-direction, real part
        6 E22r RS Normal strain in the 2-direction, real part
        7 E33r RS Normal strain in the 3-direction, real part
        8 E12r RS Shear strain in the 12-plane, real part
        9 E23r RS Shear strain in the 23-plane, real part
        10 E13r RS Shear strain in the 13-plane, real part
        11 E11i RS Normal strain in the 1-direction, imaginary part
        12 E22i RS Normal strain in the 2-direction, imaginary part
        13 E33i RS Normal strain in the 3-direction, imaginary part
        14 E12i RS Shear strain in the 12-plane, imaginary part
        15 E23i RS Shear strain in the 23-plane, imaginary part
        16 E13i RS Shear strain in the 13-plane, imaginary part
        For each fiber location requested (PLSLOC), words 4 through 16 repeat 5 times.
        """
        op2 = self.op2
        n = 0
        #assert op2.is_stress is True, op2.code_information()
        stress_strain = 'stress' if op2.is_stress else 'strain'
        # NOTE(review): prefix/postfix parameters are not used in these
        # result names (unlike the sibling _oes_* methods) — confirm intended
        if op2.element_type == 269:
            result_name = f'{stress_strain}.chexa_composite_{stress_strain}'
        elif op2.element_type == 270:
            result_name = f'{stress_strain}.cpenta_composite_{stress_strain}'
        else:
            raise NotImplementedError(op2.code_information())
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)

        #if result_type == 0 and op2.num_wide == 43:  # real
            #op2.log.warning(f'skipping corner option for composite solid-{op2.element_name}-{op2.element_type}')
            #struct9 = Struct(op2._endian + mapfmt(op2._analysis_code_fmt + b'i 4s i 5f', self.size))  # 9
        obj_vector_real = RealSolidCompositeStressArray if op2.is_stress else RealSolidCompositeStrainArray
        #obj_vector_real = RealSolidCompositeStressArray
        if result_type == 0 and op2.num_wide == 11:  # real; center
            #op2.log.warning(f'skipping center option for composite solid-{op2.element_name}-{op2.element_type}')
            ntotal = 44 * self.factor  # 11 * 4
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None

            obj = op2.obj  # type: RealCompositeSolidStressArray
            struct11 = Struct(op2._endian + mapfmt(op2._analysis_code_fmt + b'i 4s i 7f', self.size))  # 11
            #sort_method = op2.sort_method
            #add_eid_sort_x = getattr(obj, 'add_eid_sort' + str(op2.sort_method))
            #add_sort_x = getattr(obj, 'add_sort' + str(op2.sort_method))
            for unused_i in range(nelements):
                edata = data[n:n+ntotal]  # 4*11
                out = struct11.unpack(edata)
                #print(out)
                (eid_device, layer, location_bytes, grid, o11, o22, o33, t12, t23, t13, ovm) = out
                eid, dt = get_eid_dt_from_eid_device(
                    eid_device, op2.nonlinear_factor, op2.sort_method)
                location = location_bytes.strip().decode('latin1')
                assert location == 'MID', out
                #print(f'eid,layer=({eid},{layer}) location={location!r} grid={grid} o11={o11:g} o22={o22:g} o33={o33:g} t12={t12:g} t1z={t13:g} t2z={t23:g} ovm={ovm:g}')
                obj.add_sort1(dt, eid, layer, location, grid, o11, o22, o33, t12, t23, t13, ovm)
                n += ntotal
        elif result_type == 0 and op2.num_wide == 43:  # real; center + corner (Q4CSTR=1)
            #op2.log.warning(f'skipping center option for composite solid-{op2.element_name}-{op2.element_type}')
            ntotal = 172 * self.factor  # 43*4
            nelements = ndata // ntotal
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None

            obj = op2.obj  # type: RealCompositeSolidStressArray
            # header (eid, ply, fiber location) then 5 grid rows of 8 words
            structa = Struct(op2._endian + mapfmt(op2._analysis_code_fmt + b'i 4s', self.size))  # 3
            structb = Struct(op2._endian + mapfmt(b'i 7f', self.size))  # 8
            #sort_method = op2.sort_method
            #add_eid_sort_x = getattr(obj, 'add_eid_sort' + str(op2.sort_method))
            #add_sort_x = getattr(obj, 'add_sort' + str(op2.sort_method))
            ntotal1 = 12 * self.factor  # 4*3
            ntotal2 = 32 * self.factor  # 4*8
            for unused_i in range(nelements):
                edata = data[n:n+ntotal1]  # 4*3, 4*40 = 4*43
                #self.show_data(edata)
                out = structa.unpack(edata)
                #(13, 1, b' MID')
                eid_device, layer, location_bytes = out
                eid, dt = get_eid_dt_from_eid_device(
                    eid_device, op2.nonlinear_factor, op2.sort_method)
                location = location_bytes.strip().decode('latin1')
                assert location == 'MID', out
                #print(out)
                n += ntotal1
                for unused_j in range(5):
                    edata = data[n:n+ntotal2]
                    out = structb.unpack(edata)
                    #print('  %s' % str(out))
                    (grid, o11, o22, o33, t12, t23, t13, ovm) = out
                    #print(f'eid,layer=({eid},{layer}) location={location!r} grid={grid} o11={o11:g} o22={o22:g} o33={o33:g} t12={t12:g} t1z={t13:g} t2z={t23:g} ovm={ovm:g}')
                    obj.add_sort1(dt, eid, layer, location, grid, o11, o22, o33, t12, t23, t13, ovm)
                    n += ntotal2
                #print(out)
                #(eid_device, layer, o1, o2, t12, t1z, t2z, angle, major, minor, ovm) = out
                #print(out)
                #n += ntotal
        else:
            raise NotImplementedError(op2.code_information())
        return n, nelements, ntotal
    def _oes_cbend(self, data, ndata, dt, is_magnitude_phase,
                   result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 69 : CBEND

        Records carry an eid word plus 2 end-point groups of 10 words
        (real/complex) or 6 words (random).

        Returns (nbytes_read, nelements, ntotal); returns
        (ndata, None, None) when the result is not requested/saved.
        """
        op2 = self.op2
        if op2.is_stress:
            result_name = prefix + 'cbend_stress' + postfix
            obj_vector_real = RealBendStressArray
            obj_vector_complex = ComplexBendStressArray
            obj_vector_random = RandomBendStressArray
        else:
            result_name = prefix + 'cbend_strain' + postfix
            obj_vector_real = RealBendStrainArray
            obj_vector_complex = ComplexBendStrainArray
            obj_vector_random = RandomBendStrainArray

        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        #print(op2.code_information())

        if result_type == 0 and op2.num_wide == 21:  # real
            #TCODE,7 =0 Real
            #2 GRID I External Grid Point identification number
            #3 CA RS Circumferential Angle
            #4 EC RS Long. strain at Point C
            #5 ED RS Long. strain at Point D
            #6 EE RS Long. strain at Point E
            #7 EF RS Long. strain at Point F
            #8 EMAX RS Maximum strain
            #9 EMIN RS Minimum strain
            #10 MST RS Margin of Safety in Tension
            #11 MSC RS Margin of Safety in Compression
            #Words 2 through 11 repeat 002 times
            n = 0
            ntotal = 84 * self.factor  # 4*21
            nelements = ndata // ntotal
            assert ndata % ntotal == 0, 'ndata=%s ntotal=%s nelements=%s error=%s' % (ndata, ntotal, nelements, ndata % ntotal)

            #nlayers = nelements * 2
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            assert obj is not None
            # 'and 0' permanently disables this vectorized branch (dead code);
            # NOTE(review): the reshape uses 4 columns, not num_wide=21 —
            # verify before enabling
            if op2.use_vector and is_vectorized and op2.sort_method == 1 and 0:
                n = nelements * ntotal
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2

                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 4)
                itime = obj.itime
                obj._times[itime] = dt
                if itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 4)
                    eids = ints[:, 0] // 10
                    assert eids.min() > 0, eids.min()
                    obj.element[itotal:itotal2] = eids

                #[max_strain, avg_strain, margin]
                obj.data[itime, itotal:itotal2, :] = floats[:, 1:].copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                n = oes_cbend_real_21(op2, data, obj,
                                      nelements, ntotal, dt)
            #msg = ''
            #if op2.read_mode == 2:
                #msg = op2.code_information()
            #n = op2._not_implemented_or_skip(data, ndata, msg)
            #return n, None, None
        elif result_type == 1 and op2.num_wide == 21:  # complex
            n = 0
            ntotal = 84 * self.factor  # 4*21
            nelements = ndata // ntotal
            assert ndata % ntotal == 0, 'ndata=%s ntotal=%s nelements=%s error=%s' % (ndata, ntotal, nelements, ndata % ntotal)
            #TCODE,7 =1 Real / Imaginary
            #2 GRID I External Grid Point identification number
            #3 CA RS Circumferential Angle
            #4 SCR RS Long. Stress at Point C
            #5 SDR RS Long. Stress at Point D
            #6 SER RS Long. Stress at Point E
            #7 SFR RS Long. Stress at Point F
            #8 SCI RS Long. Stress at Point C
            #9 SDI RS Long. Stress at Point D
            #10 SEI RS Long. Stress at Point E
            #11 SFI RS Long. Stress at Point F
            #Words 2 through 11 repeat 002 times
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_complex)
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None

            obj = op2.obj
            assert obj is not None
            # 'and 0' permanently disables this vectorized branch (dead code)
            if op2.use_vector and is_vectorized and op2.sort_method == 1 and 0:
                n = nelements * 4 * op2.num_wide
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2

                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 4)
                itime = obj.itime
                obj._times[itime] = dt
                if itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 4)
                    eids = ints[:, 0] // 10
                    assert eids.min() > 0, eids.min()
                    obj.element[itotal:itotal2] = eids

                #[max_strain, avg_strain, margin]
                obj.data[itime, itotal:itotal2, :] = floats[:, 1:].copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                # eid word, then 2 end points x 10 words (grid, angle, 4 real, 4 imag)
                ntotali = 40
                struct1 = Struct(op2._endian + op2._analysis_code_fmt)
                struct2 = Struct(op2._endian + b'i9f')
                for unused_i in range(nelements):
                    edata = data[n:n + 4]
                    eid_device, = struct1.unpack(edata)
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)

                    n += 4
                    for unused_j in range(2):
                        edata = data[n:n + ntotali]
                        out = struct2.unpack(edata)
                        if op2.is_debug_file:
                            op2.binary_debug.write('BEND-69 - eid=%s %s\n' % (eid, str(out)))
                        #print('BEND-69 - eid=%s %s\n' % (eid, str(out)))

                        (grid, angle, scr, sdr, ser, sfr,
                         sci, sdi, sei, sfi) = out

                        if is_magnitude_phase:
                            sc = polar_to_real_imag(scr, sci)
                            sd = polar_to_real_imag(sdr, sdi)
                            se = polar_to_real_imag(ser, sei)
                            sf = polar_to_real_imag(sfr, sfi)
                        else:
                            sc = complex(scr, sci)
                            sd = complex(sdr, sdi)
                            se = complex(ser, sei)
                            sf = complex(sfr, sfi)
                        obj.add_sort1(dt, eid, grid, angle, sc, sd, se, sf)
                        n += ntotali
        elif result_type == 2 and op2.num_wide == 13:
            n = 0
            ntotal = 52 * self.factor  # 4*13
            nelements = ndata // ntotal
            #TCODE,7 =2 Real
            #2 GRID I External Grid Point identification number
            #3 CA RS Circumferential Angle
            #4 SC RS Long. Stress at Point C
            #5 SD RS Long. Stress at Point D
            #6 SE RS Long. Stress at Point E
            #7 SF RS Long. Stress at Point F
            #Words 2 through 7 repeat 002 times

            #if op2.table_name != "OESPSD2":
                #msg = ''
                #if op2.read_mode == 2:
                    #msg = op2.code_information()
                #n = op2._not_implemented_or_skip(data, ndata, msg)
                #return n, None, None
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_random)
            if auto_return:
                assert ntotal == op2.num_wide * 4
                return nelements * ntotal, None, None

            obj = op2.obj
            # 'and 0' permanently disables this vectorized branch (dead code)
            if op2.use_vector and is_vectorized and op2.sort_method == 1 and 0:
                n = nelements * 4 * op2.num_wide
                itotal = obj.ielement
                ielement2 = obj.itotal + nelements
                itotal2 = ielement2

                floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 4)
                itime = obj.itime
                obj._times[itime] = dt
                if itime == 0:
                    ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 4)
                    eids = ints[:, 0] // 10
                    assert eids.min() > 0, eids.min()
                    obj.element[itotal:itotal2] = eids

                #[max_strain, avg_strain, margin]
                obj.data[itime, itotal:itotal2, :] = floats[:, 1:].copy()
                obj.itotal = itotal2
                obj.ielement = ielement2
            else:
                # eid word, then 2 end points x 6 words (grid, angle, 4 stresses)
                ntotali = 24
                struct1 = Struct(op2._endian + op2._analysis_code_fmt)
                struct2 = Struct(op2._endian + b'i5f')
                for unused_i in range(nelements):
                    edata = data[n:n + 4]
                    #self.show_data(edata)
                    eid_device, = struct1.unpack(edata)
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)

                    n += 4
                    for unused_i in range(2):
                        edata = data[n:n + ntotali]
                        out = struct2.unpack(edata)
                        if op2.is_debug_file:
                            op2.binary_debug.write('BEND-69 - eid=%s dt=%s %s\n' % (eid, dt, str(out)))
                        #print('BEND-69 - eid=%s dt=%s %s\n' % (eid, dt, str(out)))

                        (grid, angle, sc, sd, se, sf) = out
                        obj.add_sort1(dt, eid, grid, angle, sc, sd, se, sf)
                        n += ntotali
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
        return n, nelements, ntotal
    def _oes_cgap_nonlinear(self, data, ndata, dt, is_magnitude_phase,
                            result_type, prefix, postfix):
        """
        reads stress/strain for element type:
         - 86 : GAPNL

        Records are 11 words: eid, 8 floats, and 2 CHAR4 status words
        (form1/form2); the vectorized path stores only the 8 floats.

        Returns (nbytes_read, nelements, ntotal); returns
        (ndata, None, None) when the result is not requested/saved.
        """
        op2 = self.op2
        n = 0
        if op2.is_stress:
            result_name = prefix + 'cgap_stress' + postfix  # nonlinear_
        else:
            result_name = prefix + 'cgap_strain' + postfix  # nonlinear_
        if op2._results.is_not_saved(result_name):
            return ndata, None, None
        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        if result_type == 0 and op2.num_wide == 11:  # real?
            if op2.is_stress:
                obj_vector_real = NonlinearGapStressArray
            else:
                raise NotImplementedError('NonlinearGapStrain')

            ntotal = 44 * self.factor  # 4*11
            nelements = ndata // ntotal
            assert ndata % ntotal == 0
            auto_return, is_vectorized = op2._create_oes_object4(
                nelements, result_name, slot, obj_vector_real)
            if auto_return:
                return nelements * ntotal, None, None

            obj = op2.obj
            if op2.use_vector and is_vectorized and op2.sort_method == 1:
                n = nelements * ntotal
                ielement = obj.ielement
                ielement2 = ielement + nelements
                obj._times[obj.itime] = dt
                self.obj_set_element(obj, ielement, ielement2, data, nelements)
                #if obj.itime == 0:
                    #ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 11).copy()
                    #eids = ints[:, 0] // 10
                    #obj.element[ielement:ielement2] = eids

                floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 11)
                # skipping [form1, form2]
                #[cpx, shy, shz, au, shv, shw, slv, slp]
                obj.data[obj.itime, ielement:ielement2, :] = floats[:, 1:9].copy()
            else:
                # CHAR4 status words double to 8 bytes in 64-bit files
                if self.size == 4:
                    struct1 = Struct(op2._endian + op2._analysis_code_fmt + b'8f4s4s')
                else:
                    struct1 = Struct(op2._endian + mapfmt(op2._analysis_code_fmt, self.size) + b'8d8s8s')
                for unused_i in range(nelements):
                    edata = data[n:n + ntotal]
                    out = struct1.unpack(edata)  # num_wide=11
                    (eid_device, cpx, shy, shz, au, shv, shw, slv, slp, form1, form2) = out
                    eid, dt = get_eid_dt_from_eid_device(
                        eid_device, op2.nonlinear_factor, op2.sort_method)
                    if op2.is_debug_file:
                        op2.binary_debug.write('CGAPNL-86 - %s\n' % str(out))
                    obj.add_sort1(dt, eid, cpx, shy, shz, au, shv, shw, slv, slp, form1, form2)
                    n += ntotal
        else:  # pragma: no cover
            raise RuntimeError(op2.code_information())
        return n, nelements, ntotal
def _oes_cbeam_nonlinear(self, data, ndata, dt, is_magnitude_phase,
                         result_type, prefix, postfix):
    """
    reads stress/strain for element type:
     - 94 : BEAMNL

    Each element record holds 51 words: grid A + 4 stations (C/D/E/F),
    grid B + 4 stations.  Returns ``(n, nelements, ntotal)``.
    """
    op2 = self.op2
    n = 0
    numwide_real = 51
    numwide_random = 0

    if op2.is_stress:
        result_name = prefix + 'cbeam_stress' + postfix
    else:
        result_name = prefix + 'cbeam_strain' + postfix
    if op2._results.is_not_saved(result_name):
        # result was deselected by the user; skip the whole record
        return ndata, None, None
    op2._results._found_result(result_name)
    slot = op2.get_result(result_name)
    if result_type == 0 and op2.num_wide == numwide_real:
        msg = result_name
        if op2.is_stress:
            obj_vector_real = RealNonlinearBeamStressArray
        else:
            raise NotImplementedError('Nonlinear CBEAM Strain...this should never happen')

        ntotal = numwide_real * 4 * self.factor  # 204
        nelements = ndata // ntotal

        # 8 layers per element: 2 ends x 4 stress-recovery points
        nlayers = nelements * 8
        auto_return, is_vectorized = op2._create_oes_object4(
            nlayers, result_name, slot, obj_vector_real)
        if auto_return:
            op2._data_factor = 8
            return ndata, None, None

        obj = op2.obj
        if op2.is_debug_file:
            op2.binary_debug.write(' [cap, element1, element2, ..., cap]\n')
            #op2.binary_debug.write(' cap = %i # assume 1 cap when there could have been multiple\n' % ndata)
            #op2.binary_debug.write(' #elementi = [eid_device, s1a, s2a, s3a, s4a, axial, smaxa, smina, MSt,\n')
            #op2.binary_debug.write(' s1b, s2b, s3b, s4b, smaxb, sminb, MSc]\n')
            #op2.binary_debug.write(' nelements=%i; nnodes=1 # centroid\n' % nelements)

        # layout: [gridA, (4s label + 5 floats) x 4, gridB, (4s label + 5 floats) x 4]
        if self.size == 4:
            struct1 = Struct(op2._endian + b'2i 4s5f 4s5f 4s5f 4s5f i 4s5f 4s5f 4s5f 4s5f')  # 2 + 6*8 + 1 = 51
        else:
            assert self.size == 8, self.size
            struct1 = Struct(op2._endian + b'2q 8s5d 8s5d 8s5d 8s5d q 8s5d 8s5d 8s5d 8s5d')  # 2 + 6*8 + 1 = 51
        for unused_i in range(nelements):  # num_wide=51
            edata = data[n:n + ntotal]
            out = struct1.unpack(edata)

            if op2.is_debug_file:
                op2.binary_debug.write('BEAMNL-94 - %s\n' % str(out))

            #gridA, CA, long_CA, eqS_CA, tE_CA, eps_CA, ecs_CA,
            #       DA, long_DA, eqS_DA, tE_DA, eps_DA, ecs_DA,
            #       EA, long_EA, eqS_EA, tE_EA, eps_EA, ecs_EA,
            #       FA, long_FA, eqS_FA, tE_FA, eps_FA, ecs_FA,
            #gridB, CB, long_CB, eqS_CB, tE_CB, eps_CB, ecs_CB,
            #       DB, long_DB, eqS_DB, tE_DB, eps_DB, ecs_DB,
            #       EB, long_EB, eqS_EB, tE_EB, eps_EB, ecs_EB,
            #       FB, long_FB, eqS_FB, tE_FB, eps_FB, ecs_FB,
            # sanity-check the embedded station labels at end A...
            assert out[3-1].rstrip() == b'   C', out[3-1]
            assert out[9-1].rstrip() == b'   D', out[9-1]
            assert out[15-1].rstrip() == b'   E', out[15-1]
            assert out[21-1].rstrip() == b'   F', out[21-1]
            # ...and end B
            assert out[28-1].rstrip() == b'   C', out[28-1]
            assert out[34-1].rstrip() == b'   D', out[34-1]
            assert out[40-1].rstrip() == b'   E', out[40-1]
            assert out[46-1].rstrip() == b'   F', out[46-1]
            eid_device = out[0]
            eid, dt = get_eid_dt_from_eid_device(
                eid_device, op2.nonlinear_factor, op2.sort_method)
            obj.add_new_eid_sort1(dt, eid, *out[1:])
            n += ntotal
    elif result_type == 2 and op2.num_wide == numwide_random:  # random
        msg = op2.code_information()
        raise NotImplementedError(msg)
        #return op2._not_implemented_or_skip(data, ndata, msg)
    else:  # pragma: no cover
        raise RuntimeError(op2.code_information())
    return n, nelements, ntotal
def _oes_cbar_100(self, data, ndata, dt, is_magnitude_phase,
                  result_type, prefix, postfix):
    """
    reads stress/strain for element type:
     - 100 : BARS

    10 words per element: [eid_device, sd, sxc, sxd, sxe, sxf,
    axial, smax, smin, MS].  Returns ``(n, nelements, ntotal)``.
    """
    op2 = self.op2
    n = 0
    if op2.is_stress:
        result_name = prefix + 'cbar_stress_10nodes' + postfix
    else:
        result_name = prefix + 'cbar_strain_10nodes' + postfix

    if op2._results.is_not_saved(result_name):
        # result was deselected by the user; skip the whole record
        return ndata, None, None
    op2._results._found_result(result_name)
    slot = op2.get_result(result_name)
    if result_type == 0 and op2.num_wide == 10:  # real
        if op2.is_stress:
            obj_vector_real = RealBar10NodesStressArray
        else:
            obj_vector_real = RealBar10NodesStrainArray

        ntotal = 10 * 4 * self.factor
        nelements = ndata // ntotal

        auto_return, is_vectorized = op2._create_oes_object4(
            nelements, result_name, slot, obj_vector_real)
        if auto_return:
            # sizing pass only; no data is parsed
            return ndata, None, None

        if op2.is_debug_file:
            op2.binary_debug.write(' [cap, element1, element2, ..., cap]\n')
            #op2.binary_debug.write(' cap = %i # assume 1 cap when there could have been multiple\n' % ndata)
            op2.binary_debug.write(' #elementi = [eid_device, sd, sxc, sxd, sxe, sxf, axial, smax, smin, MS]\n')
            op2.binary_debug.write(' nelements=%i; nnodes=1 # centroid\n' % nelements)
        obj = op2.obj

        if op2.use_vector and is_vectorized and op2.sort_method == 1:
            # vectorized path: reinterpret the whole record at once
            # self.itime = 0
            # self.ielement = 0
            # self.itotal = 0
            #self.ntimes = 0
            #self.nelements = 0
            n = nelements * ntotal

            istart = obj.itotal
            iend = istart + nelements
            obj._times[obj.itime] = dt
            self.obj_set_element(obj, istart, iend, data, nelements)

            floats = frombuffer(data, dtype=op2.fdtype8).reshape(nelements, 10)
            #[sd, sxc, sxd, sxe, sxf, axial, smax, smin, MS]
            obj.data[obj.itime, istart:iend, :] = floats[:, 1:].copy()
        else:
            # scalar fall-back implemented in a shared helper
            n = oes_cbar100_real_10(op2, data, obj, nelements, ntotal, dt)
    else:  # pragma: no cover
        raise RuntimeError(op2.code_information())
    return n, nelements, ntotal
def _oes_hyperelastic_quad(self, data, ndata, dt, unused_is_magnitude_phase,
                           result_type, prefix, postfix):
    """
    139-QUAD4FD

    Hyperelastic CQUAD4 record: 30 words per element =
    [eid_device, TYPE(4s)] + 4 corner blocks of [nid, sx, sy, sxy,
    angle, smj, smi] (the first corner is packed with the header).
    Returns ``(n, nelements, ntotal)``.
    """
    op2 = self.op2
    #if op2.is_stress:
    result_name = prefix + 'hyperelastic_cquad4_strain' + postfix
    if op2._results.is_not_saved(result_name):
        # result was deselected by the user; skip the whole record
        return ndata, None, None

    if result_type == 0 and op2.num_wide == 30:
        obj_vector_real = HyperelasticQuadArray

        op2._results._found_result(result_name)
        slot = op2.get_result(result_name)
        #op2.create_transient_object(result_name, slot, obj_vector_real)

        ntotal = 120 * self.factor  # 36+28*3
        nelements = ndata // ntotal

        #print(op2.code_information())
        #print(op2.table_name_str)
        auto_return, is_vectorized = op2._create_oes_object4(
            nelements, result_name, slot, obj_vector_real)
        if auto_return:
            op2._data_factor = 4  # number of "layers" for an element
            return nelements * ntotal, None, None
            #return ndata, None, None

        #if op2.is_debug_file:
            #op2.binary_debug.write(' [cap, element1, element2, ..., cap]\n')
            ##op2.binary_debug.write(' cap = %i # assume 1 cap when there could have been multiple\n' % ndata)
            #op2.binary_debug.write(' #elementi = [eid_device, sd, sxc, sxd, sxe, sxf, axial, smax, smin, MS]\n')
            #op2.binary_debug.write(' nelements=%i; nnodes=1 # centroid\n' % nelements)
        obj = op2.obj

        if op2.use_vector and is_vectorized and op2.sort_method == 1:
            # vectorized path: reinterpret the whole record at once
            # self.itime = 0
            # self.ielement = 0
            # self.itotal = 0
            #self.ntimes = 0
            #self.nelements = 0
            n = nelements * ntotal
            istart = obj.itotal
            iend = istart + nelements * 4  # 4 corner rows per element
            obj._times[obj.itime] = dt

            #if obj.itime == 0:
            # 30 = 2 + 28 = 2 + 7*4
            ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, 30).copy()
            #strs = frombuffer(data, dtype=self.sdtype)
            ints2 = ints[:, 2:].reshape(nelements * 4, 7)
            #strings = frombuffer(data, dtype=???)
            eids = ints[:, 0] // 10
            nids = ints2[:, 0]
            # repeat each eid 4x so it pairs with its 4 corner nodes
            eids2 = np.vstack([eids, eids, eids, eids]).T.ravel()
            obj.element_node[istart:iend, 0] = eids2
            obj.element_node[istart:iend, 1] = nids
            #obj.element[istart:iend] = eids

            # dropping off eid and the string word (some kind of Type)
            floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, 30)[:, 2:].copy()
            floats2 = floats.reshape(nelements * 4, 7)
            #[oxx, oyy, txy, angle, majorp, minorp]
            obj.data[obj.itime, istart:iend, :] = floats2[:, 1:]
        else:
            n = 0
            # (2 + 7*4)*4 = 30*4 = 120
            ntotal1 = 36 * self.factor  # 4*9
            ntotal2 = 28 * self.factor  # 4*7
            s1 = Struct(op2._endian + op2._analysis_code_fmt + b'4s i6f')  # 1 + 4+1+6 = 12
            s2 = Struct(op2._endian + b'i6f')
            for unused_i in range(nelements):
                edata = data[n:n+ntotal1]
                out = s1.unpack(edata)
                if op2.is_debug_file:
                    op2.binary_debug.write('CQUAD4FD-139A- %s\n' % (str(out)))

                (eid_device, etype, nid, sx, sy, sxy, angle, smj, smi) = out
                eid, dt = get_eid_dt_from_eid_device(
                    eid_device, op2.nonlinear_factor, op2.sort_method)
                obj._add_new_eid_sort1(dt, eid, etype, nid, sx, sy, sxy, angle, smj, smi)
                n += ntotal1

                # the 1st corner came packed with the header above, so only
                # 3 more corner blocks follow
                for unused_i in range(3):  # TODO: why is this not 4?
                    edata = data[n:n + ntotal2]
                    out = s2.unpack(edata)
                    if op2.is_debug_file:
                        op2.binary_debug.write('               %s\n' % (str(out)))
                    (nid, sx, sy, sxy, angle, smj, smi) = out
                    obj._add_sort1(dt, eid, etype, nid, sx, sy, sxy, angle, smj, smi)
                    n += ntotal2
    else:
        raise RuntimeError(op2.code_information())
        #msg = 'numwide=%s element_num=%s etype=%s' % (
            #op2.num_wide, op2.element_type, op2.element_name)
        #return op2._not_implemented_or_skip(data, ndata, msg), None, None
    return n, nelements, ntotal
def _oes_plate_stress_34(self, data, ndata, unused_dt, unused_is_magnitude_phase,
                         unused_stress_name, unused_prefix, unused_postfix):
    """
    271-CPLSTN3
    275-CPLSTS3

    Plane-stress/plane-strain triangles are not decoded yet; the record
    is skipped and the consumed byte count is returned.
    """
    op2 = self.op2
    reason = op2.code_information()
    nbytes = op2._not_implemented_or_skip(data, ndata, reason)
    return nbytes, None, None
    # ---- draft implementation, kept for future work -----------------
    #if op2.element_type == 271:
        #result_name = 'cplstn3'
        #unused_nnodes = 1
        #ntotal = 4 * 6
    #elif op2.element_type == 275:
        #result_name = 'cplsts3'
        #unused_nnodes = 1
        #ntotal = 4 * 6
    #else:  # pragma: no cover
        #raise RuntimeError(op2.code_information())
    #if op2.is_stress:
        #obj_vector_real = RealCPLSTRNPlateStressArray
        #result_name += '_stress'
    #else:
        #obj_vector_real = RealCPLSTRNPlateStrainArray
        #result_name += '_strain'
    #numwide_real = ntotal // 4
    #if op2.format_code == 1 and op2.num_wide == numwide_real:
        ##ntotal = 4 * (1 + 6 * (nnodes))
        #nelements = ndata // ntotal
        ##op2._data_factor = 10  # TODO: why is this 10?
        #if op2.is_stress:
            #obj_vector_real = RealCPLSTRNPlateStressArray
            ##result_name = 'cplstn3_stress'
        #else:
            #obj_vector_real = RealCPLSTRNPlateStressArray
            ##result_name = 'cplstn3_strain'
        #slot = op2.get_result(result_name)
        #auto_return, is_vectorized = op2._create_oes_object4(
            #nelements, result_name, slot, obj_vector_real)
        #if auto_return:
            #assert ntotal == op2.num_wide * 4
            #return nelements * ntotal, None, None
        #obj = op2.obj
        ##if op2.use_vector and is_vectorized and op2.sort_method == 1:
        ##n = nelements * op2.num_wide * 4
        #istart = obj.itotal
        #iend = istart + nelements
        #obj._times[obj.itime] = dt
        #self.obj_set_element(obj, istart, iend, data, nelements)
        #floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, numwide_real)
        #results = floats[:, 1:].copy()
        ##print('results.shape', results.shape)
        ##[oxx, oyy, ozz, txy, ovm]
        #obj.data[obj.itime, istart:iend, :] = results
    #else:
        #msg = 'sort1 Type=%s num=%s' % (op2.element_name, op2.element_type)
        #return op2._not_implemented_or_skip(data, ndata, msg), None, None
def _oes_plate_stress_68(self, data, ndata, unused_dt, unused_is_magnitude_phase,
                         unused_stress_name, unused_prefix, unused_postfix):
    """
    276-CPLSTS4
    277-CPLSTS6
    278-CPLSTS8

    Higher-order plane-stress elements are not decoded yet; the record
    is skipped and the consumed byte count is returned.
    """
    op2 = self.op2
    reason = op2.code_information()
    nbytes = op2._not_implemented_or_skip(data, ndata, reason)
    return nbytes, None, None
    # ---- draft implementation, kept for future work -----------------
    #if op2.element_type == 276:
        #result_name = 'cplsts4'
        #nnodes = 5  # 4 + 1
        #ntotal = 4 * 32
    #elif op2.element_type == 277:
        #result_name = 'cplsts6'
        #nnodes = 4
        #ntotal = 4 * 26
    #elif op2.element_type == 278:
        #result_name = 'cplsts8'
        #nnodes = 5
        #ntotal = 4 * 32
    #else:
        #raise RuntimeError(op2.code_information())
    #if op2.is_stress:
        #obj_vector_real = RealCPLSTRNPlateStressArray
        #result_name += '_stress'
    #else:
        #obj_vector_real = RealCPLSTRNPlateStrainArray
        #result_name += '_strain'
    #numwide_real = 2 + 6 * (nnodes)
    #assert ntotal // 4 == numwide_real, 'notal/4=%s numwide_real=%s\n%s' % (
        #ntotal // 4, numwide_real, op2.code_information())
    #ntotal = numwide_real * 4
    #if op2.format_code == 1 and op2.num_wide == numwide_real:
        #nelements = ndata // ntotal
        ##op2._data_factor = 10  # TODO: why is this 10?
        #if op2.is_stress:
            #obj_vector_real = RealCPLSTRNPlateStressArray
            ##result_name = 'cplstn3_stress'
        #else:
            #obj_vector_real = RealCPLSTRNPlateStressArray
            ##result_name = 'cplstn3_strain'
        #slot = getattr(op2, result_name)
        #nlayers = nelements * nnodes
        #auto_return, is_vectorized = op2._create_oes_object4(
            #nlayers, result_name, slot, obj_vector_real)
        #if auto_return:
            #op2._data_factor = nnodes
            #assert ntotal == op2.num_wide * 4
            #return nelements * ntotal, None, None
        #obj = op2.obj
        ##if op2.use_vector and is_vectorized and op2.sort_method == 1:
        #n = nlayers * op2.num_wide * 4
        #istart = obj.itotal
        #iend = istart + nlayers
        #obj._times[obj.itime] = dt
        #if obj.itime == 0:
            #print(frombuffer(data, dtype=op2.idtype).size)
            #print('nelements=%s numwide=%s' % (nelements, numwide_real))
            #ints = frombuffer(data, dtype=op2.idtype).reshape(nelements, numwide_real)
            #eids = ints[:, 0] // 10
            ##obj.element[istart:iend] = eids
        #floats = frombuffer(data, dtype=op2.fdtype).reshape(nelements, numwide_real).copy()
        #print('floats[:, 2:].shape', floats[:, 2:].shape)
        #print('nnelements=%s nnodes=%s numwide//nodes=%s' % (nelements, nnodes, (numwide_real-2) / nnodes))
        #results = floats[:, 2:].reshape(nelements, nnodes * 6)
        ##[oxx, oyy, ozz, txy, ovm]
        #obj.data[obj.itime, istart:iend, :] = results
    #else:
        #msg = 'sort1 Type=%s num=%s' % (op2.element_name, op2.element_type)
        #return op2._not_implemented_or_skip(data, ndata, msg)
def obj_set_element(self, obj, ielement, ielement2, data, nelements):
    """Fill ``obj.element[ielement:ielement2]`` with element ids decoded
    from the first word of each row (``eid_device // 10``).

    Only done on the first time step (``obj.itime == 0``); later steps
    reuse the ids already stored on the result object.
    """
    op2 = self.op2
    if obj.itime != 0:
        return
    table = frombuffer(data, dtype=op2.idtype8).reshape(nelements, op2.num_wide).copy()
    element_ids = table[:, 0] // 10
    assert element_ids.min() > 0, element_ids.min()
    obj.element[ielement:ielement2] = element_ids
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,628
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/op4/test/test_op4.py
|
"""defines the command line argument ``test_op4``"""
import os
import sys
import time
from traceback import print_exc
import pyNastran
from pyNastran.op4.op4 import read_op4
def run_lots_of_files(files, write_op4=True,
                      debug=True, save_cases=True, skip_files=None,
                      stop_on_failure=False, nstart=0, nstop=1000000000):
    """runs lots of op4 files

    Parameters
    ----------
    files : List[str]
        the op4 filenames to test
    write_op4 : bool; default=True
        also round-trip each file through the op4 writer
    debug : bool; default=True
        developer debug flag, forwarded to run_op4
    save_cases : bool; default=True
        write the failing filenames to ``failed_cases.in``
    skip_files : List[str]; default=None -> []
        basenames to skip
    stop_on_failure : bool; default=False
        raise on the first failure instead of continuing
    nstart / nstop : int
        slice of ``files`` to run

    Exits the interpreter with a pass/fail summary message.
    """
    if skip_files is None:
        skip_files = []
    n = ''
    failed_cases = []
    nfailed = 0
    ntotal = 0
    npassed = 0
    t0 = time.time()
    for (i, op4file) in enumerate(files[nstart:nstop], nstart):  # 149
        base_name = os.path.basename(op4file)
        #if baseName not in skipFiles and not base_name.startswith('acms') and i not in nSkip:
        if base_name not in skip_files and '#' not in op4file:
            print("%" * 80)
            print('file=%s\n' % op4file)
            n = '%s ' % i
            sys.stderr.write('%sfile=%s\n' % (n, op4file))
            ntotal += 1
            # BUGFIX: write_op4 was accepted but never forwarded
            is_passed = run_op4(op4file, write_op4=write_op4,
                                debug=debug,
                                stop_on_failure=stop_on_failure)  # True/False
            if not is_passed:
                sys.stderr.write('**file=%s\n' % op4file)
                failed_cases.append(op4file)
                nfailed += 1
            else:
                npassed += 1
            #sys.exit('end of test...test_op4.py')

    if save_cases:
        # BUGFIX: must be text mode ('w'); str lines written to a 'wb'
        # handle raise TypeError
        with open('failed_cases.in', 'w') as failed_file:
            for op4file in failed_cases:
                failed_file.write('%s\n' % op4file)

    seconds = time.time() - t0
    minutes = seconds / 60.
    print("dt = %s seconds = %s minutes" % (seconds, minutes))

    # guard against ZeroDivisionError when every file was skipped
    pass_pct = 100. * npassed / float(ntotal) if ntotal else 0.
    msg = '-----done with all models %s/%s=%.2f%% nFailed=%s-----' % (
        npassed, ntotal, pass_pct, ntotal - npassed)
    print(msg)
    sys.exit(msg)
def run_op4(op4_filename, write_op4=True, debug=True,
            stop_on_failure=False):
    """run an op4

    Reads the op4, optionally round-trips it through the ascii/binary
    writers, and returns True if everything passed.

    Parameters
    ----------
    op4_filename : str
        the op4 to test
    write_op4 : bool; default=True
        also write the matrices back out (files are deleted afterwards)
    debug : bool; default=True
        developer debug flag, forwarded to read_op4
    stop_on_failure : bool; default=False
        re-raise instead of swallowing most exceptions

    Returns
    -------
    is_passed : bool
        did the file read (and optionally write) cleanly
    """
    #print('***debug=%s' % debug)
    assert '.op4' in op4_filename.lower(), 'op4_filename=%s is not an OP4' % op4_filename
    is_passed = False
    # BUGFIX: removed 'stop_on_failure = True', which silently overrode
    # the caller's argument and made the parameter dead
    delete_op4 = True
    #debug = True
    try:
        matrices = read_op4(op4_filename, debug=debug)
        keys = sorted(matrices.keys())
        print('matrices =', keys)

        #if 0:
            #matrices2 = op4.read_op4(op4_filename)
            #print(matrices)
            #print('matrices =', matrices.keys())
            #assert list(sorted(matrices.keys())) == list(sorted(matrices2.keys()))
            #for key, (form, matrix) in sorted(matrices.items()):
                #form2, matrix2 = matrices2[key]
                #assert form == form2
                #delta = matrix - matrix2
                #assert np.array_equal(matrix, matrix2), 'delta=\n%s' % delta

        if write_op4:
            # BUGFIX: 'model' is a str, so the old 'model.write_op4(...)'
            # always raised AttributeError (swallowed by the broad except);
            # use the module-level writer instead.  The local import avoids
            # shadowing by the 'write_op4' boolean parameter.
            from pyNastran.op4.op4 import write_op4 as write_op4_file
            model = os.path.splitext(op4_filename)[0]
            write_op4_file(model + '.test_op4_ascii.op4', matrices, is_binary=False)
            write_op4_file(model + '.test_op4_binary.op4', matrices, is_binary=True)
            if delete_op4:
                try:
                    os.remove(model + '.test_op4_ascii.op4')
                    os.remove(model + '.test_op4_binary.op4')
                except OSError:
                    # best-effort cleanup only
                    pass

        is_passed = True
    except KeyboardInterrupt:
        sys.stdout.flush()
        print_exc(file=sys.stdout)
        sys.stderr.write('**file=%s\n' % op4_filename)
        sys.exit('keyboard stop...')
    #except RuntimeError:  # the op2 is bad, not my fault
    #    is_passed = True
    #    if stop_on_failure:
    #        raise
    #    else:
    #        is_passed = True

    except IOError:  # missing file
        if stop_on_failure:
            raise
    #except AssertionError:
    #    is_passed = True
    #except RuntimeError:
    #    is_passed = True
    except SystemExit:
        #print_exc(file=sys.stdout)
        #sys.exit('stopping on sys.exit')
        raise
    #except NameError:  # variable isnt defined
    #    if stop_on_failure:
    #        raise
    #    else:
    #        is_passed = True
    #except IndexError:
    #    is_passed = True
    except SyntaxError:  # Param Parse
        if stop_on_failure:
            raise
        is_passed = True
    except Exception:
        #print(e)
        if stop_on_failure:
            raise
        else:
            print_exc(file=sys.stdout)
            is_passed = False
    return is_passed
def main():
    """defines the command line argument ``test_op4``"""
    from docopt import docopt
    ver = str(pyNastran.__version__)

    msg = "Usage:\n"
    # all
    # release
    # current
    # NOTE: usage lines are indented consistently so docopt parses all
    # three patterns
    msg += "  test_op4 [-o] [-d] OP4_FILENAME\n"
    msg += "  test_op4 -h | --help\n"
    msg += "  test_op4 -v | --version\n"
    msg += "\n"
    msg += "Tests to see if an OP4 will work with pyNastran %s.\n" % ver
    msg += "\n"
    msg += "Positional Arguments:\n"
    msg += "  OP4_FILENAME         Path to OP4 file\n"
    msg += "\n"
    msg += "Options:\n"
    msg += "  -d, --debug          Developer Debug (default=False)\n"
    # BUGFIX: docopt flags default to False and this writes op4 (not op2)
    # files; the help text previously claimed '(default=True)' and 'op2'
    msg += "  -o, --write_op4      Writes the op4 to fem.test_op4_ascii.op4 (default=False)\n"
    msg += "  -h, --help           Show this help message and exit\n"
    msg += "  -v, --version        Show program's version number and exit\n"

    if len(sys.argv) == 1:
        sys.exit(msg)
    data = docopt(msg, version=ver)
    #print("data", data)
    for key, value in sorted(data.items()):
        print("%-12s = %r" % (key.strip('--'), value))

    time0 = time.time()
    run_op4(
        data['OP4_FILENAME'],
        write_op4=data['--write_op4'],
        debug=data['--debug'],
    )
    print("dt = %f" % (time.time() - time0))


if __name__ == '__main__':  # pragma: no cover
    main()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,629
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/bdf_interface/get_card.py
|
"""
defines various methods to access high level BDF data:
- GetCard()
- get_card_ids_by_card_types(self, card_types=None, reset_type_to_slot_map=False,
stop_on_missing_card=False, combine=False)
- get_rslot_map(self, reset_type_to_slot_map=False)
- get_cards_by_card_types(self, card_types, reset_type_to_slot_map=False,
stop_on_missing_card=False)
- get_SPCx_node_ids(self, spc_id, stop_on_failure=True)
- get_SPCx_node_ids_c1(self, spc_id, stop_on_failure=True)
- get_reduced_loads(self, load_id, scale=1., skip_scale_factor0=True, msg='')
- get_reduced_dloads(self, dload_id, scale=1., skip_scale_factor0=True, msg='')
- get_node_ids_with_elements(self, eids, msg='')
- get_elements_nodes_by_property_type(self, dtype='int32',
save_element_types=False)
- get_elements_properties_nodes_by_element_type(self, dtype='int32', solids=None)
- get_element_ids_list_with_pids(self, pids=None)
- get_pid_to_node_ids_and_elements_array(self, pids=None, etypes=None, idtype='int32')
- get_element_ids_dict_with_pids(self, pids=None, stop_if_no_eids=True)
- get_node_id_to_element_ids_map(self)
- get_node_id_to_elements_map(self)
- get_property_id_to_element_ids_map(self)
- get_material_id_to_property_ids_map(self)
- get_reduced_mpcs(self, mpc_id)
- get_reduced_spcs(self, spc_id)
- get_spcs(self, spc_id, consider_nodes=False)
"""
# pylint: disable=C0103
from __future__ import annotations
from copy import deepcopy
from collections import defaultdict
from typing import List, Dict, Set, Tuple, Optional, Union, Any, TYPE_CHECKING
import numpy as np
from pyNastran.bdf.bdf_interface.get_methods import GetMethods
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.bdf.mesh_utils.dvxrel import get_dvprel_ndarrays
from pyNastran.bdf.mesh_utils.mpc_dependency import (
get_mpc_node_ids, get_mpc_node_ids_c1,
get_rigid_elements_with_node_ids, get_dependent_nid_to_components,
get_lines_rigid, get_mpcs)
if TYPE_CHECKING: # pragma: no cover
from cpylog import SimpleLogger
from pyNastran.bdf.bdf import BDF
class GetCard(GetMethods):
"""defines various methods to access high level BDF data"""
def __init__(self) -> None:
    # card_type -> slot cache; (re)built lazily by
    # reset_rslot_map()/get_rslot_map()
    self._type_to_slot_map = {}
    GetMethods.__init__(self)
def get_card_ids_by_card_types(self, card_types: List[str]=None,
                               reset_type_to_slot_map: bool=False,
                               stop_on_missing_card: bool=False,
                               combine: bool=False) -> Union[Dict[str, List[int]], List[int]]:
    """
    Gets the ids of the cards of the requested type(s).

    Parameters
    ----------
    card_types : str / List[str] / default=None
        the list of keys to consider (list of strings; string)
        None : all cards
    reset_type_to_slot_map : bool
        should the mapping dictionary be rebuilt (default=False);
        set to True if you added cards
    stop_on_missing_card : bool
        crashes if you request a card and it doesn't exist
    combine : bool; default=False
        change out_dict into out_list
        combine the list of cards

    Returns
    -------
    out_dict: dict[str]=List[ids]
        the key=card_type, value=the ID of the card object
        (returned when combine=False)
    out_list: List[ids]
        value=the ID of the card object
        (returned when combine=True)

    Examples
    --------
    >>> out_dict = model.get_card_ids_by_card_types(
        card_types=['GRID', 'CTRIA3', 'CQUAD4'], combine=False)
    >>> out_dict = {
        'GRID' : [1, 2, 10, 42, 1000],
        'CTRIA3' : [1, 2, 3, 5],
        'CQUAD4' : [4],
    }

    **shell elements**

    >>> out_list = model.get_card_ids_by_card_types(
        card_types=['CTRIA3', 'CQUAD4'], combine=True)
    >>> out_list
    [1, 2, 3, 4, 5]

    """
    if card_types is None:
        card_types = list(self.cards_to_read)

    if isinstance(card_types, str):
        card_types = [card_types]
    elif not isinstance(card_types, (list, tuple)):
        raise TypeError('card_types must be a list/tuple; type=%s' % type(card_types))

    #if reset_type_to_slot_map or self._type_to_slot_map is None:
        #self._type_to_slot_map = rslot_map
    if reset_type_to_slot_map:
        self._reset_type_to_slot_map()
    #out_dict = {
        #(key) : (self._type_to_id_map[key] if key in self.card_count else [])
        #for key in card_types
    #}
    out_dict = {}
    for key in card_types:
        if key in self.card_count:
            out_dict[key] = sorted(self._type_to_id_map[key])
        else:
            if stop_on_missing_card:
                # BUGFIX: the format string has two placeholders but the
                # old code passed a single bare str (and dropped `key`),
                # so the raise itself raised TypeError
                raise RuntimeError('%r is not in the card_count; keys=%s' % (
                    key, str(sorted(self.card_count.keys()))))
            out_dict[key] = []

    if combine:
        out_list = []
        for key, value in sorted(out_dict.items()):
            out_list += value
        return out_list
    return out_dict
def _reset_type_to_slot_map(self) -> Dict[str, str]:
    """resets self._type_to_slot_map

    NOTE(review): despite the name/docstring, this method builds and
    RETURNS a card_type -> [keys] map without ever assigning
    self._type_to_slot_map; the in-place "reset" appears to be a no-op
    for callers that discard the return value -- confirm intent.
    """
    rslot_map = defaultdict(list)
    for dict_name, card_names in self._slot_to_type_map.items():
        #print('card_names=%s dict_name=%s' % (card_names, dict_name))
        card_name0 = card_names[0]
        # these slots are scalars/singletons, not keyed containers,
        # so there is nothing to invert
        if card_name0 in ['DTABLE', 'GRDSET', 'SESUP', 'DOPTPRM', 'MONPNT1', 'SUPORT',
                          'MKAERO1', 'MATHP']:
            pass
        else:
            adict = getattr(self, dict_name)
            if isinstance(adict, dict):
                for key, card in adict.items():
                    if isinstance(card, list):
                        # some slots store lists of cards per id (e.g. loads)
                        alist = card
                        for cardi in alist:
                            rslot_map[cardi.type].append(key)
                        #msg = '%s; names=%s \ncard=%s' % (type(card), card_names, card)
                        #raise NotImplementedError(msg)
                    else:
                        rslot_map[card.type].append(key)

            elif isinstance(adict, list):
                alist = adict
                for value in alist:
                    if isinstance(value, list):
                        msg = '%s; names=%s value=%s' % (type(value), card_names, value)
                        raise NotImplementedError(msg)

                    # CSET/CSET1 entries have no id to map; skipped
                    if value.type in ['CSET1', 'CSET']:
                        pass
                        #rslot_map[value.type] = value.
                    else:
                        raise NotImplementedError('list; names=%s' % card_names)
            else:
                raise NotImplementedError('%s; names=%s' % (type(adict), card_names))
    return rslot_map
def get_rslot_map(self, reset_type_to_slot_map=False) -> Dict[str, str]:
    """gets the card_type -> slot-name map, rebuilding it when it is
    missing, empty, or a rebuild was explicitly requested"""
    cached = self._type_to_slot_map
    if reset_type_to_slot_map or not cached:
        self.reset_rslot_map()
    rslot_map = self._type_to_slot_map
    assert 'GRID' in rslot_map
    return rslot_map
def reset_rslot_map(self) -> None:
    """helper method for get_rslot_map; inverts _slot_to_type_map
    (slot -> card types) into card_type -> slot"""
    self._type_to_slot_map = {
        card_type: slot_name
        for slot_name, card_types in self._slot_to_type_map.items()
        for card_type in card_types
    }
@property
def nid_map(self) -> Dict[int, int]:
    """
    Gets the GRID/SPOINT/EPOINT ids to a sorted order.

    Returns
    -------
    nid_map : Dict[nid] : i
        nid : int
            the GRID/SPOINT/EPOINT id
        i : int
            the index

    ..note ::  GRIDs, SPOINTs, & EPOINTs are stored in separate slots,
               so they are unorganized.
    ..note ::  see ``self.get_nid_map(sort_ids=False)`` for the
               unsorted version
    """
    return self.get_nid_map(sort_ids=True)
def get_nid_map(self, sort_ids: bool=True) -> Dict[int, int]:
    """
    Maps the GRID/SPOINT/EPOINT ids to a sorted/unsorted order.

    Parameters
    ----------
    sort_ids : bool; default=True
        sort the ids

    Returns
    -------
    nid_map : Dict[nid] : i
        nid : int
            the GRID/SPOINT/EPOINT id
        i : int
            the index

    ..note ::  GRIDs, SPOINTs, & EPOINTs are stored in separate slots,
               so they are unorganized.
    """
    nids = []
    index_nids = []
    # GRIDs, SPOINTs & EPOINTs live in separate slots, so walk each in turn
    # and assign a running index in slot order
    i = 0
    for point_ids in (self.nodes, self.spoints, self.epoints):
        for nid in point_ids:
            nids.append(nid)
            index_nids.append(i)
            i += 1

    if sort_ids:
        isort = np.argsort(nids)
        nids = np.sort(nids)
        index_nids = np.array(index_nids)[isort]

    return dict(zip(nids, index_nids))
def get_cards_by_card_types(self, card_types: List[str],
                            reset_type_to_slot_map: bool=False,
                            stop_on_missing_card: bool=False) -> Dict[str, List[Any]]:
    """
    Gets the cards of the requested types, grouped by card type.

    Parameters
    ----------
    card_types : List[str]
        the list of keys to consider
    reset_type_to_slot_map : bool
        should the mapping dictionary be rebuilt (default=False);
        set to True if you added cards
    stop_on_missing_card : bool
        crashes if you request a card and it doesn't exist

    Returns
    -------
    out_dict : dict[str] = List[BDFCard()]
        the key=card_type, value=the card object
    """
    if not isinstance(card_types, (list, tuple)):
        raise TypeError(f'card_types={card_types!r} must be a list/tuple; '
                        f'type={type(card_types)}')

    #self._type_to_id_map = {
    #    'CQUAD4' : [1, 2, 3]
    #}
    #self._slot_to_type_map = {'elements' : [CQUAD4, CTRIA3]}
    # bug fix: the reset_type_to_slot_map argument was previously ignored
    # (hard-coded to False)
    rslot_map = self.get_rslot_map(reset_type_to_slot_map=reset_type_to_slot_map)

    out = {}
    for card_type in card_types:
        if card_type not in self.card_count:
            if stop_on_missing_card:
                keys = str(sorted(self.card_count.keys()))
                raise RuntimeError(f'{card_type} is not in the card_count; keys={keys}')
            out[card_type] = []
            continue

        #print('card_type=%r' % card_type)
        try:
            key = rslot_map[card_type]  # update attributes.py ~line 740
        except Exception:
            print(rslot_map.keys())
            # bug fix: the %r placeholder previously had no argument, so the
            # message was logged with a literal '%r'
            self.log.error("card_type=%r hasn't been added to "
                           "self._slot_to_type_map...check for typos", card_type)
            raise

        try:
            slot = getattr(self, key)
        except AttributeError:
            # ZONA aero cards live on a sub-object
            if hasattr(self.zona, key):
                slot = getattr(self.zona, key)
            else:
                raise

        ids = self._type_to_id_map[card_type]
        if isinstance(ids, bool):
            # some slots store a flag instead of an id list; nothing to collect
            continue

        cards = []
        for idi in ids:
            try:
                card = slot[idi]
            except KeyError:
                print(slot)
                msg = 'key=%r id=%r cannot be found\n' % (key, idi)
                # bug fix: previously reported the key where the id belonged
                msg += 'id=%s not found. Allowed=%s' % (
                    idi, np.unique(ids))
                raise KeyError(msg)
            except TypeError:
                raise TypeError('key=%s id=%s cannot be found' % (key, idi))

            if isinstance(card, list):
                # loads/spcs/mpcs store lists of mixed card types per id
                for cardi in card:
                    if cardi.type == card_type:
                        cards.append(cardi)
            else:
                cards.append(card)
        out[card_type] = cards
    return out
def get_SPCx_node_ids(self, spc_id: int,
                      consider_spcadd: bool=True,
                      stop_on_failure: bool=True) -> List[int]:
    """
    Gets the node ids constrained by the SPC/SPCADD/SPC1/SPCAX set.

    Parameters
    ----------
    spc_id : int
        the SPC id
    stop_on_failure : bool; default=True
        errors if parsing something new

    Returns
    -------
    node_ids : List[int]
        the constrained associated node ids
    """
    spcs = self.get_reduced_spcs(
        spc_id, consider_spcadd=consider_spcadd, stop_on_failure=stop_on_failure)

    node_ids = []
    unsupported = ''
    for card in spcs:
        if card.type in ('SPC', 'SPC1'):
            node_ids.extend(card.node_ids)
        else:
            # GMSPC/SPCAX and anything unexpected are collected and
            # reported once at the end
            unsupported += str(card)

    if unsupported:
        self.log.warning("get_SPCx_node_ids doesn't consider:\n%s" % unsupported.rstrip('\n'))
    return node_ids
def get_SPCx_node_ids_c1(self, spc_id: int, stop_on_failure: bool=True) -> Dict[str, List[int]]:
    """
    Gets the constrained components per node for an SPC/SPCADD/SPC1/SPCAX set.

    Parameters
    ----------
    spc_id : int
        the SPC id
    stop_on_failure : bool; default=True
        errors if parsing something new

    Returns
    -------
    node_ids_c1 : Dict[component] = node_ids
        component : str
            the DOF to constrain
        node_ids : List[int]
            the constrained node ids
    """
    spcs = self.get_reduced_spcs(spc_id, stop_on_failure=stop_on_failure)

    # accumulates component strings per node id (e.g. '1' + '23' -> '123')
    node_ids_c1 = defaultdict(str)
    unsupported = ''
    for card in spcs:
        card_type = card.type
        if card_type == 'SPC':
            for nid, c1 in zip(card.node_ids, card.components):
                assert nid is not None, card.node_ids
                node_ids_c1[nid] += c1
        elif card_type == 'SPC1':
            c1 = card.components
            for nid in card.node_ids:
                node_ids_c1[nid] += c1
        elif card_type in ('GMSPC', 'SPCAX'):
            unsupported += str(card)
        else:
            msg = 'get_SPCx_node_ids_c1 doesnt supprt %r' % card_type
            if stop_on_failure:
                raise RuntimeError(msg)
            self.log.warning(msg)

    if unsupported:
        self.log.warning("get_SPCx_node_ids_c1 doesn't consider:\n%s" % unsupported.rstrip('\n'))
    return node_ids_c1
def get_MPCx_node_ids(self, mpc_id: int,
                      consider_mpcadd: bool=True,
                      stop_on_failure: bool=True) -> List[List[int]]:
    """see ``pyNastran.bdf.mesh_utils.mpc_dependency.get_mpc_node_ids(...)``"""
    # thin delegation to the mesh_utils implementation
    return get_mpc_node_ids(self, mpc_id,
                            consider_mpcadd=consider_mpcadd,
                            stop_on_failure=stop_on_failure)
def get_MPCx_node_ids_c1(self, mpc_id: int,
                         consider_mpcadd: bool=True,
                         stop_on_failure: bool=True) -> Tuple[Dict[str, List[int]],
                                                              Dict[str, List[int]]]:
    """see ``pyNastran.bdf.mesh_utils.mpc_dependency.get_mpc_node_ids_c1(...)``"""
    # returns (independent_node_ids_c1, dependent_node_ids_c1)
    return get_mpc_node_ids_c1(self, mpc_id,
                               consider_mpcadd=consider_mpcadd,
                               stop_on_failure=stop_on_failure)
def get_mpcs(self, mpc_id: int, consider_mpcadd: bool=True,
             stop_on_failure: bool=True) -> Tuple[List[int], List[str]]:
    """see ``pyNastran.bdf.mesh_utils.mpc_dependency.get_mpcs(...)``"""
    # thin delegation; returns (nids, comps)
    return get_mpcs(self, mpc_id, consider_mpcadd=consider_mpcadd,
                    stop_on_failure=stop_on_failure)
def get_rigid_elements_with_node_ids(self, node_ids):
    """see ``pyNastran.bdf.mesh_utils.mpc_dependency.get_rigid_elements_with_node_ids(...)``"""
    # thin delegation to the mesh_utils implementation
    return get_rigid_elements_with_node_ids(self, node_ids)
def get_dependent_nid_to_components(self, mpc_id=None, stop_on_failure=True):
    """see ``pyNastran.bdf.mesh_utils.mpc_dependency.get_dependent_nid_to_components(...)``"""
    # thin delegation to the mesh_utils implementation
    return get_dependent_nid_to_components(
        self, mpc_id=mpc_id, stop_on_failure=stop_on_failure)
def _get_rigid(self) -> Any:
    """see ``pyNastran.bdf.mesh_utils.mpc_dependency.get_lines_rigid(...)``"""
    # thin delegation to the mesh_utils implementation
    return get_lines_rigid(self)
def _get_dvprel_ndarrays(self, nelements: int, pids: np.ndarray,
                         fdtype='float32', idtype='int32'):
    """see ``pyNastran.bdf.mesh_utils.dvxrel.get_dvprel_ndarrays(...)``"""
    # thin delegation to the mesh_utils implementation
    return get_dvprel_ndarrays(
        self, nelements, pids, fdtype=fdtype, idtype=idtype)
def get_reduced_loads(self, load_case_id, scale=1.,
                      consider_load_combinations=True,
                      skip_scale_factor0=False,
                      stop_on_failure=True, msg=''):
    """
    Accounts for scale factors.

    Parameters
    ----------
    load_case_id : int
        the desired LOAD id
    consider_load_combinations : bool; default=True
        look at the LOAD card
    scale : float; default=1.0
        additional scale factor on top of the existing LOADs
    skip_scale_factor0 : bool; default=False
        Skip loads with scale factor=0.0.
        Nastran does not do this.
        Nastran will fail if referenced loads do not exist.
    stop_on_failure : bool; default=True
        errors if parsing something new
    msg : str
        debug message

    Returns
    -------
    loads : List[loads]
        a series of load objects
    scale_factors : List[float]
        the associated scale factors
    is_grav : bool
        is there a gravity card

    .. warning:: assumes xref=True
    """
    if not isinstance(load_case_id, integer_types):
        msg = 'load_case_id must be an integer; type=%s, load_case_id:\n%r' % (
            type(load_case_id), load_case_id)
        raise TypeError(msg)

    try:
        load_case = self.Load(
            load_case_id, consider_load_combinations=consider_load_combinations, msg=msg)
    except KeyError:
        if stop_on_failure:
            raise
        self.log.error("could not find expected LOAD/LOADSET id=%s" % load_case_id)
        # bug fix: previously returned a bare [], which broke the documented
        # ``loads, scale_factors, is_grav = ...`` unpacking at call sites
        return [], [], False

    loads, scale_factors, is_grav = self._reduce_load_case(load_case, scale=scale)
    assert len(loads) == len(scale_factors)
    return loads, scale_factors, is_grav
def _reduce_load_case(self, load_case, scale=1., consider_load_combinations=True,
unallowed_load_ids=None, msg=''):
"""reduces a load case"""
scale_factors_out = []
loads_out = []
is_grav_out = False
if unallowed_load_ids is None:
unallowed_load_ids = []
for load in load_case:
if load.type == 'LOAD':
load_ids = load.get_load_ids()
load_scale = load.scale * scale
scale_factors = load.scale_factors
assert len(load_ids) == len(scale_factors), str(load)
scale_factors_temp = [load_scale * scalei for scalei in scale_factors]
for load_idi, scalei in zip(load_ids, scale_factors_temp):
# prevents recursion
if load_idi in unallowed_load_ids:
msg = 'There is a recursion error. LOAD trace=%s; load_id=%s' % (
unallowed_load_ids, load_idi)
raise RuntimeError(msg)
unallowed_load_ids2 = deepcopy(unallowed_load_ids)
unallowed_load_ids2.append(load_idi)
load_casei = self.Load(
load_idi, consider_load_combinations=consider_load_combinations, msg=msg)
loadsi, scale_factorsi, is_gravi = self._reduce_load_case(
load_casei, scale=scalei,
consider_load_combinations=consider_load_combinations,
unallowed_load_ids=unallowed_load_ids2)
if is_gravi:
is_grav_out = True
scale_factors_out += scale_factorsi
loads_out += loadsi
elif load.type in 'GRAV':
scale_factors_out.append(scale)
loads_out.append(load)
is_grav_out = True
else:
scale_factors_out.append(scale)
loads_out.append(load)
return loads_out, scale_factors_out, is_grav_out
def get_reduced_dloads(self, dload_id, scale=1., consider_dload_combinations=True,
                       skip_scale_factor0=False, msg=''):
    """
    Resolves a DLOAD id into (dloads, scale_factors), accounting for
    scale factors on DLOAD combinations.

    Parameters
    ----------
    dload_id : int
        the desired DLOAD id
    consider_dload_combinations : bool; default=True
        look at the DLOAD card
    scale : float; default=1.0
        additional scale factor on top of the existing LOADs
    skip_scale_factor0 : bool; default=False
        Skip loads with scale factor=0.0.
        Nastran does not do this.
        Nastran will fail if referenced loads do not exist.
    msg : str
        debug message

    Returns
    -------
    dloads : List[loads]
        a series of dload objects
    scale_factors : List[float]
        the associated scale factors

    .. warning:: assumes xref=True
    """
    dload_case = self.DLoad(
        dload_id, consider_dload_combinations=consider_dload_combinations, msg=msg)
    return self._reduce_dload_case(
        dload_case, scale=scale, skip_scale_factor0=skip_scale_factor0, msg=msg)
def _reduce_dload_case(self, dload_case, scale=1., unallowed_dload_ids=None,
skip_scale_factor0=False, msg=''):
"""
Reduces a dload case
Parameters
----------
dload_case : List[???]
a series of DLOAD cards
scale : float; default=1.0
additional scale factor on top of the existing LOADs
unallowed_dload_ids : List[int]; default=None
helper to prevent recursion
skip_scale_factor0 : bool; default=False
Skip loads with scale factor=0.0.
Nastran does not do this.
Nastran will fail if referenced loads do not exist.
msg : str
debug message
Returns
-------
dloads : List[loads]
a series of dload objects
scale_factors : List[float]
the associated scale factors
"""
scale_factors_out = []
dloads_out = []
if unallowed_dload_ids is None:
unallowed_dload_ids = []
assert isinstance(dload_case, list), dload_case
for dload in dload_case:
if dload.type == 'DLOAD':
dload_ids = dload.get_load_ids()
load_scale = dload.scale * scale
scale_factors = dload.scale_factors
if len(dload_ids) != len(scale_factors):
msg = 'dload_ids=%s scale_factors=%s\n%s' % (
dload_ids, scale_factors, str(dload))
raise ValueError(msg)
scale_factors_temp = [load_scale * scalei for scalei in scale_factors]
for dload_idi, scalei in zip(dload_ids, scale_factors_temp):
# prevents recursion
if dload_idi in unallowed_dload_ids:
msg = 'There is a recursion error. DLOAD trace=%s; dload_id=%s' % (
unallowed_dload_ids, dload_idi)
raise RuntimeError(msg)
unallowed_dload_ids2 = deepcopy(unallowed_dload_ids)
unallowed_dload_ids2.append(dload_idi)
dload_casei = self.DLoad(dload_idi, msg=msg)
dloadsi, scale_factorsi = self._reduce_dload_case(
dload_casei, scale=scalei, unallowed_dload_ids=unallowed_dload_ids2, )
scale_factors_out += scale_factorsi
dloads_out += dloadsi
else:
scale_factors_out.append(scale)
dloads_out.append(dload)
return dloads_out, scale_factors_out
def _get_maps(self, eids: Optional[List[int]]=None,
map_names: Optional[List[str]]=None,
consider_0d: bool=True,
consider_0d_rigid: bool=True,
consider_1d: bool=True,
consider_2d: bool=True,
consider_3d: bool=True) -> Any:
"""
Gets a series of mappings (e.g. node_id to element_id)
eids : List[int]
the element ids to consider
map_names : List[str]; default=None -> all
'edge_to_eid_map', 'eid_to_edge_map', 'nid_to_edge_map', 'nid_to_eid_map'
consider_0d : bool; default=True
considers CELASx, CDAMPx, CFAST
consider_0d_rigid : bool; default=True
considers MPC, RBAR, RBE2, RBE3, RSPLINE elements
consider_1d : bool; default=True
considers CONROD, CROD, CBAR, CBEAM elements
consider_2d : bool; default=True
considers CQUAD4, CQUAD8, CQUADR, CQUAD,
CTRIA3, CTRIA6, CTRIAX, CTRIAX6, CSHEAR elements
consider_3d : bool; default=True
considers CTETRA, CPENTA, CPYRAM, CHEXA elements
.. todo:: consider_0d support
.. todo:: consider_0d_rigid support
"""
allowed_maps = [
'edge_to_eid_map',
'eid_to_edge_map',
'nid_to_edge_map',
#'edge_to_nid_map', # unnecessary
#'eid_to_eid_map', # not added yet
'nid_to_eid_map',
#'face_to_edge_map', # what does this look like?
]
if map_names is None:
map_names = allowed_maps
else:
if isinstance(map_names, str):
map_names = [map_names]
if not isinstance(map_names, (list, tuple)):
msg = 'map_names=%s must be a list or tuple; not %s' % (
map_names, type(map_names))
raise TypeError(msg)
for name in map_names:
if name not in allowed_maps:
msg = 'name=%r; allowed=%s' % (name, sorted(allowed_maps.keys()))
raise RuntimeError(msg)
eid_to_edge_map = {}
eid_to_nid_map = {}
edge_to_eid_map = defaultdict(set)
nid_to_edge_map = defaultdict(set) #set() ???
nid_to_eid_map = defaultdict(set)
if eids is None:
eids = self.elements.keys()
types_to_consider = []
if consider_0d:
types_to_consider += []
if consider_0d_rigid:
types_to_consider += []
if consider_1d:
types_to_consider += ['CROD', 'CONROD', 'CBAR', 'CBEAM', 'CBEAM3']
if consider_2d:
types_to_consider += ['CTRIA3', 'CTRIAX', 'CTRIA6', 'CTRIAX6',
'CQUAD4', 'CQUAD', 'CQUAD8', 'CQUADR', 'CQUADX', 'CQUADX8',
'CSHEAR']
if consider_3d:
types_to_consider += ['CTETRA', 'CPENTA', 'CPYRAM', 'CHEXA']
for eid in eids:
elem = self.elements[eid]
if elem.type not in types_to_consider:
continue
node_ids = elem.node_ids
edges = elem.get_edge_ids()
eid_to_edge_map[eid] = edges
eid_to_nid_map[eid] = node_ids
for nid in node_ids:
nid_to_eid_map[nid].add(eid)
for edge in edges:
assert not isinstance(edge, integer_types), 'edge=%s elem=\n%s' % (edge, elem)
assert edge[0] < edge[1], 'edge=%s elem=\n%s' % (edge, elem)
try:
edge_to_eid_map[edge].add(eid)
except TypeError:
print(elem)
raise
for nid in edge:
nid_to_edge_map[nid].add(tuple(edge))
out = {}
allowed_maps = [
#'edge_to_eid_map',
#'eid_to_edge_map',
#'nid_to_edge_map',
#'edge_to_nid_map', # unnecessary
#'nid_to_eid_map',
]
for key in map_names:
if key == 'edge_to_eid_map':
out[key] = edge_to_eid_map
elif key == 'eid_to_edge_map':
out[key] = eid_to_edge_map
elif key == 'nid_to_edge_map':
out[key] = nid_to_edge_map
elif key == 'nid_to_eid_map':
out[key] = nid_to_eid_map
#elif key == 'eid_to_eid_map': # not added yet
#out[key] = eid_to_eid_map
else:
self.log.error('missing map %r' % key)
return out
def get_node_ids_with_elements(self, eids: List[int], msg: str='') -> Set[int]:
    """
    Get the node IDs associated with a list of element IDs

    Parameters
    ----------
    eids : List[int]
        list of element ID
    msg : str
        An additional message to print out if an element is not found

    Returns
    -------
    node_ids : Set[int]
        set of node IDs

    For example::

      eids = [1, 2, 3]  # list of elements with pid=1
      msg = ' which are required for pid=1'
      node_ids = bdf.get_node_ids_with_elements(eids, msg=msg)
    """
    # allow a single element id as a convenience
    if isinstance(eids, integer_types):
        eids = [eids]

    node_ids = set()
    for eid in eids:
        element = self.Element(eid, msg=msg)
        self.log.debug("element.pid = %s" % (element.pid))
        node_ids.update(element.node_ids)
    return node_ids
def get_elements_nodes_by_property_type(
        self,
        dtype: str='int32',
        save_element_types: bool=False) -> Tuple[Dict[Tuple[str, int], Tuple[List[int], List[int]]]]:
    """
    Gets a dictionary of (etype, pid) to [eids, node_ids]

    Parameters
    ----------
    dtype : str; default='int32'
        the type of the integers
    save_element_types : bool; default=False
        adds the etype_to_eids_pids_nids output

    Returns
    -------
    etype_pid_to_eids_nids : dict[(etype, pid)] : [eids, nids]
        etype : str
            the element type
        pid : int
            the property id
            CONRODS have a pid of 0
        eids : (neids, ) int ndarray
            the elements with the property id of pid
        nids : (neids, nnodes/element) int ndarray
            the nodes corresponding to the element
    etype_to_eids_pids_nids : dict[etype] : [eids, pids, nids]
        Enabled by save_element_types; default=None
        etype : str
            the element type
        eids : (neids, ) int ndarray
            the elements with the property id of pid
        pids : (neids, ) int ndarray
            the property ids
            CONRODS have a pid of 0
        nids : (neids, nnodes/element) int ndarray
            the nodes corresponding to the element
    """
    etype_to_eids_pids_nids = self.get_elements_properties_nodes_by_element_type(dtype=dtype)

    # split each element type's arrays by unique property id
    etype_pid_to_eids_nids = {}
    for etype, (eids, pids, nids) in etype_to_eids_pids_nids.items():
        for pid in np.unique(pids):
            ipid = np.where(pids == pid)[0]
            etype_pid_to_eids_nids[(etype, pid)] = [eids[ipid], nids[ipid, :]]

    if save_element_types:
        return etype_pid_to_eids_nids, etype_to_eids_pids_nids
    return etype_pid_to_eids_nids, None
def _upcast_int_dtype(self, dtype: str) -> str:
"""helper for 64-bit integers"""
if dtype == 'int32' and len(self.nodes) and max(self.nodes) > 2147483647:
# or max(self.elements) > 2147483647):
dtype = 'int64'
return dtype
def get_elements_properties_nodes_by_element_type(self,
                                                  dtype: str='int32',
                                                  solids: Optional[Dict[str, Any]]=None,
                                                  stop_if_no_eids: bool=True) -> Dict[str, Any]:
    """
    Gets a dictionary of element type to [eids, pids, node_ids]

    Parameters
    ----------
    dtype : str; default='int32'
        the type of the integers
    solids : dict[etype] : value
        etype : str
            the element type
            should only be CTETRA, CHEXA, CPENTA, CPYRAM
        value : varies
            (nnodes_min, nnodes_max) : Tuple(int, int)
                the min/max number of nodes for the element
            (nnodes, ) : Tuple(int, )
                the number of nodes
                useful if you only have CTETRA4s or only want CTETRA10s
                fails if you're wrong (and too low)
    stop_if_no_eids : bool; default=True
        raise a RuntimeError if no supported elements are found;
        otherwise just log a warning

    Returns
    -------
    etype_to_eids_pids_nids : dict[etype] : [eids, pids, nids]
        etype : str
            the element type
        eids : (neids, ) int ndarray
            the elements with the property id of pid
        pids : (neids, ) int ndarray
            the property ids
            CONRODS have a pid of 0
        nids : (neids, nnodes/element) int ndarray
            the nodes corresponding to the element
    """
    dtype = self._upcast_int_dtype(dtype)
    # these elements have no property; their pid is reported as 0
    etypes_no_pids = {
        'CELAS4', 'CDAMP4', 'CHBDYG', 'GENEL',
    }

    etypes = {
        'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4',
        'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4', 'CDAMP5',
        'CROD', 'CONROD', 'CTUBE',
        'CBAR', 'CBEAM', 'CBEND', 'CBEAM3',
        'CSHEAR', 'CVISC',
        'CTRIA3', 'CTRIA6', 'CTRIAR',
        'CQUAD4', 'CQUAD8', 'CQUADR', 'CQUAD',
        'CPLSTN3', 'CPLSTN6', 'CPLSTN4', 'CPLSTN8',
        #'CPLSTS3', 'CPLSTS6', 'CPLSTS4', 'CPLSTS8',
        'CTRAX3', 'CTRAX6', 'CTRIAX', 'CTRIAX6',
        'CQUADX', 'CQUADX4', 'CQUADX8',
        'CTETRA', 'CPENTA', 'CHEXA', 'CPYRAM',
        'CBUSH', 'CBUSH1D', 'CBUSH2D', 'CFAST', 'CGAP',
        # not supported
        'GENEL', 'CHBDYG',
    }
    output = {}

    if solids is None:
        solids = {
            'CTETRA' : (4, 10),
            #'CTETRA' : (10, ),
            'CHEXA' : (8, 20),
            'CPENTA' : (6, 15),
            'CPYRAM' : (5, 13),
        }
    etypes_found = []
    for etype in etypes:
        if etype not in self._type_to_id_map:
            continue
        eids_list = self._type_to_id_map[etype]
        if not eids_list:
            continue
        etypes_found.append(etype)
        eids = np.array(eids_list, dtype=dtype)
        neids = len(eids)

        # the first element fixes the node count for fixed-length types
        elem0 = self.elements[eids[0]]
        nnodes = len(elem0.nodes)

        if etype not in solids or len(solids[etype]) == 1:
            # fixed-length elements
            pids = np.zeros(neids, dtype=dtype)
            nids = np.zeros((neids, nnodes), dtype=dtype)
            for i, eid in enumerate(eids):
                elem = self.elements[eid]
                pid = 0 if elem.type in etypes_no_pids else elem.Pid()
                assert pid is not None, elem
                pids[i] = pid
                nidsi = elem.node_ids
                try:
                    nids[i, :] = nidsi
                except TypeError:
                    # missing midside nodes come back as None; store them as 0
                    nids[i, :] = [nid if nid is not None else 0
                                  for nid in nidsi]
            output[etype] = [eids, pids, nids]
        else:
            # SOLID elements can be variable length
            nnodes_min = min(solids[etype])
            nnodes_max = max(solids[etype])
            # bug fix: pids was hard-coded to 'int32', defeating the
            # int64 upcast performed above
            pids = np.zeros(neids, dtype=dtype)
            nids = np.zeros((neids, nnodes_max), dtype=dtype)
            ieids_max = []
            ieids_min = []
            for i, eid in enumerate(eids):
                elem = self.elements[eid]
                pid = elem.Pid()
                assert pid is not None, elem
                pids[i] = pid
                nidsi = elem.node_ids
                nnodesi = len(nidsi)
                if nnodesi == nnodes_max:
                    ieids_max.append(i)
                else:
                    ieids_min.append(i)
                try:
                    nids[i, :nnodesi] = nidsi
                except TypeError:
                    # bug fix: the None-node fallback previously assigned the
                    # full row (nids[i, :]), which fails for partial-length
                    # solid elements
                    nids[i, :nnodesi] = [nid if nid is not None else 0
                                         for nid in nidsi]
            if ieids_max:
                etype_max = elem.type + str(nnodes_max)
                ieids_max = np.array(ieids_max, dtype=dtype)
                output[etype_max] = [eids[ieids_max], pids[ieids_max], nids[ieids_max, :]]
            if ieids_min:
                etype_min = elem.type + str(nnodes_min)
                ieids_min = np.array(ieids_min, dtype=dtype)
                output[etype_min] = [eids[ieids_min], pids[ieids_min], nids[ieids_min, :nnodes_min]]

    if not output:
        # bug fix: the warning used to fire whenever stop_if_no_eids was True,
        # even when output was fully populated, and the stop/warn semantics of
        # the flag were inverted (stop_if_no_eids=True now actually stops)
        msg = (
            'get_elements_properties_nodes_by_element_type output is empty; '
            'nelements=%s; etypes_found=%s' % (
                len(self.elements), etypes_found))
        if stop_if_no_eids:
            raise RuntimeError(msg)
        self.log.warning(msg)
    return output
#--------------------
# ELEMENT CARDS
def get_element_ids_list_with_pids(self, pids: Optional[List[int]]=None) -> List[int]:
    """
    Gets all the element IDs with a specific property ID.

    Parameters
    ----------
    pids : List[int]; default=None -> all
        list of property ID

    Returns
    -------
    element_ids : List[int]
        the element ids

    For example, we want to get all the element ids with ``pids=[1, 2, 3]``

    .. code-block:: python

       model = BDF()
       model.read_bdf(bdf_filename)
       pids = [1, 2, 3]
       eids_list = model.get_element_ids_list_with_pids(pids)
       >>> eids_list
       [10, 11, 20, 21, 30, 31]
    """
    # these element types have no property; they are treated as pid=0
    etypes_no_pids = [
        'CELAS4', 'CDAMP4', 'CHBDYG', 'GENEL',
    ]
    if pids is None:
        pids = set(list(self.properties.keys()))
    elif isinstance(pids, integer_types):
        pids = {pids}
    else:
        assert isinstance(pids, (list, tuple)), 'pids=%s type=%s' % (pids, type(pids))

    return [
        eid for eid, element in sorted(self.elements.items())
        if (0 if element.type in etypes_no_pids else element.Pid()) in pids
    ]
def get_pid_to_node_ids_and_elements_array(self,
                                           pids: Union[List[int], int, None]=None,
                                           etypes: Optional[List[str]]=None,
                                           idtype: str='int32',
                                           msg: str='') -> Tuple[Dict[int, str], np.ndarray]:
    """
    a work in progress

    Parameters
    ----------
    pids : List[int]
        list of property ID
    etypes : List[str]
        element types to consider

    Returns
    -------
    pid_to_eids_ieids_map : dict[(pid, etype)] = eid_ieid
        eid_ieid : (Nelements, 2) int ndarray
            eid is the element id
            ieid is the index in the node_ids array
    node_ids : (nelements, nnodes) int ndarray
        nelements : int
            the number of elements in the property type
        nnodes : int
            varies based on the element type
    """
    if pids is None:
        pids = list(self.properties)
    elif isinstance(pids, integer_types):
        # bug fix: previously assigned ``[int]`` (the builtin type object),
        # not the requested property id
        pids = [pids]

    assert isinstance(pids, (list, tuple, np.ndarray)), 'pids=%s type=%s' % (pids, type(pids))
    model = self
    try:
        etype_to_nids_map, pid_to_eids_ieids_map = _get_pid_to_node_ids_and_elements_array(
            model, pids, etypes, msg, idtype)
    except OverflowError:
        # retry with 64-bit ids when the 32-bit representation overflows
        assert idtype == 'int32', 'idtype=%r while overflowing...' % idtype
        etype_to_nids_map, pid_to_eids_ieids_map = _get_pid_to_node_ids_and_elements_array(
            model, pids, etypes, msg, idtype='int64')
    return pid_to_eids_ieids_map
def get_element_ids_dict_with_pids(self,
                                   pids: Union[List[int], int, None]=None,
                                   stop_if_no_eids: bool=True, msg: str='') -> Dict[int, List[int]]:
    """
    Gets all the element IDs with a specific property ID.

    Parameters
    ----------
    pids : List[int] / int
        list of property ID
    stop_if_no_eids : bool; default=True
        prevents crashing if there are no elements
        setting this to False really doesn't make sense for non-DMIG models

    Returns
    -------
    element_ids : dict[pid] = List[eid]
        dictionary of lists by property
        pid : int
            property id
        eid : int
            element id

    For example, we want all the elements with ``pids=[4, 5, 6]``,
    but we want them in separate groups

    .. code-block:: python

       model = BDF()
       model.read_bdf(bdf_filename)
       pids = [4, 5, 6]
       eids_dict = model.get_element_ids_dict_with_pids(pids)
       >>> eids_dict
       {
           4 : [40, 41],
           5 : [50, 51],
           6 : [60, 61],
       }

       # consider all properties
       eids_dict = model.get_element_ids_dict_with_pids()

    Notes
    -----
    What happens with CONRODs?
    """
    if pids is None:
        pids = list(self.properties)
    elif isinstance(pids, integer_types):
        pids = [pids]
    assert isinstance(pids, (list, tuple, np.ndarray)), 'pids=%s type=%s' % (pids, type(pids))

    # these element types carry no property; they're skipped silently
    elements_without_properties = {
        'CONROD', 'CELAS2', 'CELAS4', 'CDAMP2', 'CDAMP4',
        'CHBDYG', 'GENEL'}

    pid_to_eids_map = {pid: [] for pid in pids}
    log = self.log
    nfound = 0
    for eid, element in self.elements.items():
        try:
            pid = element.Pid()
        except AttributeError:
            if element.type in elements_without_properties:
                continue
            print(element)
            raise
        if pid in pids:
            pid_to_eids_map[pid].append(eid)
            nfound += 1

    if nfound == 0 and stop_if_no_eids:
        raise RuntimeError('no elements with properties found%s\ncard_count=%s' % (
            msg, str(self.card_count)))
    elif nfound == 0:
        log.warning('no elements with properties found%s' % msg)
    return pid_to_eids_map
def get_node_id_to_element_ids_map(self) -> Dict[int, List[int]]:
    """
    Returns a dictionary that maps node IDs to a list of element IDs

    .. todo:: support 0d or 1d elements
    .. todo:: support elements with missing nodes
              (e.g. CQUAD8 with missing nodes)
    """
    skip_cards = {
        'CCONEAX',
    }
    # initialize the mapper for GRIDs and SPOINTs
    nid_to_eids_map = {nid: [] for nid in self.nodes}
    if self.spoints:
        for nid in sorted(self.spoints):
            nid_to_eids_map[nid] = []

    for eid, element in self.elements.items():
        if element.type in skip_cards:
            continue
        try:
            nids = element.node_ids
        except AttributeError:
            # not supported for 0-D and 1-D elements
            print(element.type)
        else:
            for nid in nids:
                if nid:  # e.g. CQUAD8 with missing node
                    nid_to_eids_map[nid].append(eid)
    return nid_to_eids_map
def get_node_id_to_elements_map(self) -> Dict[int, List[int]]:
    """
    Returns a dictionary that maps node IDs to a list of elements.

    Returns
    -------
    nid_to_elements_map : Dict[nid]=List[eid]
        node id to a list of elements
        (note: the values are element objects, despite the annotation)

    .. todo:: support 0d or 1d elements
    .. todo:: support elements with missing nodes
              (e.g. CQUAD8 with missing nodes)
    """
    skip_cards = {
        'CCONEAX',
    }
    # initialize the mapper for GRIDs, SPOINTs and EPOINTs
    nid_to_elements_map = {}
    for point_ids in (self.nodes, self.spoints, self.epoints):
        for nid in point_ids:
            nid_to_elements_map[nid] = []

    for element in self.elements.values():
        if element.type in skip_cards:
            continue
        try:
            nids = element.node_ids
        except AttributeError:
            # not supported for 0-D and 1-D elements
            print(element.type)
        else:
            for nid in nids:
                if nid:  # e.g. CQUAD8 with missing node
                    nid_to_elements_map[nid].append(element)
    return nid_to_elements_map
def get_property_id_to_element_ids_map(self, msg: str='') -> Dict[int, List[int]]:
    """
    Returns a dictionary that maps a property ID to a list of elements.

    Returns
    -------
    pid_to_eids_map : Dict[pid]=List[eid]
        property id to a list of elements
    msg : str; default=''
        a message added to the error message
    """
    # elements with no property id (plus thermal CHBDYP) are skipped
    skip_elements = {
        'CONROD', 'CONM2', 'CELAS2', 'CELAS4', 'CDAMP2', 'CDAMP4',
        'GENEL', 'CHACAB', 'CAABSF',
        # nastran 95
        'CHEXA1', 'CHEXA2',
        'CIHEX1', 'CIHEX2',
        # thermal
        'CHBDYP',
    }

    pid_to_eids_map = {pid: [] for pid in self.property_ids}
    #for pid in self.phbdys.keys():
        #assert pid not in pid_to_eids_map, 'pid=%s is already used and must be used by PHBDY' % pid
        #pid_to_eids_map[pid] = []

    for eid in self.element_ids:
        element = self.Element(eid)
        if element.type in skip_elements:
            continue
        if not hasattr(element, 'pid'):
            continue
        pid = element.Pid()
        if pid < 0:  # CTRIAX6
            continue
        try:
            pid_to_eids_map[pid].append(eid)
        except KeyError:
            print(element)
            raise KeyError('pid=%s is invalid for card%s=\n%s' % (pid, msg, str(element)))
    return pid_to_eids_map
def get_material_id_to_property_ids_map(self, msg: str='') -> Dict[int, List[int]]:
    """
    Returns a dictionary that maps a material ID to a list of properties

    Returns
    -------
    mid_to_pids_map : dict[int] = int
        the mapping
    msg : str; default=''
        a message added to the error message

    .. code-block:: python

       >>> mid_to_pid_map = get_material_id_to_property_ids_map()
       >>> mid = 1
       >>> pids = get_material_id_to_property_ids_map[mid]
       >>> pids
       [1, 2, 3]

    .. note:: all properties require an mid to be counted (except for
              PCOMP, which has multiple mids)
    """
    mid_to_pids_map = {}
    # pre-seed the map with every known material id
    mids = self.get_material_ids()
    for mid in mids:
        mid_to_pids_map[mid] = []
    # these property types reference no material at all
    properties_without_materials = {
        'PGAP', 'PELAS', 'PVISC', 'PBUSH', 'PDAMP', 'PFAST', 'PBUSH1D',
        'PACABS', 'PAABSF', 'PACBAR',
    }
    for pid in self.property_ids:
        prop = self.Property(pid)
        prop_type = prop.type
        if prop_type in ['PCOMP', 'PCOMPG']:
            # layered composite: one material per ply
            mids = prop.Mids()
            for mid in mids:
                if pid not in mid_to_pids_map[mid]:
                    mid_to_pids_map[mid].append(pid)
                else:  # PCOMP
                    # NOTE(review): this branch re-tests the condition that
                    # just failed, so the inner append looks unreachable —
                    # confirm intent
                    if hasattr(prop, 'mid') and prop.Mid() in mids:
                        if pid not in mid_to_pids_map[mid]:
                            mid_to_pids_map[mid].append(pid)
        elif prop_type in properties_without_materials:
            pass
        elif prop_type in ['PSHELL']:
            # PSHELL may carry several materials (membrane/bending/shear/coupling)
            mids = prop.material_ids
            for i, mid in enumerate(mids):
                if mid is None or mid == 0:
                    continue
                try:
                    mid_to_pids_map[mid].append(pid)
                except KeyError:
                    print(prop)
                    raise KeyError('i=%s mid=%s is invalid for card%s=\n%s' % (
                        i, mid, msg, str(prop)))
        else:
            # simple single-material property
            mid = prop.Mid()
            try:
                mid_to_pids_map[mid].append(pid)
            except KeyError:
                print(prop)
                raise KeyError('mid=%s is invalid for card %s=\n%s' % (mid, msg, str(prop)))
    return mid_to_pids_map
def get_reduced_nsms(self, nsm_id: int,
                     consider_nsmadd: bool=True,
                     stop_on_failure: bool=True) -> List[Any]:
    """
    Get all traced NSMs that are part of a set

    Parameters
    ----------
    nsm_id : int
        the NSM id
    consider_nsmadd : bool
        NSMADDs should not be considered when referenced from an NSMADD
        from a case control, True should be used.
    stop_on_failure : bool; default=True
        errors if parsing something new

    Returns
    -------
    mpcs : List[NSM]
        the various NSMs
    """
    if not isinstance(nsm_id, integer_types):
        msg = 'nsm_id must be an integer; type=%s, nsm_id=\n%r' % (type(nsm_id), nsm_id)
        raise TypeError(msg)

    try:
        nsms = self.NSM(nsm_id, consider_nsmadd=consider_nsmadd)
    except KeyError:
        if stop_on_failure:
            raise
        self.log.error("could not find expected NSM id=%s" % nsm_id)
        return []

    reduced = []
    for nsm in nsms:
        if nsm.type != 'NSMADD':
            reduced.append(nsm)
            continue
        # NSMADD: recursively resolve each referenced NSM set
        for nsmi in nsm.nsm_ids:
            if isinstance(nsmi, list):
                for nsmii in nsmi:
                    sub_id = nsmii if isinstance(nsmii, integer_types) else nsmii.conid
                    reduced += self.get_reduced_nsms(
                        sub_id, consider_nsmadd=False,
                        stop_on_failure=stop_on_failure)
            else:
                assert isinstance(nsmi, integer_types), nsmi
                reduced += self.get_reduced_nsms(
                    nsmi, consider_nsmadd=False,
                    stop_on_failure=stop_on_failure)
    return reduced
def get_reduced_mpcs(self, mpc_id: int,
                     consider_mpcadd: bool=False,
                     stop_on_failure: bool=True) -> List[Any]:
    """
    Trace an MPC set down to its base (non-MPCADD) cards.

    Parameters
    ----------
    mpc_id : int
        the MPC id
    consider_mpcadd : bool
        MPCADDs should not be considered when referenced from an MPCADD
        from a case control, True should be used.
    stop_on_failure : bool; default=True
        errors if parsing something new

    Returns
    -------
    mpcs : List[MPC]
        the various MPCs
    """
    if not isinstance(mpc_id, integer_types):
        raise TypeError('mpc_id must be an integer; type=%s, mpc_id=\n%r' % (
            type(mpc_id), mpc_id))
    try:
        mpcs = self.MPC(mpc_id, consider_mpcadd=consider_mpcadd)
    except KeyError:
        if stop_on_failure:
            raise
        self.log.error("could not find expected MPC id=%s" % mpc_id)
        return []

    reduced = []
    for mpc in mpcs:
        if mpc.type != 'MPCADD':
            # base card; keep it directly
            reduced.append(mpc)
            continue
        # expand the MPCADD one level; consider_mpcadd=False keeps the
        # recursion from re-expanding nested MPCADD references
        for child in mpc.mpc_ids:
            if isinstance(child, list):
                for childi in child:
                    # entries may be raw ids or cross-referenced objects
                    sub_id = childi if isinstance(childi, integer_types) else childi.conid
                    reduced.extend(self.get_reduced_mpcs(
                        sub_id, consider_mpcadd=False,
                        stop_on_failure=stop_on_failure))
            else:
                assert isinstance(child, integer_types), child
                reduced.extend(self.get_reduced_mpcs(
                    child, consider_mpcadd=False,
                    stop_on_failure=stop_on_failure))
    return reduced
def get_reduced_spcs(self, spc_id: int,
                     consider_spcadd: bool=True,
                     stop_on_failure: bool=True) -> List[Any]:
    """
    Trace an SPC set down to its base (non-SPCADD) cards.

    Parameters
    ----------
    spc_id : int
        the SPC id
    consider_spcadd : bool
        SPCADDs should not be considered when referenced from an SPCADD
        from a case control, True should be used.
    stop_on_failure : bool; default=True
        errors if parsing something new

    Returns
    -------
    spcs : List[SPC]
        the various SPCs
    """
    if not isinstance(spc_id, integer_types):
        raise TypeError('spc_id must be an integer; type=%s, spc_id=\n%r' % (
            type(spc_id), spc_id))
    try:
        spcs = self.SPC(spc_id, consider_spcadd=consider_spcadd)
    except KeyError:
        if stop_on_failure:
            raise
        self.log.error("could not find expected SPC id=%s" % spc_id)
        return []

    reduced = []
    for spc in spcs:
        if spc.type != 'SPCADD':
            # base card; keep it directly
            reduced.append(spc)
            continue
        # expand the SPCADD one level; consider_spcadd=False keeps the
        # recursion from re-expanding nested SPCADD references
        for child in spc.sets:
            if isinstance(child, list):
                for childi in child:
                    # entries may be raw ids or cross-referenced objects
                    sub_id = childi if isinstance(childi, integer_types) else childi.conid
                    reduced.extend(self.get_reduced_spcs(
                        sub_id,
                        consider_spcadd=False,
                        stop_on_failure=stop_on_failure))
            else:
                assert isinstance(child, integer_types), child
                reduced.extend(self.get_reduced_spcs(
                    child,
                    consider_spcadd=False,
                    stop_on_failure=stop_on_failure))
    return reduced
def get_spcs(self, spc_id: int,
             consider_nodes: bool=False,
             stop_on_failure: bool=True) -> Tuple[List[int], List[str]]:
    """
    Gets the SPCs in a semi-usable form.

    Parameters
    ----------
    spc_id : int
        the desired SPC ID
    consider_nodes : bool; default=False
        True : also include constraints from the GRID card PS field
        False : ignore the GRID card PS field
    stop_on_failure : bool; default=True
        passed through to get_reduced_spcs; errors on unknown SPC ids

    Returns
    -------
    nids : List[int]
        the constrained nodes
    comps : List[str]
        the components that are constrained on each node

    Considers:
      - SPC
      - SPC1
      - SPCADD
      - GRID

    Doesn't consider:
      - non-zero enforced value on SPC
      - GMSPC
    """
    warnings = ''
    # resolve SPCADDs down to base SPC/SPC1 cards first
    spcs = self.get_reduced_spcs(spc_id, consider_spcadd=True, stop_on_failure=stop_on_failure)
    nids = []
    comps = []
    for spc in spcs:
        if spc.type == 'SPC1':
            # one component string applies to every node on the card
            nodes = spc.nodes
            nnodes = len(nodes)
            nids += nodes
            comps += [str(spc.components)] * nnodes
        elif spc.type == 'SPC':
            # per-node components; the enforced value is intentionally dropped
            for nid, comp, unused_enforced in zip(spc.nodes, spc.components, spc.enforced):
                nids.append(nid)
                comps.append(comp)
        else:
            # unsupported card types are collected and reported once below
            warnings += str(spc)
            #raise NotImplementedError(spc.type)
    if warnings:
        self.log.warning("get_spcs doesn't consider:\n%s" % warnings.rstrip('\n'))

    if consider_nodes:
        # GRID PS field permanently constrains the node
        for nid, node in self.nodes.items():
            if node.ps:
                nids.append(nid)
                comps.append(node.ps)
    return nids, comps
def get_mklist(self) -> np.ndarray:
    """gets the MKLIST vector from MKAERO1/MKAERO2"""
    # gather the Mach/reduced-frequency values from every MKAEROx card
    pairs = []
    for mkaero in self.mkaeros:
        pairs.extend(mkaero.mklist())
    if not pairs:
        return np.array([])
    return np.hstack([pairs])
def _get_pid_to_node_ids_and_elements_array(model: BDF,
                                            pids: List[int],
                                            etypes: List[str],
                                            msg: str,
                                            idtype: str):
    """
    a work in progress

    Parameters
    ----------
    model : BDF
        the model to pull elements from
    pids : List[int]
        list of property ID
    etypes : List[str] / None
        element types to consider; None -> all element types
    msg : str
        context string appended to warnings/errors
    idtype : str
        'int32', 'int64'

    Returns
    -------
    etype_to_nids_map : dict[etype] = node_ids
        node_ids : (nelements, nnodes) int ndarray
        nelements : int
            the number of elements in the property type
        nnodes : int
            varies based on the element type
    pid_to_eids_ieids_map : dict[(pid, etype)] = eid_ieid
        eid_ieid : (Nelements, 2) int ndarray
        eid is the element id
        ieid is the index in the node_ids array
    """
    log = model.log
    # (nnodes_linear, nnodes_quadratic) per solid element; used below to
    # trim the unused midside-node columns off an all-linear mesh
    nnodes_map = {
        'CTETRA': (4, 10),
        'CPYRAM': (5, 13),
        'CPENTA': (6, 15),
        'CHEXA': (8, 20),
    }
    skip_elements = ['CONROD']
    # element types that have no property id
    etypes_no_pids = [
        'CELAS4', 'CDAMP4', 'CHBDYG', 'GENEL',
    ]
    # element types whose node_ids may contain None (blank fields);
    # those Nones are mapped to 0 in the output array
    etypes_none_nodes = [
        'CELAS1', 'CELAS2', 'CELAS4',
        'CDAMP1', 'CDAMP2', 'CDAMP4', 'CDAMP5',
        'CBUSH', 'CBUSH1D', 'CFAST',
        'CTRIAX', 'CQUADX', 'CTRIAX6',
        'CTRIA6', 'CQUAD8', 'CQUAD',
        'CTETRA', 'CPENTA', 'CHEXA', 'CPYRAM',
        'CRAC2D', 'CRAC3D', 'CHBDYP', #'CHBDYG',
        'CHACAB', 'CAABSF',
        'CBEAM3',
    ]

    # ------------------------------------------------------------
    etypes_ = model._slot_to_type_map['elements']
    if etypes is None:
        etypes = etypes_

    etype_to_nids_map = {}
    pid_to_eids_ieids_map = defaultdict(list)
    pid_to_eids_map = {}
    for pid in pids:
        pid_to_eids_map[pid] = []

    for etype in etypes_:
        # bug fix: filter on the *requested* types; the original checked
        # ``etype not in etypes_`` which is always False, so the etypes
        # argument was silently ignored
        if etype not in etypes:
            continue
        eids = model._type_to_id_map[etype]
        if len(eids) == 0:
            continue
        if etype in skip_elements:
            log.warning('skipping etype=%s because there are no properties%s' % (
                etype, msg))
            continue

        # get the number of nodes of the first element
        eid = eids[0]
        element0 = model.elements[eid]
        try:
            node_check, nnodes = nnodes_map[element0.type]
        except KeyError:
            # not a solid; size the array off the first element
            node_check = 0
            nnodes = len(element0.node_ids)

        neids = len(eids)
        node_ids = np.zeros((neids, nnodes), dtype=idtype)
        if etype in etypes_none_nodes:
            for ieid, eid in enumerate(eids):
                element = model.elements[eid]
                node_idsi = element.node_ids
                try:
                    node_ids[ieid, :len(node_idsi)] = [
                        nid if nid is not None else 0
                        for nid in node_idsi]
                except Exception:
                    log.error('This error can occur when you have '
                              'linear and quadratic solid elements '
                              'within the same model\n%s' % element)
                    raise
                if etype in etypes_no_pids:
                    pid = 0
                else:
                    pid = element.Pid()
                #nids_to_pids_map[]
                pid_to_eids_ieids_map[(pid, etype)].append((eid, ieid))
            if node_check:
                # try to reduce the dimension of the array
                max_nid = node_ids[:, node_check:].max()
                if max_nid == 0:
                    node_ids = node_ids[:, :node_check]
        else:
            try:
                for ieid, eid in enumerate(eids):
                    element = model.elements[eid]
                    node_idsi = element.node_ids
                    try:
                        node_ids[ieid, :] = node_idsi
                    except TypeError:
                        print(element)
                        print(node_idsi)
                        raise
                    if etype in etypes_no_pids:
                        pid = 0
                    else:
                        pid = element.Pid()
                    #nids_to_pids_map[]
                    pid_to_eids_ieids_map[(pid, etype)].append((eid, ieid))
            except TypeError:
                print(etype)
                print(element)
                raise
        etype_to_nids_map[etype] = node_ids

    for key, value in pid_to_eids_ieids_map.items():
        pid_to_eids_ieids_map[key] = np.array(value, dtype=idtype)
    return etype_to_nids_map, pid_to_eids_ieids_map
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,630
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/cards/loads/static_loads.py
|
# pylint: disable=R0902,R0904,R0914
"""
All static loads are defined in this file. This includes:
* LOAD
* GRAV
* ACCEL
* ACCEL1
* FORCE / MOMENT
* FORCE1 / MOMENT1
* FORCE2 / MOMENT2
* MOMENT
* PLOAD
* PLOAD2
* PLOAD4
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import numpy as np
from numpy import array, cross, allclose, unique
from numpy.linalg import norm # type: ignore
#from pyNastran.bdf.errors import CrossReferenceError
from pyNastran.bdf import MAX_INT
from pyNastran.utils.numpy_utils import integer_types, float_types
from pyNastran.bdf.cards.loads.loads import Load, LoadCombination
from pyNastran.bdf.field_writer_8 import set_blank_if_default
from pyNastran.bdf.cards.base_card import BaseCard, expand_thru, expand_thru_by # _node_ids,
from pyNastran.bdf.cards.collpase_card import collapse_thru_by
from pyNastran.bdf.bdf_interface.assign_type import (
integer, integer_or_blank, double, double_or_blank, string, string_or_blank,
integer_or_string, fields, integer_string_or_blank, integer_or_double)
from pyNastran.bdf.field_writer_8 import print_card_8, print_float_8, set_string8_blank_if_default
from pyNastran.bdf.field_writer_16 import (
print_card_16, print_float_16, set_string16_blank_if_default)
from pyNastran.bdf.field_writer_double import print_card_double, print_scientific_double
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
class LOAD(LoadCombination):
    """
    +------+-----+------+------+----+-----+----+----+----+
    |   1  |  2  |  3   |  4   | 5  |  6  | 7  | 8  | 9  |
    +======+=====+======+======+====+=====+====+====+====+
    | LOAD | SID |  S   |  S1  | L1 | S2  | L2 | S3 | L3 |
    +------+-----+------+------+----+-----+----+----+----+
    |      | S4  |  L4  | etc. |    |     |    |    |    |
    +------+-----+------+------+----+-----+----+----+----+
    | LOAD | 101 | -0.5 | 1.0  | 3  | 6.2 | 4  |    |    |
    +------+-----+------+------+----+-----+----+----+----+
    """
    type = 'LOAD'

    @classmethod
    def _init_from_empty(cls):
        """creates a valid placeholder card (used for testing)"""
        sid = 1
        scale = 1.
        scale_factors = [1.]
        load_ids = [1]
        return cls(sid, scale, scale_factors, load_ids, comment='')

    def __init__(self, sid, scale, scale_factors, load_ids, comment=''):
        """
        Creates a LOAD card

        Parameters
        ----------
        sid : int
            load id
        scale : float
            overall scale factor
        scale_factors : List[float]
            individual scale factors (corresponds to load_ids)
        load_ids : List[int]
            individual load_ids (corresponds to scale_factors)
        comment : str; default=''
            a comment for the card

        .. note:: MSC can handle self-referencing loads, NX cannot
        """
        LoadCombination.__init__(self, sid, scale, scale_factors, load_ids,
                                 comment=comment)

    def get_load_types(self):
        """
        Gets the unique card types referenced by this LOAD.

        .. note:: requires a cross referenced load
        """
        load_types = []
        for loads in self.load_ids_ref:
            for load in loads:
                if isinstance(load, LOAD):
                    lid = load.lid
                    if isinstance(lid, list):
                        # bug fix: append the type as a single entry;
                        # ``load_types += load.type`` extended the list with
                        # the individual characters of the type string
                        load_types += [load.type]
                    else:  # int
                        load_types += [load.type] + load.get_load_types()
                elif isinstance(load, (Load0, Load1, Load2, PLOAD4, GRAV)):
                    load_types += [load.type]
                else:
                    raise NotImplementedError(load)
        load_types = list(set(load_types))
        #print("load_types = ", load_types)
        return load_types

    def get_reduced_loads(self, resolve_load_card=False, filter_zero_scale_factors=False):
        """
        Get all load objects in a simplified form, which means all
        scale factors are already applied and only base objects
        (no LOAD cards) will be returned.

        Parameters
        ----------
        resolve_load_card : bool; default=False
            Nastran requires that LOAD cards do not reference other load cards
            This feature can be enabled.
        filter_zero_scale_factors : bool; default=False
            Nastran does not filter loads with a 0.0 scale factor.  So, if you
            have a 0.0 load, but are missing load ids, Nastran will throw a
            fatal error.

        .. todo:: lots more object types to support
        """
        scale_factors = []
        loads = []
        simple_loads = [
            'FORCE', 'FORCE1', 'FORCE2',
            'MOMENT', 'MOMENT1', 'MOMENT2',
            'PLOAD1', 'PLOAD2', 'PLOAD4',
            'GRAV', 'ACCEL', 'ACCEL1']
        load_scale = self.scale  # global
        for (loads_pack, i_scale) in zip(self.load_ids, self.scale_factors):
            scale = i_scale * load_scale  # actual scale = global * local
            if isinstance(loads_pack, integer_types):
                raise RuntimeError('the load have not been cross-referenced')
            if scale == 0.0 and filter_zero_scale_factors:
                continue

            for load in loads_pack:
                # bug fix: test the load's type against the simple list;
                # ``if simple_loads:`` was always True (non-empty list), so
                # nested LOAD cards were never detected
                if load.type in simple_loads:
                    loads.append(load)
                    scale_factors.append(scale)  # local
                elif isinstance(load, LOAD):
                    if not resolve_load_card:
                        msg = (
                            'A LOAD card cannot reference another LOAD card\n'
                            'current:\n%s\n'
                            'new:\n%s' % (str(self), str(load))
                        )
                        raise RuntimeError(msg)
                    load_data = load.get_reduced_loads(
                        resolve_load_card=True,
                        filter_zero_scale_factors=filter_zero_scale_factors)
                    (reduced_scale_factors, reduced_loads) = load_data
                    loads += reduced_loads
                    scale_factors += [scale * j_scale
                                      for j_scale in reduced_scale_factors]
                else:
                    msg = ('%s isnt supported in get_reduced_loads method'
                           % load.__class__.__name__)
                    raise NotImplementedError(msg)
        return (scale_factors, loads)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        load_ids2 = []
        msg = ', which is required by LOAD=%s' % (self.sid)
        for load_id in self.load_ids:
            if load_id == self.sid:
                msg = 'Type=%s sid=%s load_id=%s creates a recursion error' % (
                    self.type, self.sid, load_id)
                raise RuntimeError(msg)
            load_id2 = model.Load(load_id, consider_load_combinations=True, msg=msg)
            assert isinstance(load_id2, list), load_id2
            load_ids2.append(load_id2)
        self.load_ids_ref = load_ids2

    def safe_cross_reference(self, model: BDF, xref_errors, debug=True):
        """like cross_reference, but missing loads are skipped (with a message)"""
        load_ids2 = []
        msg = ', which is required by LOAD=%s' % (self.sid)
        for load_id in self.load_ids:
            try:
                load_id2 = model.Load(load_id, consider_load_combinations=True, msg=msg)
            except KeyError:
                if debug:
                    msg = 'Couldnt find load_id=%i, which is required by %s=%s' % (
                        load_id, self.type, self.sid)
                    print(msg)
                continue
            load_ids2.append(load_id2)
        self.load_ids_ref = load_ids2

    def raw_fields(self):
        """returns the card's fields in a writable form"""
        list_fields = ['LOAD', self.sid, self.scale]
        load_ids = self.get_load_ids()
        for (scale_factor, load_id) in zip(self.scale_factors, load_ids):
            list_fields += [scale_factor, self.LoadID(load_id)]
        if len(load_ids) != len(self.scale_factors):
            msg = 'nload_ids=%s nscale_factors=%s and arent the same\n' % (
                len(load_ids), len(self.scale_factors))
            # bug fix: append (the original re-assigned msg here, which
            # silently dropped the count line above)
            msg += 'load_ids=%s\n' % (load_ids)
            msg += 'scale_factors=%s\n' % (self.scale_factors)
            msg += print_card_8(list_fields)
            msg += str(self.get_stats())
            raise IndexError(msg)
        return list_fields

    def repr_fields(self):
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.raw_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        return self.comment + print_card_16(card)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.load_ids = self.get_load_ids()
        self.load_ids_ref = None
class CLOAD(LoadCombination):
    """
    Static Load Combination for Superelement Loads (Superposition)

    references excite ids (e.g., an LSEQ); looks like a LOAD
    """
    type = 'CLOAD'

    @classmethod
    def _init_from_empty(cls):
        """creates a valid placeholder card (used for testing)"""
        sid = 1
        scale = 1.
        scale_factors = [1.]
        load_ids = [1]
        return cls(sid, scale, scale_factors, load_ids, comment='')

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        load_ids2 = []
        msg = ', which is required by CLOAD=%s' % (self.sid)
        for load_id in self.load_ids:
            if load_id == self.sid:
                msg = 'Type=%s sid=%s load_id=%s creates a recursion error' % (
                    self.type, self.sid, load_id)
                raise RuntimeError(msg)

            # a CLOAD references excite ids, not load set ids, so scan the
            # load combinations for matching excite_ids instead of model.Load
            #print(model.load_combinations)
            load_id2 = []
            for unused_loadset, load_combinations in model.load_combinations.items():
                for load in load_combinations:
                    if load.type in ['CLOAD']:
                        continue
                    if load_id == load.excite_id:
                        load_id2.append(load)
            #load_id2 = model.Load(load_id, consider_load_combinations=True, msg=msg)
            assert isinstance(load_id2, list), load_id2
            assert len(load_id2) > 0, f'could not find references for CLOAD load_id={load_id}'
            load_ids2.append(load_id2)
        self.load_ids_ref = load_ids2

    def safe_cross_reference(self, model: BDF, xref_errors, debug=True):
        self.cross_reference(model)

    def get_load_ids(self):
        """returns the referenced excite ids (post-xref) or the raw ids"""
        if self.load_ids_ref is None:
            return self.load_ids

        excite_ids = []
        #print(self.load_ids_ref)
        for loads in self.load_ids_ref:
            excite_idsi = set([])
            for load in loads:
                excite_id = load.excite_id
                excite_idsi.add(excite_id)
            # every load in a pack must share a single excite id
            assert len(excite_idsi) == 1, excite_idsi
            excite_ids.append(excite_idsi.pop())
        assert len(excite_ids) > 0, excite_ids
        return excite_ids

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.load_ids = self.get_load_ids()
        self.load_ids_ref = None

    def raw_fields(self):
        """returns the card's fields in a writable form"""
        list_fields = ['CLOAD', self.sid, self.scale]
        load_ids = self.get_load_ids()
        for (scale_factor, load_id) in zip(self.scale_factors, load_ids):
            load_idi = self.LoadID(load_id)
            list_fields += [scale_factor, load_idi]
        if len(load_ids) != len(self.scale_factors):
            msg = 'nload_ids=%s nscale_factors=%s and arent the same\n' % (
                len(load_ids), len(self.scale_factors))
            # bug fix: append (the original re-assigned msg here, which
            # silently dropped the count line above)
            msg += 'load_ids=%s\n' % (load_ids)
            msg += 'scale_factors=%s\n' % (self.scale_factors)
            msg += print_card_8(list_fields)
            msg += str(self.get_stats())
            raise IndexError(msg)
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.raw_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        return self.comment + print_card_16(card)
class GRAV(BaseCard):
    """
    Defines acceleration vectors for gravity or other acceleration loading.

    +------+-----+-----+------+-----+-----+------+-----+
    |  1   |  2  |  3  |  4   |  5  |  6  |  7   |  8  |
    +======+=====+=====+======+=====+=====+======+=====+
    | GRAV | SID | CID |  A   | N1  | N2  |  N3  | MB  |
    +------+-----+-----+------+-----+-----+------+-----+
    | GRAV | 1   |  3  | 32.2 | 0.0 | 0.0 | -1.0 |     |
    +------+-----+-----+------+-----+-----+------+-----+
    """
    type = 'GRAV'

    @classmethod
    def _init_from_empty(cls):
        """creates a valid placeholder card (used for testing)"""
        sid = 1
        scale = 1.
        N = [1., 1., 1.]
        return GRAV(sid, scale, N, cid=0, mb=0, comment='')

    def __init__(self, sid, scale, N, cid=0, mb=0, comment=''):
        """
        Creates an GRAV card

        Parameters
        ----------
        sid : int
            load id
        scale : float
            scale factor for load
        N : (3, ) float ndarray
            the acceleration vector in the cid frame
        cid : int; default=0
            the coordinate system for the load
        mb : int; default=0
            ???
        comment : str; default=''
            a comment for the card
        """
        if comment:
            self.comment = comment
        #: Set identification number
        self.sid = sid
        #: Coordinate system identification number.
        self.cid = cid
        #: scale factor
        self.scale = scale
        #: Acceleration vector components measured in coordinate system CID
        self.N = np.asarray(N)
        #: Indicates whether the CID coordinate system is defined in the
        #: main Bulk Data Section (MB = -1) or the partitioned superelement
        #: Bulk Data Section (MB = 0). Coordinate systems referenced in the
        #: main Bulk Data Section are considered stationary with respect to
        #: the assembly basic coordinate system. See Remark 10.
        #: (Integer; Default = 0)
        self.mb = mb
        # filled in by cross_reference / safe_cross_reference
        self.cid_ref = None
        # a zero acceleration vector is meaningless; NOTE: assert is
        # stripped under ``python -O``
        assert not allclose(max(abs(self.N)), 0.), ('GRAV N is a zero vector, '
                                                    'N=%s' % str(self.N))

    def validate(self):
        # NOTE(review): rejects int and numpy float scales; siblings use
        # float_types — confirm whether that is intended here
        if not isinstance(self.scale, float):
            msg = 'scale=%s type=%s' % (self.scale, type(self.scale))
            raise TypeError(msg)

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a GRAV card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        cid = integer_or_blank(card, 2, 'cid', 0)
        scale = double(card, 3, 'scale')
        # blank Ni components default to 0.0
        N = array([double_or_blank(card, 4, 'N1', 0.0),
                   double_or_blank(card, 5, 'N2', 0.0),
                   double_or_blank(card, 6, 'N3', 0.0)])
        mb = integer_or_blank(card, 7, 'mb', 0)
        assert len(card) <= 8, f'len(GRAV card) = {len(card):d}\ncard={card}'
        return GRAV(sid, scale, N, cid=cid, mb=mb, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds a GRAV card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        sid = data[0]
        cid = data[1]
        # field 2 (A) is not used when building from OP2 data
        unused_a = data[2]
        N = array(data[3:6])
        mb = data[6]
        scale = 1.
        assert len(data) == 7
        return GRAV(sid, scale, N, cid=cid, mb=mb, comment=comment)

    def get_loads(self):
        """a GRAV is its own resolved load"""
        return [self]

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by GRAV sid=%s' % self.sid
        self.cid_ref = model.Coord(self.cid, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors, debug=True):
        # like cross_reference, but a missing coord is recorded in
        # xref_errors instead of raising
        # msg = "Couldn't find CORDx=%s which is required by GRAV sid=%s" % (self.cid, self.sid)
        msg = ', which is required by GRAV sid=%s' % self.sid
        self.cid_ref = model.safe_coord(self.cid, self.sid, xref_errors, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.cid = self.Cid()
        self.cid_ref = None

    def Cid(self):
        """gets the coordinate system id, xref'd or not"""
        if self.cid_ref is not None:
            return self.cid_ref.cid
        return self.cid

    def GravityVector(self):
        """returns the gravity vector in absolute coordinates"""
        # NOTE(review): the cid=0 path returns N *unscaled* while the
        # transformed path multiplies by self.scale — confirm which is
        # intended before relying on this
        if self.Cid() == 0:
            return self.N
        ## TODO: shouldn't be scaled by the ???
        p = self.cid_ref.transform_vector_to_global(self.N)
        return self.scale * p

    def raw_fields(self):
        """returns the card's fields in a writable form"""
        N = list(self.N)
        list_fields = ['GRAV', self.sid, self.Cid(), self.scale] + N + [self.mb]
        return list_fields

    def repr_fields(self):
        """like raw_fields, but default-valued fields are blanked"""
        N = []
        for n in self.N:
            N.append(set_blank_if_default(n, 0.0))
        mb = set_blank_if_default(self.mb, 0)
        list_fields = ['GRAV', self.sid, self.Cid(), self.scale] + N + [mb]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.raw_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        return self.comment + print_card_16(card)
class ACCEL(BaseCard):
    """
    Acceleration Load

    Defines static acceleration loads, which may vary over a region of
    the structural model. The load variation is based upon the tabular
    input defined on this Bulk Data entry.

    +-------+------+------+--------+------+-----+-----+--------+-----+
    |   1   |  2   |  3   |   4    |  5   |  6  |  7  |   8    |  9  |
    +=======+======+======+========+======+=====+=====+========+=====+
    | ACCEL | SID  | CID  |   N1   |  N2  | N3  | DIR |        |     |
    +-------+------+------+--------+------+-----+-----+--------+-----+
    |       | LOC1 | VAL1 |  LOC2  | VAL2 | Continues in Groups of 2 |
    +-------+------+------+--------+------+--------------------------+
    | ACCEL | 100  |  2   |  0.0   | 1.0  | 2.0 |  X  |        |     |
    +-------+------+------+--------+------+-----+-----+--------+-----+
    |       | 1.0  | 1.1  |  2.0   | 2.1  | 3.0 | 3.1 |  4.0   | 4.1 |
    +-------+------+------+--------+------+-----+-----+--------+-----+
    """
    type = 'ACCEL'

    @classmethod
    def _init_from_empty(cls):
        """creates a valid placeholder card (used for testing)"""
        sid = 1
        N = [1., 2., 3.]
        direction = 'X'
        locs = [0., 1.]
        vals = [1., 2.]
        return ACCEL(sid, N, direction, locs, vals, cid=0, comment='')

    def __init__(self, sid, N, direction, locs, vals, cid=0, comment=''):
        """
        Creates an ACCEL card

        Parameters
        ----------
        sid : int
            load id
        N : (3, ) float ndarray
            the acceleration vector in the cid frame
        direction : str
            Component direction of acceleration variation
            {X, Y, Z}
        locs : List[float]
            Location along direction DIR in coordinate system CID for
            specification of a load scale factor.
        vals : List[float]
            The load scale factor associated with location LOCi
        cid : int; default=0
            the coordinate system for the load
        comment : str; default=''
            a comment for the card
        """
        if comment:
            self.comment = comment
        #: Load set identification number (Integer>0)
        self.sid = sid
        #: Coordinate system identification number. (Integer>0: Default=0)
        self.cid = cid
        #: Components of the acceleration vector measured in coordinate system
        #: CID. (Real; at least one Ni != 0)
        self.N = np.asarray(N, dtype='float64')
        #: Component direction of acceleration variation. (Character; one of X,Y or Z)
        self.direction = direction
        self.locs = array(locs, dtype='float64')
        self.vals = array(vals, dtype='float64')
        # filled in by cross_reference / safe_cross_reference
        self.cid_ref = None

    def validate(self):
        self.N = np.asarray(self.N)
        assert max(abs(self.N)) > 0.
        assert self.direction in ['X', 'Y', 'Z'], 'dir=%r' % self.direction

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a ACCEL card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        cid = integer_or_blank(card, 2, 'cid', 0)
        N = [double_or_blank(card, 3, 'N1', 0.0),
             double_or_blank(card, 4, 'N2', 0.0),
             double_or_blank(card, 5, 'N3', 0.0)]
        direction = string(card, 6, 'dir')

        # LOC/VAL pairs start on the continuation line (field 9)
        i = 9
        locs = []
        vals = []
        j = 0
        nfields = len(card)
        while i < nfields:
            loc = double(card, i, 'loc%i' % j)
            # bug fix: VAL is the field *after* LOC; the original re-read
            # field i, duplicating LOCi into VALi and dropping every VALi
            val = double(card, i + 1, 'val%i' % j)
            #print('i=%s j=%s len=%s loc=%s val=%s' % (i, j, len(card), loc, val))
            locs.append(loc)
            vals.append(val)
            j += 1
            i += 2
        return ACCEL(sid, N, direction, locs, vals, cid=cid, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by ACCEL sid=%s' % self.sid
        self.cid_ref = model.Coord(self.cid, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.cid = self.Cid()
        self.cid_ref = None

    def safe_cross_reference(self, model: BDF, xref_errors, debug=True):
        """like cross_reference, but a missing coord is recorded in xref_errors"""
        msg = ', which is required by ACCEL sid=%s' % self.sid
        self.cid_ref = model.safe_coord(self.cid, self.sid, xref_errors, msg=msg)

    def Cid(self):
        """gets the coordinate system id, xref'd or not"""
        if self.cid_ref is not None:
            return self.cid_ref.cid
        return self.cid

    def get_loads(self):
        """an ACCEL is its own resolved load"""
        return [self]

    def raw_fields(self):
        """returns the card's fields in a writable form"""
        list_fields = [
            'ACCEL', self.sid, self.Cid(),
            self.N[0], self.N[1], self.N[2], self.direction, None, None,
        ]
        for loc, val in zip(self.locs, self.vals):
            list_fields += [loc, val]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.raw_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        if is_double:
            return self.comment + print_card_double(card)
        return self.comment + print_card_16(card)
class ACCEL1(BaseCard):
"""
Acceleration Load
Defines static acceleration loads at individual GRID points.
+--------+---------+---------+-----+----+----+----+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 |
+========+=========+=========+=====+====+====+====+
| ACCEL1 | SID | CID | A | N1 | N2 | N3 |
+--------+---------+---------+-----+----+----+----+
| | GRIDID1 | GRIDID2 | etc | | | |
+--------+---------+---------+-----+----+----+----+
"""
type = 'ACCEL1'
_properties = ['node_ids']
@classmethod
def _init_from_empty(cls):
sid = 1
N = [1., 2., 3.]
scale = 1.
nodes = [1, 3, 4]
return ACCEL1(sid, scale, N, nodes, cid=0, comment='')
def __init__(self, sid, scale, N, nodes, cid=0, comment=''):
"""
Creates an ACCEL1 card
Parameters
----------
sid : int
load id
scale : float
scale factor for load
N : (3, ) float ndarray
the acceleration vector in the cid frame
direction : str
Component direction of acceleration variation
{X, Y, Z}
nodes : List[int]
the nodes to apply acceleration to
cid : int; default=0
the coordinate system for the load
comment : str; default=''
a comment for the card
"""
if comment:
self.comment = comment
#: Load set identification number (Integer>0)
self.sid = sid
#: Coordinate system identification number. (Integer>0: Default=0)
self.cid = cid
#: Acceleration vector scale factor. (Real)
self.scale = scale
#: Components of the acceleration vector measured in coordinate system
#: CID. (Real; at least one Ni != 0)
self.N = np.asarray(N)
#: nodes to apply the acceleration to
self.nodes = expand_thru_by(nodes)
assert max(abs(self.N)) > 0.
self.nodes_ref = None
self.cid_ref = None
def validate(self):
assert len(self.N) == 3, 'N=%r' % self.N
# self.N = np.asarray(self.N)
assert isinstance(self.cid, integer_types), 'cid=%r' % self.cid
assert isinstance(self.scale, float_types), 'scale=%r' % self.scale
assert isinstance(self.nodes, list), 'nodes=%r' % self.nodes
@classmethod
def add_card(cls, card, comment=''):
"""
Adds a ACCEL1 card from ``BDF.add_card(...)``
Parameters
----------
card : BDFCard()
a BDFCard object
comment : str; default=''
a comment for the card
"""
sid = integer(card, 1, 'sid')
cid = integer_or_blank(card, 2, 'cid', 0)
scale = double(card, 3, 'scale')
N = [double_or_blank(card, 4, 'N1', 0.0),
double_or_blank(card, 5, 'N2', 0.0),
double_or_blank(card, 6, 'N3', 0.0)]
nodes = fields(integer_or_string, card, 'node', i=9, j=len(card))
return ACCEL1(sid, scale, N, nodes, cid=cid, comment=comment)
def cross_reference(self, model: BDF) -> None:
"""
Cross links the card so referenced cards can be extracted directly
Parameters
----------
model : BDF()
the BDF object
"""
msg = ', which is required by ACCEL1 sid=%s' % self.sid
self.cid_ref = model.Coord(self.cid, msg=msg)
self.nodes_ref = model.EmptyNodes(self.node_ids, msg=msg)
def safe_cross_reference(self, model: BDF, xref_errors):
msg = ', which is required by ACCEL1 sid=%s' % self.sid
self.cid_ref = model.safe_coord(self.cid, self.sid, xref_errors, msg=msg)
self.nodes_ref = model.EmptyNodes(self.node_ids, msg=msg)
    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # resolve the plain ids from the refs *before* clearing the refs
        self.cid = self.Cid()
        self.nodes = self.node_ids
        self.nodes_ref = None
        self.cid_ref = None
def Cid(self):
if self.cid_ref is not None:
return self.cid_ref.cid
return self.cid
    @property
    def node_ids(self):
        """gets the node ids (integers) the acceleration is applied to"""
        #msg = ', which is required by ACCEL1 sid=%s' % self.sid
        #_node_ids(self.nodes, allow_empty_nodes=True, msg=msg)
        # nodes_ref is None before cross-referencing; _node_ids then
        # falls back to self.nodes
        return self._node_ids(nodes=self.nodes_ref)
def _node_ids(self, nodes=None): # this function comes from BaseCard.py
"""returns node_ids for repr functions"""
if not nodes:
nodes = self.nodes
if isinstance(nodes[0], integer_types):
node_ids = [node for node in nodes]
else:
node_ids = [node.nid for node in nodes]
assert 0 not in node_ids, 'node_ids = %s' % (node_ids)
return node_ids
    def get_loads(self):
        """returns this load in a list"""
        return [self]
    def raw_fields(self):
        """gets the fields of the card in their full form (no defaults blanked)"""
        # collapse_thru_by compresses runs of node ids into THRU/BY notation
        list_fields = [
            'ACCEL1', self.sid, self.Cid(), self.scale,
            self.N[0], self.N[1], self.N[2], None, None
        ] + collapse_thru_by(self.node_ids)
        return list_fields
def write_card(self, size: int=8, is_double: bool=False) -> str:
card = self.raw_fields()
if size == 8:
return self.comment + print_card_8(card)
if is_double:
return self.comment + print_card_double(card)
return self.comment + print_card_16(card)
#class Force(Load):
#"""Generic class for all Forces"""
#type = 'Force'
#def __init__(self):
#Load.__init__(self)
#def get_loads(self):
#return [self]
#def F(self):
#return self.xyz * self.mag
#def get_reduced_loads(self, resolve_load_card=False, filter_zero_scale_factors=False):
#scale_factors = [1.]
#loads = self.F()
#return(scale_factors, loads)
#def write_card(self, size: int=8, is_double: bool=False) -> str:
#card = self.raw_fields()
#if size == 8:
#return self.comment + print_card_8(card)
#if is_double:
#return self.comment + print_card_double(card)
#return self.comment + print_card_16(card)
#class Moment(Load):
#"""Generic class for all Moments"""
#type = 'Moment'
#def __init__(self):
#Load.__init__(self)
#def get_loads(self):
#return [self]
#def get_reduced_loads(self, resolve_load_card=False, filter_zero_scale_factors=False):
#scale_factors = [1.]
#loads = {
#self.node: self.M()
#}
#return(scale_factors, loads)
#def write_card(self, size: int=8, is_double: bool=False) -> str:
#card = self.raw_fields()
#if size == 8:
#return self.comment + print_card_8(card)
#if is_double:
#return self.comment + print_card_double(card)
#return self.comment + print_card_16(card)
class Load0(BaseCard):
    """common class for FORCE, MOMENT"""
    @classmethod
    def export_to_hdf5(cls, h5_file, model, loads):
        """exports the loads in a vectorized way"""
        #encoding = model._encoding
        #comments = []
        sid = []
        node = []
        cid = []
        mag = []
        xyz = []
        for load in loads:
            #comments.append(loads.comment)
            sid.append(load.sid)
            node.append(load.node)
            cid.append(load.cid)
            mag.append(load.mag)
            xyz.append(load.xyz)
        #h5_file.create_dataset('_comment', data=comments)
        h5_file.create_dataset('sid', data=sid)
        h5_file.create_dataset('node', data=node)
        h5_file.create_dataset('cid', data=cid)
        h5_file.create_dataset('mag', data=mag)
        h5_file.create_dataset('xyz', data=xyz)

    def __init__(self, sid, node, mag, xyz, cid=0, comment=''):
        """
        Creates a FORCE/MOMENT card

        Parameters
        ----------
        sid : int
            load id
        node : int
            the node to apply the load to
        mag : float
            the load's magnitude
        xyz : (3, ) float ndarray
            the load direction in the cid frame
        cid : int; default=0
            the coordinate system for the load
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.sid = sid
        self.node = node
        self.cid = cid
        self.mag = mag
        self.xyz = np.asarray(xyz, dtype='float64')
        assert self.xyz.size == 3, self.xyz.shape
        assert isinstance(self.cid, integer_types), self.cid
        self.node_ref = None
        self.cid_ref = None

    def validate(self):
        """verifies the types/values of the card fields"""
        assert isinstance(self.cid, integer_types), self.cid
        assert isinstance(self.mag, float), self.mag
        assert self.xyz.size == 3, self.xyz.shape

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a FORCE/MOMENT card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        node = integer(card, 2, 'node')
        cid = integer_or_blank(card, 3, 'cid', 0)
        mag = double(card, 4, 'mag')
        xyz = array([double_or_blank(card, 5, 'X1', 0.0),
                     double_or_blank(card, 6, 'X2', 0.0),
                     double_or_blank(card, 7, 'X3', 0.0)])
        assert len(card) <= 8, 'len(%s card) = %i\ncard=%s' % (cls.type, len(card), card)
        return cls(sid, node, mag, xyz, cid=cid, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds a FORCE/MOMENT card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        sid = data[0]
        node = data[1]
        cid = data[2]
        mag = data[3]
        xyz = array(data[4:7])
        return cls(sid, node, mag, xyz, cid=cid, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by %s sid=%s' % (self.type, self.sid)
        self.node_ref = model.Node(self.node, msg=msg)
        self.cid_ref = model.Coord(self.cid, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors, debug=True):
        """cross-references the card; missing coords are recorded, not raised"""
        msg = ', which is required by %s sid=%s' % (self.type, self.sid)
        # try:
        self.node_ref = model.Node(self.node, msg=msg)
        self.cid_ref = model.safe_coord(self.cid, self.sid, xref_errors, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # bug fix: the node id and node_ref were previously left untouched,
        # leaking the GRID reference after uncross-referencing (MOMENT
        # re-implemented this method just to do the full cleanup)
        self.node = self.node_id
        self.cid = self.Cid()
        self.node_ref = None
        self.cid_ref = None

    def get_loads(self):
        """returns this load in a list"""
        return [self]

    @property
    def node_id(self):
        """gets the id of the loaded node"""
        if self.node_ref is not None:
            return self.node_ref.nid
        return self.node

    def Cid(self):
        """gets the id of the load's coordinate system"""
        if self.cid_ref is not None:
            return self.cid_ref.cid
        return self.cid

    @property
    def scaled_vector(self):
        """the direction vector scaled by the load magnitude (still in cid frame)"""
        return self.xyz * self.mag

    def to_global(self):
        """transforms the scaled load vector into the global frame"""
        return self.cid_ref.transform_vector_to_global(self.scaled_vector)

    #def to_local(self):
        #return self.cd_ref.transform_vector_to_local(self.scaled_vector)

    def raw_fields(self):
        """gets the fields of the card in their full form (no defaults blanked)"""
        list_fields = [self.type, self.sid, self.node_id, self.Cid(),
                       self.mag] + list(self.xyz)
        return list_fields

    def repr_fields(self):
        """gets the fields of the card with defaults blanked out"""
        cid = set_blank_if_default(self.Cid(), 0)
        list_fields = [self.type, self.sid, self.node_id, cid,
                       self.mag] + list(self.xyz)
        return list_fields
class FORCE(Load0):
    """
    Defines a static concentrated force at a grid point by specifying a
    scale factor and a vector that determines the direction.

    +-------+-----+------+-----+------+------+------+------+
    |   1   |  2  |  3   |  4  |  5   |  6   |  7   |  8   |
    +=======+=====+======+=====+======+======+======+======+
    | FORCE | SID | NODE | CID | MAG  |  FX  |  FY  |  FZ  |
    +-------+-----+------+-----+------+------+------+------+
    | FORCE |  3  |  1   |     | 100. |  0.  |  0.  |  1.  |
    +-------+-----+------+-----+------+------+------+------+
    """
    type = 'FORCE'

    def __init__(self, sid, node, mag, xyz, cid=0, comment=''):
        """
        Creates a FORCE card

        Parameters
        ----------
        sid : int
            load id
        node : int
            the node to apply the load to
        mag : float
            the load's magnitude
        xyz : (3, ) float ndarray
            the load direction in the cid frame
        cid : int; default=0
            the coordinate system for the load
        comment : str; default=''
            a comment for the card
        """
        Load0.__init__(self, sid, node, mag, xyz, cid=cid, comment=comment)

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small/large/double-precision field format"""
        node_id = self.node_id
        # bug fix: ids too large for an %8i field must force the large-field
        # format (consistent with MOMENT.write_card)
        if max(self.sid, node_id) > MAX_INT:
            size = 16
        if size == 8:
            cids = set_string8_blank_if_default(self.Cid(), 0)
            msg = 'FORCE %8i%8i%8s%8s%8s%8s%8s\n' % (
                self.sid, node_id,
                cids, print_float_8(self.mag), print_float_8(self.xyz[0]),
                print_float_8(self.xyz[1]), print_float_8(self.xyz[2]))
        else:
            cids = set_string16_blank_if_default(self.Cid(), 0)
            if is_double:
                msg = ('FORCE* %16i%16i%16s%s\n'
                       '* %16s%16s%16s\n') % (
                           self.sid, node_id,
                           cids, print_scientific_double(self.mag),
                           print_scientific_double(self.xyz[0]),
                           print_scientific_double(self.xyz[1]),
                           print_scientific_double(self.xyz[2]))
            else:
                msg = ('FORCE* %16i%16i%16s%s\n'
                       '* %16s%16s%16s\n') % (
                           self.sid, node_id,
                           cids, print_float_16(self.mag), print_float_16(self.xyz[0]),
                           print_float_16(self.xyz[1]), print_float_16(self.xyz[2]))
        return self.comment + msg
class Load1(BaseCard):
    """common class for FORCE1, MOMENT1"""
    # computed attributes excluded from generic attribute dumps
    _properties = ['node_id', 'node_ids', 'scaled_vector']
    @classmethod
    def _init_from_empty(cls):
        # minimal valid card used by generic card-construction utilities
        sid = 1
        node = 1
        mag = 1.
        g1 = 2
        g2 = 3
        return cls(sid, node, mag, g1, g2, comment='')
    def __init__(self, sid, node, mag, g1, g2, comment=''):
        """
        Creates a FORCE1/MOMENT1 card

        Parameters
        ----------
        sid : int
            load id
        node : int
            the node to apply the load to
        mag : float
            the load's magnitude
        g1 / g2 : int / int
            defines the load direction
            n = g2 - g1
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.sid = sid
        self.node = node
        self.mag = mag
        self.g1 = g1
        self.g2 = g2
        self.node_ref = None
        self.g1_ref = None
        self.g2_ref = None
        # unit direction vector; filled in during cross_reference()
        self.xyz = None
    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a FORCE1/MOMENT1 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        node = integer(card, 2, 'node')
        mag = double(card, 3, 'mag')
        g1 = integer(card, 4, 'g1')
        g2 = integer(card, 5, 'g2')
        assert len(card) == 6, 'len(%s card) = %i\ncard=%s' % (cls.type, len(card), card)
        return cls(sid, node, mag, g1, g2, comment=comment)
    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds a FORCE1/MOMENT1 card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        sid = data[0]
        node = data[1]
        mag = data[2]
        g1 = data[3]
        g2 = data[4]
        return cls(sid, node, mag, g1, g2, comment=comment)
    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by %s sid=%s' % (self.type, self.sid)
        self.node_ref = model.Node(self.node, msg=msg)
        self.g1_ref = model.Node(self.g1, msg=msg)
        self.g2_ref = model.Node(self.g2, msg=msg)
        # direction is g2 - g1; normalize() makes it a unit vector in place
        self.xyz = self.g2_ref.get_position() - self.g1_ref.get_position()
        normalize(self)
    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # resolve the plain ids from the refs *before* clearing the refs
        self.node = self.node_id
        self.g1 = self.G1()
        self.g2 = self.G2()
        self.node_ref = None
        self.g1_ref = None
        self.g2_ref = None
    def safe_cross_reference(self, model: BDF, safe_coord, debug=True):
        """.. todo:: cross reference and fix repr function"""
        # NOTE(review): not actually "safe" yet — a missing node still raises
        return self.cross_reference(model)
        #msg = ', which is required by FORCE1 sid=%s' % self.sid
        #self.node_ref = model.Node(self.node, msg=msg)
        #self.g1_ref = model.Node(self.g1, msg=msg)
        #self.g2_ref = model.Node(self.g2, msg=msg)
        #self.xyz = self.g2.get_position() - self.g1.get_position()
        #normalize(self)
    def get_loads(self):
        """returns this load in a list"""
        return [self]
    @property
    def scaled_vector(self):
        """the direction vector scaled by the load magnitude"""
        return self.xyz * self.mag
    @property
    def node_ids(self):
        """all the node ids referenced by the card"""
        return [self.node_id, self.G1(), self.G2()]
    def G1(self):
        """gets the G1 node id"""
        if self.g1_ref is not None:
            return self.g1_ref.nid
        return self.g1
    def G2(self):
        """gets the G2 node id"""
        if self.g2_ref is not None:
            return self.g2_ref.nid
        return self.g2
    @property
    def node_id(self):
        """gets the id of the loaded node"""
        if self.node_ref is not None:
            return self.node_ref.nid
        return self.node
    def to_global(self):
        # xyz is built from global node positions, so no frame change is needed
        return self.scaled_vector
    def raw_fields(self):
        """gets the fields of the card in their full form (no defaults blanked)"""
        list_fields = [self.type, self.sid, self.node_id, self.mag, self.G1(), self.G2()]
        return list_fields
    def repr_fields(self):
        """gets the fields of the card for repr (same as raw_fields)"""
        return self.raw_fields()
    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small/large/double-precision field format"""
        card = self.raw_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        if is_double:
            return self.comment + print_card_double(card)
        return self.comment + print_card_16(card)
class FORCE1(Load1):
    """
    Defines a static concentrated force at a grid point by specification of a
    magnitude and two grid points that determine the direction.

    +--------+-----+----+-------+----+----+
    |   1    |  2  | 3  |   4   | 5  | 6  |
    +========+=====+====+=======+====+====+
    | FORCE1 | SID | G  |   F   | G1 | G2 |
    +--------+-----+----+-------+----+----+
    | FORCE1 |  6  | 13 | -2.93 | 16 | 13 |
    +--------+-----+----+-------+----+----+
    """
    type = 'FORCE1'

    def __init__(self, sid, node, mag, g1, g2, comment=''):
        """
        Creates a FORCE1 card

        Parameters
        ----------
        sid : int
            load id
        node : int
            the node to apply the load to
        mag : float
            the load's magnitude
        g1 / g2 : int / int
            defines the load direction as n = g2 - g1
        comment : str; default=''
            a comment for the card
        """
        # all of the behavior lives in the shared Load1 base class
        super().__init__(sid, node, mag, g1, g2, comment)
class Load2(BaseCard):
    """common class for FORCE2, MOMENT2"""
    # computed attributes excluded from generic attribute dumps
    _properties = ['node_id', 'node_ids', ]

    @classmethod
    def _init_from_empty(cls):
        # minimal valid card used by generic card-construction utilities
        sid = 1
        node = 1
        mag = 1.
        g1 = 2
        g2 = 3
        g3 = 4
        g4 = 5
        return cls(sid, node, mag, g1, g2, g3, g4, comment='')

    def __init__(self, sid, node, mag, g1, g2, g3, g4, comment=''):
        """
        Creates a FORCE2/MOMENT2 card

        Parameters
        ----------
        sid : int
            load id
        node : int
            the node to apply the load to
        mag : float
            the load's magnitude
        g1 / g2 / g3 / g4 : int / int / int / int
            defines the load direction
            n = (g2 - g1) x (g4 - g3)
        comment : str; default=''
            a comment for the card
        """
        # consistency fix: Load0/Load1 call the BaseCard constructor; Load2 didn't
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.sid = sid
        self.node = node
        self.mag = mag
        self.g1 = g1
        self.g2 = g2
        self.g3 = g3
        self.g4 = g4
        self.node_ref = None
        self.g1_ref = None
        self.g2_ref = None
        self.g3_ref = None
        self.g4_ref = None
        # unit direction vector; filled in during cross_reference()
        self.xyz = None

    def validate(self):
        """verifies the types/values of the card fields"""
        assert isinstance(self.sid, integer_types), str(self)
        assert self.g1 is not None, self.g1
        assert self.g2 is not None, self.g2
        assert self.g3 is not None, self.g3
        # bug fix: the assert message previously printed self.g3
        assert self.g4 is not None, self.g4
        assert self.g1 != self.g2, 'g1=%s g2=%s' % (self.g1, self.g2)
        assert self.g3 != self.g4, 'g3=%s g4=%s' % (self.g3, self.g4)

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a FORCE2/MOMENT2 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        node = integer(card, 2, 'node')
        mag = double(card, 3, 'mag')
        g1 = integer(card, 4, 'g1')
        g2 = integer(card, 5, 'g2')
        g3 = integer(card, 6, 'g3')
        g4 = integer(card, 7, 'g4')
        assert len(card) == 8, 'len(%s card) = %i\ncard=%s' % (cls.type, len(card), card)
        return cls(sid, node, mag, g1, g2, g3, g4, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds a FORCE2/MOMENT2 card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        sid = data[0]
        node = data[1]
        mag = data[2]
        g1 = data[3]
        g2 = data[4]
        g3 = data[5]
        g4 = data[6]
        return cls(sid, node, mag, g1, g2, g3, g4, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by %s sid=%s' % (self.type, self.sid)
        self.node_ref = model.Node(self.node, msg=msg)
        self.g1_ref = model.Node(self.g1, msg=msg)
        self.g2_ref = model.Node(self.g2, msg=msg)
        self.g3_ref = model.Node(self.g3, msg=msg)
        self.g4_ref = model.Node(self.g4, msg=msg)
        xyz1 = self.g1_ref.get_position()
        xyz2 = self.g2_ref.get_position()
        xyz3 = self.g3_ref.get_position()
        xyz4 = self.g4_ref.get_position()
        v21 = xyz2 - xyz1
        try:
            v21 /= norm(v21)
        except FloatingPointError:
            msg = 'v1=v21=%s norm(v21)=%s\n' % (v21, norm(v21))
            msg += 'g1.get_position()=%s\n' % xyz1
            msg += 'g2.get_position()=%s' % xyz2
            raise FloatingPointError(msg)
        v2 = xyz4 - xyz3
        try:
            v2 /= norm(v2)
        except FloatingPointError:
            msg = 'v2=v43=%s norm(v43)=%s\n' % (v2, norm(v2))
            msg += 'g3.get_position()=%s\n' % xyz3
            msg += 'g4.get_position()=%s' % xyz4
            raise FloatingPointError(msg)
        # n = (g2-g1) x (g4-g3)
        xyz = cross(v21, v2)
        self.xyz = xyz
        msgi = 'xyz1=%s xyz2=%s xyz3=%s xyz4=%s\nv21=%s v43 (or v31)=%s\nxyz=%s' % (
            xyz1, xyz2, xyz3, xyz4, v21, v2, self.xyz)
        normalize(self, msgi)

    def safe_cross_reference(self, model: BDF, safe_coord, debug=True):
        """cross-references the card, logging missing nodes instead of raising"""
        msg = ', which is required by %s sid=%s' % (self.type, self.sid)
        is_failed = False
        try:
            self.node_ref = model.Node(self.node, msg=msg)
        except KeyError:
            is_failed = True
            model.log.warning('failed to cross-reference NODE=%i%s' % (self.node, msg))
        try:
            self.g1_ref = model.Node(self.g1, msg=msg)
            xyz1 = self.g1_ref.get_position()
        except KeyError:
            is_failed = True
            model.log.warning('failed to cross-reference G1=%i%s' % (self.g1, msg))
        try:
            self.g2_ref = model.Node(self.g2, msg=msg)
            xyz2 = self.g2_ref.get_position()
        except KeyError:
            is_failed = True
            model.log.warning('failed to cross-reference G2=%i%s' % (self.g2, msg))
        try:
            self.g3_ref = model.Node(self.g3, msg=msg)
            xyz3 = self.g3_ref.get_position()
        except KeyError:
            is_failed = True
            model.log.warning('failed to cross-reference G3=%i%s' % (self.g3, msg))
        if not is_failed:
            v21 = xyz2 - xyz1
            if self.g4 is not None:
                try:
                    self.g4_ref = model.Node(self.g4, msg=msg)
                    # bug fix: the position lookup now lives inside the try and
                    # the warning only fires when the lookup actually fails
                    # (it previously logged unconditionally, even on success)
                    xyz4 = self.g4_ref.get_position()
                except KeyError:
                    is_failed = True
                    model.log.warning('failed to cross-reference G4=%i%s' % (self.g4, msg))
            else:
                # no G4: fall back to n = (g2-g1) x (g3-g1)
                xyz3, xyz4 = xyz1, xyz3
            if not is_failed:
                v43 = xyz4 - xyz3
                try:
                    v21 /= norm(v21)
                except FloatingPointError:
                    msg = 'v21=%s norm(v21)=%s\n' % (v21, norm(v21))
                    msg += 'g1.get_position()=%s\n' % xyz1
                    msg += 'g2.get_position()=%s' % xyz2
                    raise FloatingPointError(msg)
                try:
                    v43 /= norm(v43)
                except FloatingPointError:
                    msg = 'v43=%s norm(v43)=%s\n' % (v43, norm(v43))
                    msg += 'g3.get_position()=%s\n' % xyz3
                    msg += 'g4.get_position()=%s' % xyz4
                    raise FloatingPointError(msg)
                self.xyz = cross(v21, v43)
                normalize(self, msg)

    @property
    def scaled_vector(self):
        """the direction vector scaled by the load magnitude"""
        return self.xyz * self.mag

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # resolve the plain ids from the refs *before* clearing the refs
        self.node = self.node_id
        self.g1 = self.G1()
        self.g2 = self.G2()
        self.g3 = self.G3()
        self.g4 = self.G4()
        self.node_ref = None
        self.g1_ref = None
        self.g2_ref = None
        self.g3_ref = None
        self.g4_ref = None
        self.xyz = None

    def get_loads(self):
        """returns this load in a list"""
        return [self]

    @property
    def node_id(self):
        """gets the id of the loaded node"""
        if self.node_ref is not None:
            return self.node_ref.nid
        return self.node

    def G1(self):
        """gets the G1 node id"""
        if self.g1_ref is not None:
            return self.g1_ref.nid
        return self.g1

    def G2(self):
        """gets the G2 node id"""
        if self.g2_ref is not None:
            return self.g2_ref.nid
        return self.g2

    def G3(self):
        """gets the G3 node id"""
        if self.g3_ref is not None:
            return self.g3_ref.nid
        return self.g3

    def G4(self):
        """gets the G4 node id"""
        if self.g4_ref is not None:
            return self.g4_ref.nid
        return self.g4

    @property
    def node_ids(self):
        """all the node ids referenced by the card"""
        return [self.node_id, self.G1(), self.G2(), self.G3(), self.G4()]

    def _node_ids(self, nodes=None):
        """returns nodeIDs for repr functions"""
        if not nodes:
            nodes = self.nodes
        if isinstance(nodes[0], integer_types):
            return nodes
        return [node.nid for node in nodes]

    def raw_fields(self):
        """gets the fields of the card in their full form (no defaults blanked)"""
        (node, g1, g2, g3, g4) = self._node_ids([self.node, self.g1, self.g2, self.g3, self.g4])
        list_fields = [self.type, self.sid, node, self.mag, g1, g2, g3, g4]
        return list_fields

    def repr_fields(self):
        """gets the fields of the card for repr (same as raw_fields)"""
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small/large/double-precision field format"""
        card = self.raw_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        if is_double:
            return self.comment + print_card_double(card)
        return self.comment + print_card_16(card)
#def get_reduced_loads(self, resolve_load_card=False, filter_zero_scale_factors=False):
#scale_factors = [1.]
#loads = self.F()
#return(scale_factors, loads)
class FORCE2(Load2):
    """
    Defines a static concentrated force at a grid point by specification of a
    magnitude and four grid points that determine the direction.

    +--------+-----+---+---+----+----+----+----+
    |   1    |  2  | 3 | 4 | 5  | 6  | 7  | 8  |
    +========+=====+===+===+====+====+====+====+
    | FORCE2 | SID | G | F | G1 | G2 | G3 | G4 |
    +--------+-----+---+---+----+----+----+----+
    """
    type = 'FORCE2'
    _properties = ['scaled_vector', 'node_id', 'node_ids']

    def __init__(self, sid, node, mag, g1, g2, g3, g4, comment=''):
        """see ``Load2.__init__`` for the parameter documentation"""
        super().__init__(sid, node, mag, g1, g2, g3, g4, comment)
class MOMENT(Load0):
    """
    Defines a static concentrated moment at a grid point by specifying a
    scale factor and a vector that determines the direction.

    +--------+-----+---+-----+-----+-----+-----+-----+
    |   1    |  2  | 3 |  4  |  5  |  6  |  7  |  8  |
    +========+=====+===+=====+=====+=====+=====+=====+
    | MOMENT | SID | G | CID |  M  | N1  | N2  | N3  |
    +--------+-----+---+-----+-----+-----+-----+-----+
    | MOMENT |  2  | 5 |  6  | 2.9 | 0.0 | 1.0 | 0.0 |
    +--------+-----+---+-----+-----+-----+-----+-----+
    """
    type = 'MOMENT'
    def __init__(self, sid, node, mag, xyz, cid=0, comment=''):
        """
        Creates a MOMENT card

        Parameters
        ----------
        sid : int
            load id
        node : int
            the node to apply the load to
        mag : float
            the load's magnitude
        xyz : (3, ) float ndarray
            the load direction in the cid frame
        cid : int; default=0
            the coordinate system for the load
        comment : str; default=''
            a comment for the card
        """
        Load0.__init__(self, sid, node, mag, xyz, cid=cid, comment=comment)
    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # resolve the plain ids from the refs *before* clearing the refs
        self.node = self.node_id
        self.cid = self.Cid()
        self.node_ref = None
        self.cid_ref = None
    @property
    def node_ids(self):
        """all the nodes referenced by the load"""
        return [self.node_id]
    @property
    def node_id(self):
        """gets the id of the loaded node"""
        if self.node_ref is None:
            return self.node
        return self.node_ref.nid
    def raw_fields(self):
        """gets the fields of the card in their full form (no defaults blanked)"""
        list_fields = ['MOMENT', self.sid, self.node_id, self.Cid(),
                       self.mag] + list(self.xyz)
        return list_fields
    def repr_fields(self):
        """gets the fields of the card with defaults blanked out"""
        cid = set_blank_if_default(self.Cid(), 0)
        list_fields = ['MOMENT', self.sid, self.node_id, cid,
                       self.mag] + list(self.xyz)
        return list_fields
    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small/large/double-precision field format"""
        node_id = self.node_id
        # ids too large for an %8i field force the large-field format
        if max(self.sid, node_id) > MAX_INT:
            size = 16
        if size == 8:
            scid = set_string8_blank_if_default(self.Cid(), 0)
            msg = 'MOMENT %8i%8i%8s%8s%8s%8s%8s\n' % (
                self.sid, node_id,
                scid, print_float_8(self.mag), print_float_8(self.xyz[0]),
                print_float_8(self.xyz[1]), print_float_8(self.xyz[2]))
        else:
            scid = set_string16_blank_if_default(self.Cid(), 0)
            if is_double:
                msg = ('MOMENT* %16i%16i%16s%s\n'
                       '* %16s%16s%16s\n') % (
                           self.sid, node_id,
                           scid, print_scientific_double(self.mag),
                           print_scientific_double(self.xyz[0]),
                           print_scientific_double(self.xyz[1]),
                           print_scientific_double(self.xyz[2]))
            else:
                msg = ('MOMENT* %16i%16i%16s%s\n'
                       '* %16s%16s%16s\n') % (
                           self.sid, node_id,
                           scid, print_float_16(self.mag), print_float_16(self.xyz[0]),
                           print_float_16(self.xyz[1]), print_float_16(self.xyz[2]))
        return self.comment + msg
class MOMENT1(Load1):
    """
    Defines a static concentrated moment at a grid point by specifying a
    magnitude and two grid points that determine the direction.

    +---------+-----+----+-------+----+----+
    |    1    |  2  | 3  |   4   | 5  | 6  |
    +=========+=====+====+=======+====+====+
    | MOMENT1 | SID | G  |   M   | G1 | G2 |
    +---------+-----+----+-------+----+----+
    | MOMENT1 |  6  | 13 | -2.93 | 16 | 13 |
    +---------+-----+----+-------+----+----+
    """
    type = 'MOMENT1'

    def __init__(self, sid, node, mag, g1, g2, comment=''):
        """
        Creates a MOMENT1 card

        Parameters
        ----------
        sid : int
            load id
        node : int
            the node to apply the load to
        mag : float
            the load's magnitude
        g1 / g2 : int / int
            defines the load direction as n = g2 - g1
        comment : str; default=''
            a comment for the card
        """
        # all of the behavior lives in the shared Load1 base class
        super().__init__(sid, node, mag, g1, g2, comment)
class MOMENT2(Load2):
    """
    Defines a static concentrated moment at a grid point by specification
    of a magnitude and four grid points that determine the direction.

    +---------+-----+---+---+----+----+----+----+
    |    1    |  2  | 3 | 4 | 5  | 6  | 7  | 8  |
    +=========+=====+===+===+====+====+====+====+
    | MOMENT2 | SID | G | M | G1 | G2 | G3 | G4 |
    +---------+-----+---+---+----+----+----+----+
    """
    type = 'MOMENT2'
    _properties = ['scaled_vector', 'node_id', 'node_ids']

    def __init__(self, sid, node, mag, g1, g2, g3, g4, comment=''):
        """see ``Load2.__init__`` for the parameter documentation"""
        super().__init__(sid, node, mag, g1, g2, g3, g4, comment)
class PLOAD(Load):
    """
    Static Pressure Load

    Defines a uniform static pressure load on a triangular or quadrilateral surface
    comprised of surface elements and/or the faces of solid elements.

    +-------+-----+------+----+----+----+----+
    |   1   |  2  |  3   | 4  | 5  | 6  | 7  |
    +=======+=====+======+====+====+====+====+
    | PLOAD | SID |  P   | G1 | G2 | G3 | G4 |
    +-------+-----+------+----+----+----+----+
    | PLOAD |  1  | -4.0 | 16 | 32 | 11 |    |
    +-------+-----+------+----+----+----+----+
    """
    type = 'PLOAD'
    _properties = ['node_ids', ]
    @classmethod
    def _init_from_empty(cls):
        # minimal valid card used by generic card-construction utilities
        sid = 1
        pressure = 1.
        nodes = [1, 2, 3]
        return PLOAD(sid, pressure, nodes, comment='')
    def __init__(self, sid, pressure, nodes, comment=''):
        """
        Creates a PLOAD card, which defines a uniform pressure load on a
        shell/solid face or arbitrarily defined quad/tri face.

        Parameters
        ----------
        sid : int
            load id
        pressure : float
            the pressure to apply
        nodes : List[int]
            The nodes that are used to define the normal are defined
            using the same method as the CTRIA3/CQUAD4 normal.
            n = 3 or 4
        comment : str; default=''
            a comment for the card
        """
        if comment:
            self.comment = comment
        self.sid = sid
        self.pressure = pressure
        self.nodes = nodes
        assert len(self.nodes) in [3, 4], 'nodes=%s' % self.nodes
    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a PLOAD card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        pressure = double(card, 2, 'pressure')
        nodes = [integer(card, 3, 'n1'),
                 integer(card, 4, 'n2'),
                 integer(card, 5, 'n3')]
        # the 4th node is optional (tri face vs. quad face)
        n4 = integer_or_blank(card, 6, 'n4', 0)
        if n4:
            nodes.append(n4)
        assert len(card) <= 7, f'len(PLOAD card) = {len(card):d}\ncard={card}'
        return PLOAD(sid, pressure, nodes, comment=comment)
    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds a PLOAD card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        sid = data[0]
        pressure = data[1]
        nodes = data[2:]
        # the OP2 pads tri faces with a trailing 0; strip it
        if nodes[-1] == 0:
            nodes = list(nodes)
            nodes.pop()
        return PLOAD(sid, pressure, nodes, comment=comment)
    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        # PLOAD stores raw node ids; nothing to cross-reference
        pass
    def safe_cross_reference(self, model: BDF, safe_coord):
        """no-op; see cross_reference"""
        return self.cross_reference(model)
    @staticmethod
    def uncross_reference() -> None:
        """Removes cross-reference links"""
        # staticmethod quirk: callable as inst.uncross_reference() since
        # there is no state to reset
        pass
    def get_loads(self):
        """returns this load in a list"""
        return [self]
    def raw_fields(self):
        """gets the fields of the card in their full form (no defaults blanked)"""
        # NOTE(review): self.node_ids is listed in _properties but is not
        # defined in this class; presumably provided by the Load base — verify
        list_fields = ['PLOAD', self.sid, self.pressure] + self.node_ids
        return list_fields
    def repr_fields(self):
        """gets the fields of the card for repr (same as raw_fields)"""
        return self.raw_fields()
    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """
        The writer method used by BDF.write_card()

        Parameters
        ----------
        size : int; default=8
            the size of the card (8/16)
        """
        card = self.raw_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        if is_double:
            return self.comment + print_card_double(card)
        return self.comment + print_card_16(card)
class PLOAD1(Load):
"""
Applied Load on CBAR, CBEAM or CBEND Elements
Defines concentrated, uniformly distributed, or linearly distributed
applied loads to the CBAR or CBEAM elements at user-chosen points
along the axis. For the CBEND element, only distributed loads over
an entire length may be defined.
+--------+-----+------+------+-------+-----+-------+-----+-------+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+========+=====+======+======+=======+=====+=======+=====+=======+
| PLOAD1 | SID | EID | TYPE | SCALE | X1 | P1 | X2 | P2 |
+--------+-----+------+------+-------+-----+-------+-----+-------+
| PLOAD1 | 25 | 1065 | MY | FRPR | 0.2 | 2.5E3 | 0.8 | 3.5E3 |
+--------+-----+------+------+-------+-----+-------+-----+-------+
"""
type = 'PLOAD1'
valid_types = ['FX', 'FY', 'FZ', 'FXE', 'FYE', 'FZE',
'MX', 'MY', 'MZ', 'MXE', 'MYE', 'MZE']
# LE: length-based; FR: fractional; PR:projected
valid_scales = ['LE', 'FR', 'LEPR', 'FRPR']
@classmethod
def _init_from_empty(cls):
sid = 1
eid = 1
load_type = 'FX'
scale = 1.
x1 = 0.5
p1 = 1.
return PLOAD1(sid, eid, load_type, scale, x1, p1, x2=None, p2=None, comment='')
def __init__(self, sid, eid, load_type, scale, x1, p1, x2=None, p2=None, comment=''):
"""
Creates a PLOAD1 card, which may be applied to a CBAR/CBEAM
Parameters
----------
sid : int
load id
eid : int
element to apply the load to
load_type : str
type of load that's applied
valid_types = {FX, FY, FZ, FXE, FYE, FZE,
MX, MY, MZ, MXE, MYE, MZE}
scale : str
Determines scale factor for X1, X2.
{LE, FR, LEPR, FRPR}
x1 / x2 : float / float
the starting/end position for the load application
the default for x2 is x1
p1 / p2 : float / float
the magnitude of the load at x1 and x2
the default for p2 is p1
comment : str; default=''
a comment for the card
Point Load : x1 == x2
Distributed Load : x1 != x2
"""
if comment:
self.comment = comment
if x2 is None:
x2 = x1
if p2 is None:
p2 = p1
self.sid = sid
self.eid = eid
self.load_type = load_type
self.scale = scale
self.x1 = x1
self.p1 = p1
self.x2 = x2
self.p2 = p2
self.eid_ref = None
@property
def Type(self):
return self.load_type
@Type.setter
def Type(self, load_type):
self.load_type = load_type
@classmethod
def add_card(cls, card, comment=''):
"""
Adds a PLOAD1 card from ``BDF.add_card(...)``
Parameters
----------
card : BDFCard()
a BDFCard object
comment : str; default=''
a comment for the card
"""
sid = integer(card, 1, 'sid')
eid = integer(card, 2, 'eid')
load_type = string(card, 3, 'Type ("%s")' % '", "'.join(cls.valid_types))
scale = string(card, 4, 'scale ("%s")' % '", "'.join(cls.valid_scales))
x1 = double(card, 5, 'x1')
p1 = double(card, 6, 'p1')
x2 = double_or_blank(card, 7, 'x2', x1)
p2 = double_or_blank(card, 8, 'p2', p1)
assert len(card) <= 9, f'len(PLOAD1 card) = {len(card):d}\ncard={card}'
return PLOAD1(sid, eid, load_type, scale, x1, p1, x2, p2, comment=comment)
@classmethod
def add_op2_data(cls, data, comment=''):
"""
Adds a PLOAD1 card from the OP2
Parameters
----------
data : List[varies]
a list of fields defined in OP2 format
comment : str; default=''
a comment for the card
"""
sid = data[0]
eid = data[1]
load_type = data[2]
scale = data[3]
x1 = data[4]
p1 = data[5]
x2 = data[6]
p2 = data[7]
load_type = cls.valid_types[load_type - 1]
scale = cls.valid_scales[scale - 1]
return PLOAD1(sid, eid, load_type, scale, x1, p1, x2, p2, comment=comment)
def validate(self):
if self.load_type not in self.valid_types:
msg = '%s is an invalid type on the PLOAD1 card; valid_types=[%s]' % (
self.load_type, ', '.join(self.valid_types).rstrip(', '))
raise RuntimeError(msg)
if self.scale not in self.valid_scales:
msg = '%s is an invalid scale on the PLOAD1 card; valid_scales=[%s]' % (
self.scale, ', '.join(self.valid_scales).rstrip(', '))
raise RuntimeError(msg)
assert 0.0 <= self.x1 <= self.x2, '0.0 <= x1 <= x2 -> x1=%s x2=%s' % (self.x1, self.x2)
if self.scale in ['FR', 'FRPR']:
assert self.x1 <= 1.0, 'x1=%r' % self.x1
assert self.x2 <= 1.0, 'x2=%r' % self.x2
if self.scale not in self.valid_scales:
msg = '%s is an invalid scale on the PLOAD1 card; valid_scales=[%s]' % (
self.scale, ', '.join(self.valid_scales))
raise RuntimeError(msg)
def cross_reference(self, model: BDF) -> None:
"""
Cross links the card so referenced cards can be extracted directly
Parameters
----------
model : BDF()
the BDF object
"""
msg = ', which is required by PLOAD1 sid=%s' % self.sid
self.eid_ref = model.Element(self.eid, msg=msg)
def safe_cross_reference(self, model: BDF, safe_coord):
return self.cross_reference(model)
def uncross_reference(self) -> None:
"""Removes cross-reference links"""
self.eid = self.Eid()
self.eid_ref = None
def get_loads(self):
return [self]
def Eid(self):
if self.eid_ref is not None:
return self.eid_ref.eid
return self.eid
def raw_fields(self):
list_fields = ['PLOAD1', self.sid, self.Eid(), self.load_type, self.scale,
self.x1, self.p1, self.x2, self.p2]
return list_fields
def repr_fields(self):
    """PLOAD1 has no blank-able defaults, so repr fields match the raw fields."""
    return self.raw_fields()
def write_card(self, size: int=8, is_double: bool=False) -> str:
    """
    The writer method used by BDF.write_card()

    Parameters
    ----------
    size : int; default=8
        the size of the card (8/16)
    is_double : bool; default=False
        write in large-field double precision (size=16 only)
    """
    card = self.raw_fields()
    if size == 8:
        printer = print_card_8
    elif is_double:
        printer = print_card_double
    else:
        printer = print_card_16
    return self.comment + printer(card)
class PLOAD2(Load):
    """
    Defines a uniform static pressure load applied to shell elements.

    +--------+-----+------+------+------+------+------+------+------+
    |   1    |  2  |  3   |  4   |  5   |  6   |  7   |  8   |  9   |
    +========+=====+======+======+======+=============+======+======+
    | PLOAD2 | SID |  P   | EID1 | EID2 | EID3 | EID4 | EID5 | EID6 |
    +--------+-----+------+------+------+------+------+------+------+
    | PLOAD2 | 21  | -3.6 |  4   |  16  |  2   |      |      |      |
    +--------+-----+------+------+------+------+------+------+------+
    | PLOAD2 | SID |  P   | EID1 | THRU | EID2 |      |      |      |
    +--------+-----+------+------+------+------+------+------+------+
    """
    type = 'PLOAD2'
    _properties = ['element_ids', ]

    @classmethod
    def _init_from_empty(cls):
        # minimal valid card used by the card auto-tester
        sid = 1
        pressure = 1.
        eids = [1, 2]
        return PLOAD2(sid, pressure, eids, comment='')
def __init__(self, sid: int, pressure: float,
             eids: List[int], comment: str=''):
    """
    Creates a PLOAD2 card, which defines an applied load normal to the quad/tri face

    Parameters
    ----------
    sid : int
        load id
    pressure : float
        the pressure to apply to the elements
    eids : List[int]
        the elements to apply pressure to
        n < 6 or a continuous monotonic list of elements (e.g., [1, 2, ..., 1000])
    comment : str; default=''
        a comment for the card
    """
    if comment:
        self.comment = comment
    # BUG FIX: the single-integer -> list conversion previously assigned
    # self.eids directly and was then clobbered by ``self.eids = eids`` below
    if isinstance(eids, integer_types):
        eids = [eids]
    self.sid = sid
    self.pressure = pressure
    self.eids = eids
    self.eids_ref = None
@classmethod
def add_card(cls, card, comment: str=''):
    """
    Adds a PLOAD2 card from ``BDF.add_card(...)``

    Parameters
    ----------
    card : BDFCard()
        a BDFCard object
    comment : str; default=''
        a comment for the card
    """
    sid = integer(card, 1, 'sid')
    pressure = double(card, 2, 'p')

    if integer_string_or_blank(card, 4, 'THRU') == 'THRU':
        # continuous range form: EID1 THRU EID2
        e1 = integer(card, 3, 'Element1')
        # BUG FIX: field 5 was mislabeled 'Element1' in error messages
        e2 = integer(card, 5, 'Element2')
        eids = [i for i in range(e1, e2 + 1)]
        assert len(card) == 6, f'len(PLOAD2 card) = {len(card):d}\ncard={card}'
    else:
        # explicit element list; at most 6 ids fit on the card
        eids = fields(integer, card, 'eid', i=3, j=len(card))
        assert len(eids) <= 6, f'A maximum of 6 eids may be on the PLOAD2; n={len(eids)}\ncard={card}'
    return PLOAD2(sid, pressure, eids, comment=comment)
@classmethod
def add_op2_data(cls, data, comment=''):
    """
    Adds a PLOAD2 card from the OP2

    Parameters
    ----------
    data : List[varies]
        a list of fields defined in OP2 format
    comment : str; default=''
        a comment for the card
    """
    sid, pressure = data[0], data[1]
    # everything after the pressure is the element id list
    eids = list(data[2:])
    return PLOAD2(sid, pressure, eids, comment=comment)
def cross_reference(self, model: 'BDF') -> None:
    """
    Cross links the card so referenced cards can be extracted directly

    Parameters
    ----------
    model : BDF()
        the BDF object
    """
    self.eids_ref = model.Elements(
        self.eids, msg=', which is required by PLOAD2 sid=%s' % self.sid)
def safe_cross_reference(self, model: 'BDF', xref_errors: 'Dict[Any, Any]') -> None:
    """Safe version of cross_reference; missing elements are recorded in xref_errors."""
    msg = ', which is required by PLOAD2 sid=%s' % self.sid
    self.eids_ref = model.safe_elements(self.eids, self.sid, xref_errors, msg=msg)
def uncross_reference(self) -> None:
    """Removes cross-reference links"""
    eids = self.element_ids  # resolve while the refs are still alive
    self.eids = eids
    self.eids_ref = None
@property
def element_ids(self):
    """Gets the element ids, resolved through eids_ref when cross-referenced."""
    if self.eids_ref is not None:
        eids = [elem.eid for elem in self.eids_ref]
    else:
        eids = self.eids
    # BUG FIX: previously returned self.eids, discarding the resolved
    # cross-referenced ids computed above
    return eids
def get_loads(self):
    """A PLOAD2 resolves to itself (it is not a load container)."""
    return [self]
def raw_fields_separate(self, model: 'BDF') -> 'List[List[Any]]':
    """Builds one single-element PLOAD2 field list per element present in the model."""
    prefix = ['PLOAD2', self.sid, self.pressure]
    return [prefix + [eid]
            for eid in self.element_ids
            if eid in model.elements]
def raw_fields(self) -> 'List[Any]':
    """Returns the BDF field list, collapsing a long continuous range to EID1 THRU EID2."""
    list_fields = ['PLOAD2', self.sid, self.pressure]
    eids = self.element_ids
    if len(eids) <= 6:
        # short list: write the ids explicitly
        list_fields.extend(eids)
        return list_fields

    # long list: must be continuous so it can be written as a THRU range
    eids.sort()
    delta_eid = eids[-1] - eids[0] + 1
    if delta_eid != len(eids):
        raise RuntimeError(
            'eids=%s len(eids)=%s delta_eid=%s must be continuous' % (
                eids, len(eids), delta_eid))
    list_fields.extend([eids[0], 'THRU', eids[-1]])
    return list_fields
def repr_fields(self) -> 'List[Any]':
    """PLOAD2 has no blank-able defaults, so repr fields match the raw fields."""
    return self.raw_fields()
def write_card_separate(self, model: 'BDF', size: int=8, is_double: bool=False) -> str:
    """Writes one PLOAD2 line per existing element (see raw_fields_separate)."""
    if size == 8:
        printer = print_card_8
    elif is_double:
        printer = print_card_double
    else:
        printer = print_card_16
    msg = self.comment
    for card in self.raw_fields_separate(model):
        msg += printer(card)
    return msg
def write_card(self, size: int=8, is_double: bool=False) -> str:
    """
    The writer method used by BDF.write_card()

    Parameters
    ----------
    size : int; default=8
        the size of the card (8/16)
    is_double : bool; default=False
        write in large-field double precision (size=16 only)
    """
    card = self.raw_fields()
    if size == 8:
        printer = print_card_8
    elif is_double:
        printer = print_card_double
    else:
        printer = print_card_16
    return self.comment + printer(card)
#def PLOAD4_func(self, sid, eids, pressures,
#g1=None, g34=None, cid=0, nvector=None, surf_or_line='SURF',
#line_load_dir='NORM', comment=''):
#"""
#Creates a PLOAD4 card
#Solid Format
#============
#Defines a pressure load on a face of a CHEXA, CPENTA, or CTETRA element.
#+--------+-----+-----+----+----+------+------+------+-------+
#| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
#+========+=====+=====+====+====+======+======+======+=======+
#| PLOAD4 | SID | EID | P1 | P2 | P3 | P4 | G1 | G3/G4 |
#+--------+-----+-----+----+----+------+------+------+-------+
#| | CID | N1 | N2 | N3 | SORL | LDIR | | |
#+--------+-----+-----+----+----+------+------+------+-------+
#Shell Format
#============
#Defines a pressure load on a face of a CTRIA3, CTRIA6, CTRIAR,
#CQUAD4, CQUAD8, or CQUADR element.
#+--------+-----+-----+----+----+------+------+------+-------+
#| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
#+========+=====+=====+====+====+======+======+======+=======+
#| PLOAD4 | SID | EID | P1 | P2 | P3 | P4 | THRU | EID2 |
#+--------+-----+-----+----+----+------+------+------+-------+
#| | CID | N1 | N2 | N3 | SORL | LDIR | | |
#+--------+-----+-----+----+----+------+------+------+-------+
#.. warning:: NX does not support SORL and LDIR, MSC does
#"""
#if g34 is None:
#return PLOAD4Solid(
#sid, eids, pressures,
#g1=None, g34=None, cid=0, nvector=None, surf_or_line='SURF',
#line_load_dir='NORM', comment='')
#return PLOAD4Shell(
#sid, eids, pressures, cid=0, nvector=None, surf_or_line='SURF',
#line_load_dir='NORM', comment='')
#class PLOAD4Shell(PLOAD4):
#def __init__(self, sid, eids, pressures, g1=None, g34=None, cid=0,
#nvector=None, surf_or_line='SURF',
#line_load_dir='NORM', comment=''):
#PLOAD4.__init__(self, sid, eids, pressures, g1=None, g34=None,
#cid=0, nvector=None,
#surf_or_line='SURF',
#line_load_dir='NORM',
#comment='')
#class PLOAD4Shell(PLOAD4):
#def __init__(self, sid, eids, pressures, g1=None, g34=None, cid=0,
#nvector=None, surf_or_line='SURF',
#line_load_dir='NORM', comment=''):
#PLOAD4.__init__(self, sid, eids, pressures, g1=g1, g34=g34,
#cid=cid, nvector=nvector,
#surf_or_line=surf_or_line,
#line_load_dir=line_load_dir,
#comment=comment)
class PLOAD4(Load):
    """
    Defines a pressure load on an element face.

    ``Solid Format``
    Defines a pressure load on a face of a CHEXA, CPENTA, or CTETRA element.

    +--------+-----+-----+----+----+------+------+------+-------+
    |   1    |  2  |  3  | 4  | 5  |  6   |  7   |  8   |   9   |
    +========+=====+=====+====+====+======+======+======+=======+
    | PLOAD4 | SID | EID | P1 | P2 |  P3  |  P4  |  G1  | G3/G4 |
    +--------+-----+-----+----+----+------+------+------+-------+
    |        | CID | N1  | N2 | N3 | SORL | LDIR |      |       |
    +--------+-----+-----+----+----+------+------+------+-------+

    ``Shell Format``
    Defines a pressure load on a face of a CTRIA3, CTRIA6, CTRIAR,
    CQUAD4, CQUAD8, or CQUADR element.

    +--------+-----+-----+----+----+------+------+------+-------+
    |   1    |  2  |  3  | 4  | 5  |  6   |  7   |  8   |   9   |
    +========+=====+=====+====+====+======+======+======+=======+
    | PLOAD4 | SID | EID | P1 | P2 |  P3  |  P4  | THRU | EID2  |
    +--------+-----+-----+----+----+------+------+------+-------+
    |        | CID | N1  | N2 | N3 | SORL | LDIR |      |       |
    +--------+-----+-----+----+----+------+------+------+-------+

    .. warning:: NX does not support SORL and LDIR, MSC does
    """
    type = 'PLOAD4'
    _properties = ['node_ids', 'element_ids']

    @classmethod
    def _init_from_empty(cls):
        # minimal valid card used by the card auto-tester
        sid = 1
        eids = [1]
        pressures = [1.]
        g1 = None
        g34 = None
        return PLOAD4(sid, eids, pressures, g1, g34,
                      cid=0,
                      surf_or_line='SURF')
def __init__(self, sid, eids, pressures, g1, g34,
             cid=0, nvector=None, surf_or_line='SURF',
             line_load_dir='NORM', comment=''):
    """
    Creates a PLOAD4 card

    Parameters
    ----------
    sid : int
        the load id
    eids : List[int, ...]
        shells : the range of element ids; must be sequential
        solids : must be length 1
    pressures : List[float, float, float, float] / float
        float : turned into a list of length 4
        List[float] :
            tri : must be length 4 (the last value should be the same as the 0th value)
            quad : must be length 4
    g1 : int/None
        only used for solid elements
    g34 : int / None
        only used for solid elements
    cid : int; default=0
        the coordinate system for nvector
    nvector : (3, ) float ndarray
        blank : load acts normal to the face
        float : the local pressure vector
    surf_or_line : str; default='SURF'
        SURF : surface load
        LINE : line load (only defined for QUADR, TRIAR)
        not supported
    line_load_dir : str; default='NORM'
        direction of the line load (see surf_or_line); {X, Y, Z, TANG, NORM}
        not supported
    comment : str; default=''
        a comment for the card

    TODO: fix the way "pressures" works
    """
    # a missing direction vector is stored as the zero vector, which later
    # means "act normal to the face"
    if nvector is None:
        nvector = np.zeros(3, dtype='float64')
    else:
        nvector = np.asarray(nvector, dtype='float64')

    if comment:
        self.comment = comment
    if isinstance(eids, integer_types):
        eids = [eids]
    if isinstance(pressures, float_types):
        pressures = [pressures] * 4

    # TODO: handle default pressure as input
    self.sid = sid

    # these can be greater than 1 if it's a shell (not a solid)
    self.eids = eids
    # None entries (blank card fields) become nan through asarray...
    self.pressures = np.asarray(pressures, dtype='float64')
    if surf_or_line == 'SURF':
        # ...and for surface loads, blank pressures default to p1
        inan = np.isnan(self.pressures)
        self.pressures[inan] = pressures[0]

    #: used for solid element only
    self.g1 = g1
    #: g3/g4 - different depending on CHEXA/CPENTA or CTETRA
    self.g34 = g34

    #: Coordinate system identification number. See Remark 2.
    #: (Integer >= 0;Default=0)
    self.cid = cid
    self.nvector = nvector
    # flag with values of SURF/LINE
    self.surf_or_line = surf_or_line

    # Line load direction
    #
    #   1. X, Y, Z : line load in x/y/z in the element coordinate
    #      system
    #   2. TANG : line load is tangent to the edge pointing
    #      from G1 to G2
    #   3. NORM : line load is in the mean plane, normal to the
    #      edge and pointing outwards from the element
    #
    #   if cid=N123 = 0: line_load_dir_default=NORM
    self.line_load_dir = line_load_dir

    #self.eid_ref = None
    self.g1_ref = None
    self.g34_ref = None
    self.cid_ref = None
    self.eids_ref = None
def validate(self):
    """Checks surf_or_line, line_load_dir, g1, and g34 for legal values."""
    if self.surf_or_line not in ('SURF', 'LINE'):
        raise RuntimeError('PLOAD4; sid=%s surf_or_line=%r' % (self.sid, self.surf_or_line))
    if self.line_load_dir not in ('LINE', 'X', 'Y', 'Z', 'TANG', 'NORM'):
        raise RuntimeError(self.line_load_dir)
    # node ids of 0 are never valid (None means "not a solid face load")
    assert self.g1 != 0, str(self)
    assert self.g34 != 0, str(self)
@classmethod
def add_card(cls, card, comment=''):
    """
    Adds a PLOAD4 card from ``BDF.add_card(...)``

    Parameters
    ----------
    card : BDFCard()
        a BDFCard object
    comment : str; default=''
        a comment for the card
    """
    sid = integer(card, 1, 'sid')
    eid = integer(card, 2, 'eid')
    p1 = double_or_blank(card, 3, 'p1', 0.0)
    # blank p2..p4 stay None here; __init__ later fills them with p1 for SURF
    pressures = [
        p1,
        double_or_blank(card, 4, 'p2'),
        double_or_blank(card, 5, 'p3'),
        double_or_blank(card, 6, 'p4')]

    eids = [eid]
    # field 7 is either G1 (solid face) or the literal 'THRU' (shell range)
    g1_thru = integer_string_or_blank(card, 7, 'g1/THRU')
    if g1_thru == 'THRU' and integer_or_blank(card, 8, 'eid2'):
        # alternate form (shells): EID THRU EID2
        eid2 = integer(card, 8, 'eid2')
        if eid2:
            eids = list(unique(
                expand_thru([eid, 'THRU', eid2], set_fields=False, sort_fields=False)
            ))
        g1 = None
        g34 = None
    else:
        # standard form: G1 and G3/G4 identify the loaded solid face
        eids = [eid]
        g1 = integer_or_blank(card, 7, 'g1')
        g34 = integer_or_blank(card, 8, 'g34')

    # If both (CID, N1, n2, N3) and LDIR are blank, then the default is
    # LDIR=NORM.
    cid = integer_or_blank(card, 9, 'cid')
    n1 = double_or_blank(card, 10, 'N1', 0.)
    n2 = double_or_blank(card, 11, 'N2', 0.)
    n3 = double_or_blank(card, 12, 'N3', 0.)
    nvector = array([n1, n2, n3])

    surf_or_line = string_or_blank(card, 13, 'sorl', 'SURF')
    line_load_dir = string_or_blank(card, 14, 'ldir', 'NORM')
    assert len(card) <= 15, f'len(PLOAD4 card) = {len(card):d}\ncard={card}'
    return PLOAD4(sid, eids, pressures, g1, g34, cid, nvector,
                  surf_or_line, line_load_dir, comment=comment)
@classmethod
def add_op2_data(cls, data, comment=''):
    """
    Adds a PLOAD4 card from the OP2

    Parameters
    ----------
    data : List[varies]
        a list of fields defined in OP2 format
    comment : str; default=''
        a comment for the card
    """
    sid = data[0]
    eid = data[1]
    pressures = data[2]

    g1 = data[3]
    g34 = data[4]
    # the OP2 writes 0 for "no node"; map it back to None
    if g1 == 0:
        g1 = None
    if g34 == 0:
        g34 = None
    cid = data[5]
    nvector = data[6]

    # NOTE(review): this equality assumes nvector is a plain list; if the OP2
    # reader ever passed an ndarray, ``==`` would be elementwise and this
    # branch would raise on truthiness -- confirm the upstream type
    if cid == 0 and nvector == [0., 0., 0.]:
        # these are apparently the secret defaults
        # it just means to use the normal vector
        cid = None
        nvector = None

    surf_or_line = data[7]
    eids = [eid]
    if data[7] is None:
        # older/NX tables have no SORL/LDIR fields
        surf_or_line = 'SURF'
        assert data[8] is None, data
        line_load_dir = 'NORM'
    else:
        surf_or_line = data[7]
        line_load_dir = data[8]
    pload4 = PLOAD4(sid, eids, pressures, g1, g34, cid, nvector,
                    surf_or_line, line_load_dir, comment=comment)
    # sanity checks against corrupt OP2 data
    assert sid < 10000000, pload4
    if cid is not None:
        assert cid < 10000000, pload4
    return pload4
def get_loads(self):
    """A PLOAD4 resolves to itself (it is not a load container)."""
    return [self]
def Cid(self):
    """Gets the coordinate system id, whether cross-referenced or not."""
    cid_ref = self.cid_ref
    return self.cid if cid_ref is None else cid_ref.cid
def cross_reference(self, model: 'BDF') -> None:
    """
    Cross links the card so referenced cards can be extracted directly

    Parameters
    ----------
    model : BDF()
        the BDF object
    """
    sid_msg = ', which is required by PLOAD4 sid=%s' % self.sid
    if self.cid is not None:
        self.cid_ref = model.Coord(self.cid, msg=sid_msg)
    if self.g1 is not None:
        self.g1_ref = model.Node(self.g1, msg='%s; g1' % sid_msg)
    if self.g34 is not None:
        self.g34_ref = model.Node(self.g34, msg='%s; g34' % sid_msg)
    if self.eids:
        self.eids_ref = model.Elements(self.eids, msg=sid_msg)
def safe_cross_reference(self, model: 'BDF', xref_errors, debug=True):
    """Safe version of cross_reference; missing nodes/elements are logged, not raised."""
    msg = ', which is required by PLOAD4 sid=%s' % self.sid
    if self.cid is not None:
        self.cid_ref = model.safe_coord(self.cid, self.sid, xref_errors, msg=msg)

    if self.g1 is not None:
        try:
            self.g1_ref = model.Node(self.g1, msg=msg)
        except KeyError:
            model.log.warning('Could not find g1=%s%s' % (self.g1, msg))

    if self.g34 is not None:
        try:
            self.g34_ref = model.Node(self.g34, msg=msg)
        except KeyError:
            model.log.warning('Could not find g34=%s%s' % (self.g34, msg))

    # element lookup never raises; failures come back in msgi
    msgia = 'Could not find element=%%s%s\n' % msg
    self.eids_ref, msgi = model.safe_get_elements(self.eids, msg=msgia)
    if msgi:
        model.log.warning(msgi.rstrip())
def uncross_reference(self) -> None:
    """Removes cross-reference links, restoring the raw integer ids."""
    # ids must be resolved before the reference objects are dropped
    self.cid = self.Cid()
    if self.g1 is not None:
        self.g1 = self.G1()
    if self.g34 is not None:
        self.g34 = self.G34()
    self.eids = self.element_ids
    for ref_name in ('g1_ref', 'g34_ref', 'cid_ref', 'eids_ref'):
        setattr(self, ref_name, None)
def G1(self):
    """Gets the g1 node id, whether cross-referenced or not."""
    g1_ref = self.g1_ref
    return self.g1 if g1_ref is None else g1_ref.nid
def G34(self):
    """Gets the g3/g4 node id, whether cross-referenced or not."""
    g34_ref = self.g34_ref
    return self.g34 if g34_ref is None else g34_ref.nid
@property
def node_ids(self):
    """[g1, g3/g4] as integer ids (None entries are preserved)."""
    return [self.G1(), self.G34()]
def get_element_ids(self, eid=None):
    """
    Gets the element ids.

    Parameters
    ----------
    eid : unused; default=None
        kept for backward compatibility

    Returns
    -------
    eids : List[int]
        the element ids; bare integers left in eids_ref by
        safe_cross_reference are passed through unchanged
    """
    if self.eids_ref is None:
        return self.eids
    try:
        eids = [eid_ref.eid for eid_ref in self.eids_ref]
    except AttributeError:
        eids = []
        for eid_ref in self.eids_ref:
            if isinstance(eid_ref, integer_types):
                # Nastran is NOT OK with elements that don't actually exist in the PLOAD4
                # we do this for safe_cross_reference
                # BUG FIX: previously appended the unused ``eid`` argument (None)
                eids.append(eid_ref)
            else:
                eids.append(eid_ref.eid)
    return eids
@property
def element_ids(self):
    """Gets the element ids (see get_element_ids)."""
    return self.get_element_ids()
def repr_fields(self):
    """Gets the fields in their simplified form (defaults blanked out)."""
    eids = self.element_ids
    eid = eids[0]
    p1 = self.pressures[0]
    # p2..p4 are blanked when equal to p1
    p2 = set_blank_if_default(self.pressures[1], p1)
    p3 = set_blank_if_default(self.pressures[2], p1)
    p4 = set_blank_if_default(self.pressures[3], p1)
    list_fields = ['PLOAD4', self.sid, eid, self.pressures[0], p2, p3, p4]

    if self.g1 is not None:
        # is it a SOLID element
        node_ids = self.node_ids
        list_fields += node_ids
    else:
        # shell form: EID1 THRU EID2 (or two blanks for a single element)
        if len(eids) > 1:
            try:
                list_fields.append('THRU')
                eidi = eids[-1]
            except Exception:
                print("g1  = %s" % self.g1)
                print("g34 = %s" % self.g34)
                print("self.eids = %s" % self.eids)
                raise
            list_fields.append(eidi)
        else:
            list_fields += [None, None]

    # second card line:
    #+--------+-----+-----+----+----+------+------+------+-------+
    #|        | CID | N1  | N2 | N3 | SORL | LDIR |      |       |
    #+--------+-----+-----+----+----+------+------+------+-------+
    cid = self.Cid()
    if cid is not None or np.abs(self.nvector).max() > 0.:
        n1, n2, n3 = self.nvector
        list_fields.append(cid)
        list_fields += [n1, n2, n3]
        surf_or_line = self.surf_or_line
        line_load_dir = self.line_load_dir
    else:
        list_fields += [None, None, None, None]
        surf_or_line = set_blank_if_default(self.surf_or_line, 'SURF')
        line_load_dir = set_blank_if_default(self.line_load_dir, 'NORM')
    list_fields.append(surf_or_line)
    if surf_or_line == 'LINE':
        list_fields.append(line_load_dir)
    return list_fields
def raw_fields(self):
    """Gets the fields in their unmodified form."""
    eids = self.element_ids
    eid = eids[0]
    p1 = self.pressures[0]
    p2 = self.pressures[1]
    p3 = self.pressures[2]
    p4 = self.pressures[3]
    list_fields = ['PLOAD4', self.sid, eid, p1, p2, p3, p4]
    if self.g1 is not None:
        # is it a SOLID element
        node_ids = self.node_ids
        list_fields += node_ids
    else:
        # shell form: EID1 THRU EID2 (or two blanks for a single element)
        if len(eids) > 1:
            try:
                list_fields.append('THRU')
                eidi = eids[-1]
            except Exception:
                print("g1  = %s" % self.g1)
                print("g34 = %s" % self.g34)
                print("self.eids = %s" % self.eids)
                raise
            list_fields.append(eidi)
        else:
            list_fields += [None, None]

    cid = self.Cid()
    # NOTE(review): this uses isnan while repr_fields uses abs().max() > 0
    # for the same decision -- presumably equivalent here since __init__
    # stores a concrete float vector; confirm before unifying
    if cid is not None or not np.all(np.isnan(self.nvector)):
        n1 = self.nvector[0]
        n2 = self.nvector[1]
        n3 = self.nvector[2]
        list_fields.append(cid)
        list_fields += [n1, n2, n3]
    else:
        list_fields += [None, None, None, None]

    surf_or_line = self.surf_or_line
    line_load_dir = self.line_load_dir
    list_fields.append(surf_or_line)
    if surf_or_line == 'LINE':
        list_fields.append(line_load_dir)
    return list_fields
def write_card(self, size: int=8, is_double: bool=False) -> str:
    """
    The writer method used by BDF.write_card()

    Parameters
    ----------
    size : int; default=8
        the size of the card (8/16)
    is_double : bool; default=False
        unused; PLOAD4 is never written in double precision
    """
    printer = print_card_8 if size == 8 else print_card_16
    return self.comment + printer(self.repr_fields())
def update_pload4_vector(pload4: 'PLOAD4', normal, cid: int):
    """
    Returns the load direction for a PLOAD4: the element surface normal
    when no nvector is set, otherwise the normalized nvector.

    Raises NotImplementedError for a nonzero nvector in a non-basic
    coordinate system (only cid in {0, None} is supported).
    """
    nvector = pload4.nvector
    if np.abs(nvector).max() == 0.:
        # no direction vector -> keep the element surface normal
        return normal
    if cid in [0, None]:
        return nvector / np.linalg.norm(nvector)
    raise NotImplementedError('cid=%r nvector=%s on a PLOAD4 is not supported\n%s' % (
        cid, nvector, str(pload4)))
def normalize(self, msg: str=''):
    """
    adjust the vector to a unit length
    scale up the magnitude of the vector
    """
    assert abs(self.mag) > 0, 'mag=%s\n%s' % (self.mag, self)
    if abs(self.mag) == 0.0:
        # only reachable when asserts are disabled (python -O)
        return
    norm_xyz = norm(self.xyz)
    if norm_xyz == 0.0:
        raise RuntimeError('xyz=%s norm_xyz=%s' % (self.xyz, norm_xyz))
    try:
        self.xyz = self.xyz / norm_xyz
    except FloatingPointError:
        # only fires when numpy error trapping is enabled (np.seterr)
        msgi = 'xyz = %s\n' % self.xyz
        msgi += 'norm_xyz = %s\n' % norm_xyz
        msgi += 'card =\n%s' % str(self)
        msgi += msg
        raise FloatingPointError(msgi)
#def normalize(self):
#"""
#adjust the vector to a unit length
#scale up the magnitude of the vector
#"""
#if self.mag != 0.0: # enforced displacement
#norm_xyz = norm(self.xyz)
##mag = self.mag*norm_xyz
#self.mag *= norm_xyz
#self.xyz /= norm_xyz
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,631
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/converters/tecplot/tecplot.py
|
"""
models from:
http://people.sc.fsu.edu/~jburkardt/data/tec/tec.html
"""
import sys
import os
from struct import unpack
import itertools
from typing import List, Optional, Any
import numpy as np
from cpylog import SimpleLogger, get_logger2
from pyNastran.utils import is_binary_file, object_attributes, object_methods, object_stats
from pyNastran.converters.tecplot.zone import Zone, CaseInsensitiveDict, is_3d
class Base:
    """Mixin exposing pyNastran's introspection helpers as methods.

    NOTE(review): the first parameter is named ``obj`` rather than ``self``;
    when called on an instance, the instance itself is what gets introspected.
    The bodies delegate to the module-level functions imported from
    pyNastran.utils (class-scope names do not shadow globals inside methods).
    """

    def object_attributes(obj: Any, mode: str='public',
                          keys_to_skip: Optional[List[str]]=None,
                          filter_properties: bool=False) -> List[str]:
        """List the attribute names of this object.

        mode is one of 'public', 'private', 'both', 'all'; keys_to_skip names
        attributes to ignore; filter_properties drops @property objects.
        Returns a sorted list of names (see pyNastran.utils.object_attributes).
        """
        return object_attributes(obj,
                                 mode=mode,
                                 keys_to_skip=keys_to_skip,
                                 filter_properties=filter_properties)

    def object_methods(obj: Any, mode: str='public',
                       keys_to_skip: Optional[List[str]]=None) -> List[str]:
        """List the method names of this object.

        mode is one of 'public', 'private', 'both', 'all'; keys_to_skip names
        methods to ignore.  Returns a sorted list of names
        (see pyNastran.utils.object_methods).
        """
        return object_methods(obj,
                              mode=mode,
                              keys_to_skip=keys_to_skip)

    def object_stats(obj: Any, mode: str='public',
                     keys_to_skip: Optional[List[str]]=None,
                     filter_properties: bool=False) -> str:
        """Return an easy-to-read summary of this object
        (see pyNastran.utils.object_stats)."""
        return object_stats(obj,
                            mode=mode,
                            keys_to_skip=keys_to_skip,
                            filter_properties=filter_properties)
def read_tecplot(tecplot_filename: str, use_cols=None, dtype=None,
                 filetype='guess',
                 log=None, debug=False):
    """Loads a tecplot file.

    Parameters
    ----------
    tecplot_filename : str
        the file to load
    use_cols / dtype : optional
        column subset and dtype forwarded to the table reader
    filetype : str; default='guess'
        'guess', 'ascii', or 'binary'
    log / debug : optional
        logging controls

    Returns
    -------
    tecplot : Tecplot
        the populated model
    """
    model = Tecplot(log=log, debug=debug)
    if use_cols:
        model.use_cols = use_cols
        model.dtype = dtype
    model.read_tecplot(tecplot_filename, filetype)
    return model
class Tecplot(Base):
    """
    Parses a hexa binary/ASCII Tecplot 360 file.
    Writes an ASCII Tecplot 10 file.

    Supports:
     - title
     - single zone only?
     - unstructured:
       - nodal results
       - single element type; ZONETYPE = [FETRIANGLE, FEQUADRILATERAL, FETETRAHEDRON, FEBRICK]
       - DATAPACKING = [POINT, ELEMENT] writing
       - 2D/3D
       - full support for writing
     - structured:
       - nodal results
       - F=POINT
       - 2d/3d (POINT, I/J/K)
       - full support for writing
       - no reshaping of xyz to make slicing easier!

    Doesn't support:
     - text
     - geometry
     - transient writing
     - centroidal results
     - non-sequential node ids
     - data lists (100*0.0)
    """
def __repr__(self):
    """Concatenated summary of every zone in the model."""
    return ''.join(str(zone) for zone in self.zones)
def __init__(self, log=None, debug: bool=False):
    """Initializes an empty Tecplot model; call read_tecplot to populate it."""
    # defines binary file specific features
    self._endian = b'<'
    self._n = 0

    self.tecplot_filename = ''
    self.log = get_logger2(log, debug=debug)
    self.debug = debug

    # is_mesh = None  : model hasn't been read
    # is_mesh = True  : this is a structured/unstructured grid
    # is_mesh = False : this is a plot file
    self.is_mesh = None

    self.title = 'tecplot geometry and solution file'
    self.variables = None
    self.zones = []

    # optional column subset used by the table reader
    self.use_cols = None
    # TODO: what is this for?
    self.dtype = None
    self._uendian = ''
    self.n = 0
@property
def nzones(self):
    """int: the number of zones"""
    return len(self.zones)
def read_tecplot(self, tecplot_filename: str, filetype: str='guess'):
    """
    Reads an ASCII/binary Tecplot file.

    The binary reader requires a Tecplot 360 CHEXA-only model with
    `rho`, `u`, `v`, `w`, and `p`; the ASCII reader (tested with
    Tecplot 10) should handle any variable set.
    """
    filetype = filetype.lower()
    assert filetype in ['guess', 'ascii', 'binary'], filetype
    read_binary = (filetype == 'binary' or
                   (filetype == 'guess' and is_binary_file(tecplot_filename)))
    if read_binary:
        return self.read_tecplot_binary(tecplot_filename)
    return self.read_tecplot_ascii(tecplot_filename)
def read_tecplot_ascii(self, tecplot_filename, nnodes=None, nelements=None):
    """
    Reads a Tecplot ASCII file.

    Supports:
     - CTRIA3
     - CQUAD4
     - CTETRA
     - CHEXA

    .. note :: assumes single typed results
    .. warning:: BLOCK option doesn't work if line length isn't the same...
    """
    self.tecplot_filename = tecplot_filename
    assert os.path.exists(tecplot_filename), tecplot_filename

    iline = 0
    # NOTE(review): the nnodes/nelements arguments are immediately
    # overwritten here, so they are effectively unused -- confirm intent
    nnodes = -1
    nelements = -1
    with open(tecplot_filename, 'r') as tecplot_file:
        lines = tecplot_file.readlines()
    del tecplot_file

    # per-zone accumulators, stacked into the zone at the end of each pass
    quads_list = []
    hexas_list = []
    tris_list = []
    tets_list = []

    xyz_list = []
    results_list = []

    line = lines[iline].strip()
    iline += 1
    iblock = 0
    while 1:
        # consume the TITLE/VARIABLES/ZONE header for the next zone
        iline, title_line, header_lines, line = _read_header_lines(
            lines, iline, line, self.log)
        headers_dict = _header_lines_to_header_dict(title_line, header_lines,
                                                    self.variables, self.log)
        if headers_dict is None:
            # no more zones
            break
        zone = Zone(self.log)
        zone.headers_dict = headers_dict
        self.variables = headers_dict['VARIABLES']
        if 'ZONETYPE' in headers_dict:
            # unstructured zone: ZONETYPE + DATAPACKING
            zone_type = headers_dict['ZONETYPE'].upper()  # FEBrick
            data_packing = headers_dict['DATAPACKING'].upper()  # block
            iline = self._read_zonetype(
                zone, zone_type, lines, iline, iblock, headers_dict, line,
                nnodes, nelements,
                xyz_list, hexas_list, tets_list, quads_list, tris_list,
                results_list,
                data_packing=data_packing)
        elif 'F' in headers_dict:
            # legacy format flag (e.g. F=FEPOINT)
            fe = headers_dict['F']  # FEPoint
            assert isinstance(fe, str), headers_dict
            zone_type = fe.upper()  # FEPoint
            self.log.debug('zone_type = %r' % zone_type[0])
            iline = self._read_zonetype(
                zone, zone_type, lines, iline, iblock, headers_dict, line,
                nnodes, nelements,
                xyz_list, hexas_list, tets_list, quads_list, tris_list,
                results_list,
                fe=fe)
            iline -= 1
        elif (('ZONE' in headers_dict) and
              (headers_dict['ZONE'] is None) and
              ('T' in headers_dict)):
            # plain "ZONE T" table: read it and stop (no mesh zones follow)
            lines2 = itertools.chain((line, ), iter(lines[iline:]))
            A, line = self._read_table_from_lines(lines2, headers_dict)
            self.A = A
            self.log.debug(f'read_table; A.shape={A.shape}...')
            return
        else:
            msg = 'Expected ZONETYPE, F, or "ZONE T"\n'
            msg += 'headers=%s\n' % str(headers_dict)
            msg += 'line = %r' % line.strip()
            raise NotImplementedError(msg)
        self.zones.append(zone)

        # merge the per-zone accumulators into the zone arrays
        _stack(zone, xyz_list, quads_list, tris_list, tets_list, hexas_list, results_list, self.log)
        if line is None:
            return
        try:
            line = lines[iline].strip()
        except IndexError:
            break

        # reset the accumulators for the next zone
        quads_list = []
        hexas_list = []
        tris_list = []
        tets_list = []
        xyz_list = []
        results_list = []
def read_table(self, tecplot_file, unused_iblock, headers_dict, line):
    """
    Reads a space-separated tabular data block.

    ``line`` (already consumed by the header parser) is prepended to the
    file iterator so that no data row is lost.
    """
    lines = itertools.chain((line, ), iter(tecplot_file))
    A, unused_blank = self._read_table_from_lines(lines, headers_dict)
    return A, None
def _read_table_from_lines(self, lines, headers_dict):
    """Parses a whitespace-separated table with np.loadtxt, optionally
    restricted to the columns named in self.use_cols."""
    variables = [var.strip('" ') for var in headers_dict['VARIABLES']]
    # map the requested variable names to column indices (None -> all columns)
    use_cols = (None if self.use_cols is None
                else [variables.index(var) for var in self.use_cols])

    A = np.loadtxt(lines, dtype=self.dtype, comments='#', delimiter=None,
                   converters=None, skiprows=0,
                   usecols=use_cols, unpack=False, ndmin=0)
    return A, None
    def _read_zonetype(self, zone, zone_type, lines, iline, iblock, headers_dict, line,
                       nnodes, nelements,
                       xyz_list, hexas_list, tets_list, quads_list, tris_list,
                       results_list,
                       data_packing=None, fe=None):
        """
        Reads a single zone's node/element/result data and appends it to the
        running ``*_list`` accumulators (stacked later by ``_stack``).

        Parameters
        ----------
        zone_type : str
        fe : str
          - a zone_type.upper() string???
          - FEPOINT

        reads:
          - ZONE E
          - ZONE T

        ZONE is a flag, T is title, E is number of elements

        ------------- --------- ---------- ----------------------------------------------
        Parameter     Ordered   Finite     Description
                      Data      Element
        ------------- --------- ---------- ----------------------------------------------
        T="title"     Yes       Yes        Zone title.
        I=imax        Yes       No         Number of points in 1st dimension.
        J=jmax        Yes       No         Number of points in 2nd dimension.
        K=kmax        Yes       No         Number of points in 3rd dimension.
        C=colour      Yes       Yes        Colour from WHITE, BLACK, RED, GREEN,
                                           BLUE, CYAN, YELLOW, PURPLE,
                                           CUST1, CUST2,....CUST8.
        F=format      Yes       Yes        POINT or BLOCK for ordered data.
                                           FEPOINT or FEBLOCK for finite element.
        D=(list)      Yes       Yes        A list of variable names to to include
                                           from the last zone.
        DT=(list)     Yes       Yes        A list of datatypes for each variable.
                                           SINGLE, DOUBLE, LONGINT, SHORTINT, BYTE, BIT.
        N=num         No        Yes        Number of nodes.
        E=num         No        Yes        Number of elements.
        ET=type       No        Yes        Element type from TRIANGLE, BRICK,
                                           QUADRILATERAL, TETRAHEDRON.
        NV=variable   No        Yes        Variable for node value.
        ------------- --------- ---------- ----------------------------------------------
        http://paulbourke.net/dataformats/tp/
        """
        #print('self.variables', self.variables)
        #ndim = zone.ndim
        #print('iblock =', iblock)
        if iblock == 0:
            # first zone: lock in the variable names and the result count
            variables = headers_dict['VARIABLES']
            zone.variables = [variable.strip(' \r\n\t"\'') for variable in variables]
            self.log.debug('zone.variables = %s' % zone.variables)
            nresults = len(variables) - 3  # x, y, z, rho, u, v, w, p
            self.log.debug('nresults = %s' % nresults)
        # NOTE(review): nresults is only bound when iblock == 0; later blocks
        # would hit a NameError below — presumably iblock restarts per zone;
        # TODO confirm with the caller.

        self.log.debug(str(headers_dict))
        is_unstructured = False
        is_structured = False
        if zone_type in ['FETRIANGLE', 'FEQUADRILATERAL', 'FETETRAHEDRON', 'FEBRICK']:
            # finite-element zones declare node/element counts directly
            nnodesi = headers_dict['N']
            nelementsi = headers_dict['E']
            is_unstructured = True
        elif zone_type in ['POINT', 'BLOCK']:  # structured
            # structured zones imply counts from the I/J/K grid dimensions
            ni = headers_dict['I']
            if 'J' in headers_dict:
                nj = headers_dict['J']
                if 'K' in headers_dict:
                    # 3d
                    nk = headers_dict['K']
                    nnodesi = ni * nj * nk
                    nelementsi = (ni - 1) * (nj - 1) * (nk - 1)
                else:
                    # 2d
                    nnodesi = ni * nj
                    nelementsi = (ni - 1) * (nj - 1)
            else:
                # 1d line of points
                assert 'K' not in headers_dict, list(headers_dict.keys())
                nnodesi = ni
                nelementsi = (ni - 1)
                assert nelementsi >= 0, nelementsi
            #nelementsi = 0
            elements = None # np.zeros((nelementsi, 8), dtype='int32')
            is_structured = True
        else:
            raise NotImplementedError('zone_type = %r' % zone_type)
        self.log.info(f'zone_type={zone_type} data_packing={data_packing} '
                      f'nnodes={nnodesi} nelements={nelementsi}')
        assert nnodesi > 0, nnodesi
        assert nresults >= 0, 'nresults=%s' % nresults
        xyz = np.zeros((nnodesi, 3), dtype='float32')
        results = np.zeros((nnodesi, nresults), dtype='float32')
        # allocate the connectivity for the finite-element zone types
        if zone_type == 'FEBRICK':
            # hex
            elements = np.zeros((nelementsi, 8), dtype='int32')
        elif zone_type in ('FEPOINT', 'FEQUADRILATERAL', 'FETETRAHEDRON'):
            # quads / tets
            elements = np.zeros((nelementsi, 4), dtype='int32')
        elif zone_type == 'FETRIANGLE':
            # tris
            elements = np.zeros((nelementsi, 3), dtype='int32')
        #elif zone_type == 'FEBLOCK':
            #pass
        elif zone_type in ['POINT', 'BLOCK']:
            # already handled
            #print('data')
            pass
        else:
            #if isinstance(zone_type, list):
                #raise NotImplementedError(zone_type[0])
            raise NotImplementedError(zone_type)

        # ----- read the nodal data -----
        sline = split_line(line.strip())
        if zone_type in ('FEBRICK', 'FETETRAHEDRON'):
            if data_packing == 'POINT':
                # one node per line: x y z res1 res2 ...
                for inode in range(nnodesi):
                    if inode == 0:
                        self.log.debug('zone_type=%s sline=%s' %(zone_type, sline))
                    if not len(sline[3:]) == len(results[inode, :]):
                        msg = 'sline[3:]=%s results[inode, :]=%s' % (sline[:3], results[inode, :])
                        raise RuntimeError(msg)
                    try:
                        xyz[inode, :] = sline[:3]
                        results[inode, :] = sline[3:]
                    except ValueError:
                        msg = 'i=%s line=%r\n' % (inode, line)
                        msg += 'sline = %s' % str(sline)
                        print(msg)
                        raise
                    iline, line, sline = get_next_sline(lines, iline)
            elif data_packing == 'BLOCK':
                # all xs, then all ys, ... (PLOT3D-style)
                iline, line, sline = read_zone_block(lines, iline, xyz, results, nresults, zone_type,
                                                     sline, nnodesi, self.log)
                #print('sline =', sline)
            else:
                raise NotImplementedError(data_packing)
        elif zone_type in ('FEPOINT', 'FEQUADRILATERAL', 'FETRIANGLE'):
            # always POINT-packed: one node per line
            sline = split_line(line.strip())
            for inode in range(nnodesi):
                #print(iline, inode, sline)
                xyz[inode, :] = sline[:3]
                #if abs(xyz[inode, 1]) <= 5.0:
                    #msg = 'inode=%s xyz=%s' % (inode, xyz[inode, :])
                    #raise RuntimeError(msg)
                results[inode, :] = sline[3:]
                iline, line, sline = get_next_sline(lines, iline)
        elif zone_type == 'POINT':
            nvars = len(zone.variables)
            iline, line, sline = read_point(lines, iline, xyz, results, zone_type,
                                            line, sline, nnodesi, nvars, self.log)
        elif zone_type == 'BLOCK':
            nvars = len(zone.variables)
            iline = read_block(lines, iline, xyz, results, zone_type,
                               line, sline, nnodesi, nvars, self.log)
        else:  # pragma: no cover
            raise NotImplementedError(zone_type)

        #print(elements.shape)
        #print('xyz[0 , :]', xyz[0, :])
        #print('xyz[-1, :]', xyz[-1, :])
        #print(sline)
        # ----- read the connectivity (unstructured only) -----
        if is_structured:
            pass
        elif is_unstructured:
            iline, line, sline = read_unstructured_elements(lines, iline, sline, elements, nelementsi)

            #print(f.readline())

            # node ids are offset by the running node count so zones stack
            if zone_type == 'FEBRICK':
                hexas_list.append(elements + nnodes)
            elif zone_type == 'FETETRAHEDRON':
                tets_list.append(elements + nnodes)
            elif zone_type in ('FEPOINT', 'FEQUADRILATERAL'):
                # TODO: why are points stuck in the quads?
                quads_list.append(elements + nnodes)
            elif zone_type == 'FETRIANGLE':
                tris_list.append(elements + nnodes)
            else:
                raise NotImplementedError(zone_type)
        else:
            raise RuntimeError()

        xyz_list.append(xyz)
        results_list.append(results)
        nnodes += nnodesi
        nelements += nelementsi
        self.log.debug('nnodes=%s nelements=%s (0-based)' % (nnodes, nelements))
        del headers_dict
        iblock += 1
        # NOTE(review): returns None (not iline) after 10 blocks — the caller
        # appears to treat a None iline as a stop signal; confirm.
        if iblock == 10:
            return
        self.log.debug('final sline=%s' % sline)
        return iline
    def read_tecplot_binary(self, tecplot_filename, nnodes=None,
                            nelements=None):
        """
        The binary file reader must have ONLY CHEXAs and be Tecplot 360
        with:
          `rho`, `u`, `v`, `w`, and `p`.

        Parameters
        ----------
        tecplot_filename : str
            the path to the binary tecplot file
        nnodes / nelements : int; default=None
            optional expected counts; if given they are cross-checked
            against the counts read from the file
        """
        self.tecplot_filename = tecplot_filename
        assert os.path.exists(tecplot_filename), tecplot_filename
        with open(tecplot_filename, 'rb') as tecplot_file:
            self.f = tecplot_file
            self._uendian = '<'
            self.n = 0
            # results are hard-coded to the 5 CFD primitive variables
            self.variables = ['rho', 'u', 'v', 'w', 'p']

            # magic / version word (e.g. "#!TDV...")
            data = tecplot_file.read(8)
            self.n += 8
            word, = unpack(b'8s', data)
            self.log.debug('word = %r' % word)

            #self.show(100, endian='<')
            # http://home.ustc.edu.cn/~cbq/360_data_format_guide.pdf
            # page 151
            if 1:
                # scan int32 words until the 9999 marker; also interpret
                # each word as a float for debugging
                values = []
                ii = 0
                for ii in range(100):
                    datai = tecplot_file.read(4)
                    vali, = unpack(b'i', datai)
                    valf, = unpack(b'f', datai)
                    self.n += 4
                    values.append((vali, valf))
                    if vali == 9999:
                        #print('breaking...')
                        break
                #for j, vals in enumerate(values):
                    #print('  ', j, vals)
                assert ii < 100, ii

            nbytes = 3 * 4
            data = tecplot_file.read(nbytes)
            self.n += nbytes
            self.show_data(data, types='if', endian='<')

            nbytes = 1 * 4
            data = tecplot_file.read(nbytes)
            self.n += nbytes
            zone_type, = unpack(b'i', data)
            self.log.debug('zone_type = %s' % zone_type)
            self.show(100, types='if', endian='<')

            # 11 words of zone metadata (interpreted as 2 floats + 9 ints)
            nbytes = 11 * 4
            data = tecplot_file.read(nbytes)
            self.n += nbytes
            #self.show_data(data, types='if', endian='<') # 'if'?
            s = unpack('2f 9i', data)
            self.log.debug(str(s))
            #assert self.n == 360, self.n

            #print('----------')
            nbytes = 2 * 4
            data = tecplot_file.read(nbytes)
            self.n += nbytes
            nnodes2, nelements2 = unpack('2i', data)
            assert nnodes2 > 0, nnodes2
            assert nelements2 > 0, nelements2
            #self.show_data(data, types='if', endian='<') # 'if'?

            if nnodes and nelements:
                self.log.debug('nnodes=%s nelements=%s' % (nnodes, nelements))
                self.log.debug('nnodes2=%s nelements2=%s' % (nnodes2, nelements2))
            else:
                nnodes = nnodes2
                nelements = nelements2

            self.log.info('nnodes=%s nelements=%s' % (nnodes, nelements))
            assert nnodes == nnodes2
            assert nelements == nelements2
            #assert nnodes2 < 10000, nnodes
            #assert nelements2 < 10000, nelements

            # skip 35 + 30 words of remaining header (not interpreted)
            nbytes = 35 * 4
            data = tecplot_file.read(nbytes)
            self.n += nbytes
            #self.show_data(data, types='if', endian='<')
            #print('----------')

            nbytes = 30 * 4
            data = tecplot_file.read(nbytes)
            self.n += nbytes

            #print('----------------------')
            #self.show_data(data, types='if', endian='<')
            #print('----------------------')

            # 0 - ORDERED (meaning?)
            # 1 - FELINESEG (meaning?)
            # 2 - FETRIANGLE
            # 3 - FEQUADRILATERAL
            # 4 - FETETRAHEDRON
            # 5 - FEBRICK
            assert zone_type in [0, 1, 2, 3, 4, 5], zone_type

            # p.93
            # zone_title
            # zone_type
            #   0 = ORDERED
            #   1 = FELINESEG
            #   2 = FETRIANGLE
            #   3 = FEQUADRILATERAL
            #   4 = FETETRAHEDRON
            #   5 = FEBRICK
            # i_max_or_num_points
            # j_max_or_num_elements
            # k_max
            # i_cell_max
            # j_cell_max
            # k_cell_max
            # solution_time
            # strand_id
            # parent_zone
            # is_block (0=POINT, 1=BLOCK)
            # num_face_connections
            # face_neighbor_mode
            # passive_var_list
            # value_location (0=cell-centered; 1=node-centered)
            # share_var_from_zone
            # share_connectivity_from_zone

            # http://www.hgs.k12.va.us/tecplot/documentation/tp_data_format_guide.pdf
            # 0=POINT
            # 1=BLOCK
            is_block = False  # only POINT packing is supported

            # value_location:
            #   0=cell-centered
            #   1=node-centered
            #value_location = None
            if is_block:
                raise NotImplementedError('is_block=%s' % is_block)
            else:
                # is_point
                #print('----------')
                # the variables: [x, y, z]
                nvars = 3
                #nnodes = 3807
                ni = nnodes * nvars
                nbytes = ni * 4
                #print('nbytes =', nbytes)
                data = tecplot_file.read(nbytes)
                self.n += nbytes
                xyzvals = unpack('%if' % ni, data)
                # stored variable-major -> reshape to (nnodes, 3)
                xyz = np.array(xyzvals, dtype='float32').reshape(3, nnodes).T

                # the variables: [rho, u, v, w, p]
                nvars = 5
                dunno = 0  # what's with this...
                ni = nnodes * nvars + dunno
                nbytes = ni * 4
                data = tecplot_file.read(nbytes)
                self.n += nbytes
                resvals = unpack('%if' % ni, data)
                nodal_results = np.array(resvals, dtype='float32').reshape(nvars, nnodes).T

                # 7443 elements
                if zone_type == 5:
                    # CHEXA
                    nnodes_per_element = 8  # 8 nodes/elements
                    nvals = nnodes_per_element * nelements
                #elif zone_type == 1:
                    #asdf
                elif zone_type == 0:
                    # CQUAD4
                    nnodes_per_element = 4
                    nvals = nnodes_per_element * nelements
                    self.log.debug('nvals = %s' % nvals)
                else:
                    raise NotImplementedError('zone_type=%s' % zone_type)

                nbytes = nvals * 4
                node_ids = unpack(b'%ii' % nvals, tecplot_file.read(nbytes))
                self.n += nbytes
                elements = np.array(node_ids).reshape(nelements, nnodes_per_element)
                #print(elements)

                #self.show_data(data, types='ifs', endian='<')
                #print(vals)

            #self.show(100, endian='<')
            zone = Zone(self.log)
            if zone_type == 5:
                zone.hexa_elements = elements
            elif zone_type == 0:
                zone.quad_elements = elements
            else:
                raise NotImplementedError(zone_type)
        del self.f
        zone.xyz = xyz
        zone.nodal_results = nodal_results
        self.zones.append(zone)
        #self.log.debug('done...')
def show(self, n, types='ifs', endian=None): # pragma: no cover
assert self.n == self.f.tell()
nints = n // 4
data = self.f.read(4 * nints)
strings, ints, floats = self.show_data(data, types=types, endian=endian)
self.f.seek(self.n)
return strings, ints, floats
def show_data(self, data, types='ifs', endian=None): # pragma: no cover
"""
Shows a data block as various types
Parameters
----------
data : bytes
the binary string bytes
types : str; default='ifs'
i - int
f - float
s - string
d - double (float64; 8 bytes)
q - long long (int64; 8 bytes)
l - long (int; 4 bytes)
I - unsigned int (int; 4 bytes)
L - unsigned long (int; 4 bytes)
Q - unsigned long long (int; 8 bytes)
endian : str; default=None -> auto determined somewhere else in the code
the big/little endian {>, <}
.. warning:: 's' is apparently not Python 3 friendly
"""
return self._write_data(sys.stdout, data, types=types, endian=endian)
def _write_data(self, f, data, types='ifs', endian=None): # pragma: no cover
"""
Useful function for seeing what's going on locally when debugging.
Parameters
----------
data : bytes
the binary string bytes
types : str; default='ifs'
i - int
f - float
s - string
d - double (float64; 8 bytes)
q - long long (int64; 8 bytes)
l - long (int; 4 bytes)
I - unsigned int (int; 4 bytes)
L - unsigned long (int; 4 bytes)
Q - unsigned long long (int; 8 bytes)
endian : str; default=None -> auto determined somewhere else in the code
the big/little endian {>, <}
"""
n = len(data)
nints = n // 4
ndoubles = n // 8
strings = None
ints = None
floats = None
longs = None
if endian is None:
endian = self._uendian
assert endian is not None, endian
f.write('\nndata = %s:\n' % n)
for typei in types:
assert typei in 'sifdq lIL', 'type=%r is invalid' % typei
if 's' in types:
strings = unpack('%s%is' % (endian, n), data)
f.write(" strings = %s\n" % str(strings))
if 'i' in types:
ints = unpack('%s%ii' % (endian, nints), data)
f.write(" ints = %s\n" % str(ints))
if 'f' in types:
floats = unpack('%s%if' % (endian, nints), data)
f.write(" floats = %s\n" % str(floats))
if 'd' in types:
doubles = unpack('%s%id' % (endian, ndoubles), data[:ndoubles*8])
f.write(" doubles (float64) = %s\n" % str(doubles))
if 'l' in types:
longs = unpack('%s%il' % (endian, nints), data)
f.write(" long = %s\n" % str(longs))
if 'I' in types:
ints2 = unpack('%s%iI' % (endian, nints), data)
f.write(" unsigned int = %s\n" % str(ints2))
if 'L' in types:
longs2 = unpack('%s%iL' % (endian, nints), data)
f.write(" unsigned long = %s\n" % str(longs2))
if 'q' in types:
longs = unpack('%s%iq' % (endian, ndoubles), data[:ndoubles*8])
f.write(" long long (int64) = %s\n" % str(longs))
f.write('\n')
return strings, ints, floats
def show_ndata(self, n, types='ifs'): # pragma: no cover
return self._write_ndata(sys.stdout, n, types=types)
def _write_ndata(self, f, n, types='ifs'): # pragma: no cover
"""
Useful function for seeing what's going on locally when debugging.
"""
nold = self.n
data = self.f.read(n)
self.n = nold
self.f.seek(self.n)
return self._write_data(f, data, types=types)
def slice_x(self, xslice):
"""TODO: doesn't remove unused nodes/renumber elements"""
zone = self.zones[0]
x = zone.xyz[:, 0]
self._slice_plane(zone, x, xslice)
def slice_y(self, yslice):
"""TODO: doesn't remove unused nodes/renumber elements"""
zone = self.zones[0]
y = zone.xyz[:, 1]
self._slice_plane(zone, y, yslice)
def slice_z(self, zslice):
"""TODO: doesn't remove unused nodes/renumber elements"""
zone = self.zones[0]
z = zone.xyz[:, 2]
self._slice_plane(zone, z, zslice)
def slice_xyz(self, xslice, yslice, zslice):
"""TODO: doesn't remove unused nodes/renumber elements"""
zone = self.zones[0]
x = zone.xyz[:, 0]
y = zone.xyz[:, 1]
z = zone.xyz[:, 2]
inodes = []
if xslice is not None:
xslice = float(xslice)
inodes.append(np.where(x < xslice)[0])
if yslice is not None:
yslice = float(yslice)
inodes.append(np.where(y < yslice)[0])
if zslice is not None:
zslice = float(zslice)
inodes.append(np.where(z < zslice)[0])
nodes = None
if len(inodes) == 1:
nodes = inodes[0]
elif len(inodes) == 2:
nodes = np.intersect1d(inodes[0], inodes[1], assume_unique=True)
elif len(inodes) == 3:
nodes = np.intersect1d(
np.intersect1d(inodes[0], inodes[1], assume_unique=True),
inodes[2], assume_unique=True)
#inodes = arange(self.nodes.shape[0])
# nodes = unique(hstack(inodes))
if nodes is not None:
zone._slice_plane_inodes(nodes)
def _slice_plane(self, zone, y, slice_value):
"""
- Only works for CHEXA
- Doesn't remove unused nodes/renumber elements
"""
slice_value = float(slice_value)
inodes = np.where(y < slice_value)[0]
zone._slice_plane_inodes(inodes)
def _get_write_header(self, res_types):
"""gets the tecplot header"""
is_y = True
is_z = True
#is_results = False
assert self.nzones >= 1, self.nzones
for zone in self.zones:
variables = zone.variables
is_y = 'Y' in zone.headers_dict['VARIABLES']
is_z = 'Z' in zone.headers_dict['VARIABLES']
is_results = bool(len(zone.nodal_results))
if res_types is None:
res_types = zone.variables
elif isinstance(res_types, str):
res_types = [res_types]
break
#"tecplot geometry and solution file"
title = self.title
if '"' in title or "'" in title:
msg = 'TITLE = %s\n' % self.title
else:
msg = 'TITLE = "%s"\n' % self.title
msg += 'VARIABLES = "X"\n'
if is_y:
msg += '"Y"\n'
if is_z:
msg += '"Z"\n'
result_indices_to_write = []
if is_results:
#msg += '"rho"\n'
#msg += '"u"\n'
#msg += '"v"\n'
#msg += '"w"\n'
#msg += '"p"\n'
#msg += 'ZONE T="%s"\n' % r'\"processor 1\"'
#print('res_types =', res_types)
#print('vars =', variables)
for ivar, var in enumerate(res_types):
if var not in variables:
raise RuntimeError('var=%r not in variables=%s' % (var, variables))
#print('adding %s' % var)
result_indices_to_write.append(variables.index(var))
ivars = np.unique(result_indices_to_write)
ivars.sort()
for ivar in ivars:
var = variables[ivar]
msg += '"%s"\n' % var
#print('ivars =', ivars)
else:
#if res_types is None:
assert len(res_types) == 0, len(res_types)
ivars = []
return msg, ivars
def write_tecplot(self, tecplot_filename, res_types=None, adjust_nids=True):
"""
Only handles single type writing
Parameters
----------
tecplot_filename : str
the path to the output file
res_types : str; List[str, str, ...]; default=None -> all
the results that will be written (must be consistent with
self.variables)
adjust_nids : bool; default=True
element_ids are 0-based in binary and must be switched to
1-based in ASCII
"""
self.log.info('writing tecplot %s' % tecplot_filename)
msg, ivars = self._get_write_header(res_types)
with open(tecplot_filename, 'w') as tecplot_file:
tecplot_file.write(msg)
for zone in self.zones:
nnodes = zone.nnodes
nelements = zone.nelements
(is_structured, is_unstructured, is_points, zone_type,
is_tris, is_quads, is_tets, is_hexas) = zone.determine_element_type()
#print(is_structured, is_unstructured, is_points, zone_type)
#print(is_tris, is_quads, is_tets, is_hexas)
if is_unstructured:
zone.write_unstructured_zone(tecplot_file, ivars, is_points, nnodes, nelements, zone_type, self.log,
is_tris, is_quads, is_tets, is_hexas, adjust_nids=adjust_nids)
elif is_structured:
zone.write_structured_zone(tecplot_file, ivars, self.log, zone.headers_dict, adjust_nids=adjust_nids)
else: # pragma: no cover
raise RuntimeError('only structured/unstructured')
#def skin_elements(self):
#sss
#return tris, quads
#def get_free_faces(self):
#"""get the free faces for hexa elements"""
#sss
#return free_faces
def extract_y_slice(self, y0, tol=0.01, slice_filename=None):
"""
doesn't work...
"""
self.log.info('slicing...')
zone = model.zones[0]
y = self.xyz[:, 1]
nodes = self.xyz
assert tol > 0.0, tol
elements = zone.hexa_elements
results = zone.nodal_results
iy = np.where((y0 - tol <= y) & (y <= y0 + tol))[0]
self.log.debug(y[iy])
self.log.debug(nodes[iy, 1].min(), nodes[iy, 1].max())
#iy = np.where(y <= y0 + tol)[0]
assert len(iy) > 0, iy
#inode = iy + 1
# find all elements that have iy within tolerance
#slots = np.where(elements == iy)
#slots = np.where(element for element in elements
#if any(iy in element))
#slots = where(iy == elements.ravel())[0]
ielements = np.unique([ie for ie, unused_elem in enumerate(elements)
for i in range(8)
if i in iy])
#print(slots)
#ri, ci = slots
#ri = unique(hstack([where(element == iy)[0] for element in elements]))
#ri = [ie for ie, element in enumerate(elements)
#if [n for n in element
#if n in iy]]
#ri = [np.where(element == iy)[0] for element in elements if np.where(element == iy)[0]]
#print(ri)
#ielements = np.unique(ri)
self.log.debug(ielements)
assert len(ielements) > 0, ielements
# find nodes
elements2 = elements[ielements, :]
inodes = np.unique(elements2)
assert len(inodes) > 0, inodes
# renumber the nodes
nidmap = {}
for inode, nid in enumerate(inodes):
nidmap[nid] = inode
elements3 = np.array(
[[nidmap[nid] for nid in element]
for element in elements2],
dtype='int32')
self.log.debug(inodes)
nodes2 = nodes[inodes, :]
nodal_results2 = results[inodes, :]
model = Tecplot()
zone = Zone(self.log)
zone.xyz = nodes2
zone.nodal_results = nodal_results2
zone.hexa_elements = elements3
model.zones = [zone]
if slice_filename:
model.write_tecplot(slice_filename)
return model
def split_headers(headers_in: str, log: 'SimpleLogger') -> List[str]:
    """Split a joined Tecplot header string on commas.

    Back-to-back double quotes (``""``) separate adjacent quoted variable
    names, so they are first rewritten as ``","`` to make the comma split
    work.
    """
    log.debug(f'headers_in = {headers_in}')
    normalized = headers_in.replace('""', '","')
    return normalized.split(',')
def _join_headers(header_lines: List[str]) -> str:
"""smart join by commas"""
header = ','.join([headeri.strip(', ') for headeri in header_lines])
return header
def _header_lines_to_header_dict(title_line: str, header_lines: List[str],
                                 variables: List[str], log: SimpleLogger):
    """parses the parsed header lines

    Joins the header lines, splits them on commas, and folds the pieces
    into a case-insensitive ``{key: value}`` dict (TITLE, VARIABLES, ZONE
    keys such as N/E/I/J/K/ZONETYPE/DATAPACKING/...).  Returns None when
    no header lines were found.
    """
    #print('header_lines', header_lines)
    #headers_dict = {}
    headers_dict = CaseInsensitiveDict()
    if title_line:
        title_sline = title_line.split('=', 1)
        title = title_sline[1]
    else:
        title = 'tecplot geometry and solution file'
    headers_dict['TITLE'] = title

    if len(header_lines) == 0:
        #raise RuntimeError(header_lines)
        return None
    header = _join_headers(header_lines)

    # this is so overly complicataed and probably not even enough...
    # what about the following 'quote' style?
    headers = split_headers(header, log)
    #headers = header.replace('""', '","').split(',')

    #TITLE = "Weights=1/6,6,1"
    #Variables = "x","y","z","psi"
    #Zone N = 125, E = 64, DATAPACKING = POINT, ZONETYPE = FEBRICK
    nheaders = len(headers) - 1
    # NOTE(review): this first loop only strips into a local and has no
    # effect — it looks like leftover debugging
    for iheader, header in enumerate(headers):
        header = header.strip()
        #print(f'{iheader} {header!r}')

    for iheader, header in enumerate(headers):
        header = header.strip()
        #print('%2i %s' % (iheader, header))
        #print('iheader=%s header=%r' % (iheader, header))
        if '=' in header:
            # a new key=value pair starts here; defer storing it (parse)
            # until we know the following pieces are not continuations
            sline = header.split('=', 1)
            parse = False
            #print('iheader=%s nheaders=%s' % (iheader, nheaders))
            if iheader == nheaders:
                parse = True
            elif '=' in headers[iheader + 1]:
                parse = True
        elif header.upper() == 'ZONE':
            # apparently the null key is also a thing...
            # we'll use 'ZONE' because...
            headers_dict['ZONE'] = None
            parse = True
            #continue
        elif '"' in header:
            # continuation of the previous key's (quoted) value list;
            # sline still holds the pending key/value pieces
            sline += [header]
            parse = False
            if iheader == nheaders:
                parse = True
            elif '=' in headers[iheader + 1]:
                parse = True
        else:
            raise NotImplementedError('header=%r headers=%r' % (header, headers))

        if parse:
            # ZONE T="FUSELAGE" I=21 J=49 K=1 F=BLOCK
            #print('  parsing')
            log.debug(f'sline = {sline}')
            key = sline[0].strip().upper()
            if key.startswith('ZONE '):
                # the key is not "ZONE T" or "ZONE E"
                # ZONE is a flag, T is title, E is number of elements
                key = key[5:].strip()

            value = [val.strip() for val in sline[1:]]
            if len(value) == 1:
                value = value[0].strip()
            #assert not isinstance(value, list), value

            headers_dict[key] = value
            #print('  ', value)
            #value = value.strip()

            # 'T', 'ZONE T',  ???
            # 'DT', 'SOLUTIONTIME', 'STRANDID', # tecplot 360 specific things not supported
            allowed_keys = ['VARIABLES', 'T', 'ZONETYPE', 'DATAPACKING', # 'TITLE',
                            'N', 'E', 'F', 'DT', 'SOLUTIONTIME', 'STRANDID',
                            'I', 'J', 'K']
            assert key in allowed_keys, 'key=%r; allowed=[%s]' % (key, ', '.join(allowed_keys))
            parse = False
    #print('headers_dict', headers_dict)
    #print(headers_dict.keys())

    _simplify_header(headers_dict, variables)
    assert len(headers_dict) > 0, headers_dict
    return headers_dict
def _simplify_header(headers_dict, variables: List[str]) -> None:
    """Cast the integer header fields and set the VARIABLES list in place."""
    # N/E are unstructured counts (nnodes/nelements);
    # I/J/K are structured grid dimensions
    for key in ('N', 'E', 'I', 'J', 'K'):
        if key in headers_dict:
            headers_dict[key] = int(headers_dict[key])

    if 'TITLE' not in headers_dict:
        headers_dict['TITLE'] = 'tecplot geometry and solution file'

    if 'VARIABLES' in headers_dict:
        # quoted names from the header win over the passed-in list
        _simplify_variables(headers_dict)
    elif variables is not None:
        headers_dict['VARIABLES'] = variables
    else:
        raise RuntimeError('no variables...')
def _simplify_variables(headers_dict) -> None:
variables = headers_dict['VARIABLES']
headers_dict['VARIABLES'] = [var.strip('"') for var in variables]
def _stack(zone, xyz_list, quads_list, tris_list, tets_list, hexas_list, results_list, log):
    """
    elements are read as a list of lines, so we need to stack them
    and cast them while we're at it.

    For 3d zones the stacked xyz goes to ``zone.xyz`` and results to
    ``zone.nodal_results``; for 2d zones the third xyz column is folded
    into the results and only x/y are kept.
    """
    log.debug('stacking elements')
    if len(hexas_list):
        zone.hexa_elements = np.vstack(hexas_list)
    if len(tets_list):
        zone.tet_elements = np.vstack(tets_list)
    if len(quads_list):
        zone.quad_elements = np.vstack(quads_list)
    if len(tris_list):
        zone.tri_elements = np.vstack(tris_list)

    log.debug('stacking nodes')
    # avoid a needless copy when there is only one zone of nodes
    if len(xyz_list) == 1:
        xyz = xyz_list[0]
    else:
        xyz = np.vstack(xyz_list)

    #self.elements = elements - 1
    #print(self.elements)
    if is_3d(zone.headers_dict):
        zone.xyz = xyz
        nresults = len(results_list)
        if nresults == 1:
            results = results_list[0]
        else:
            results = np.vstack(results_list)
        zone.nodal_results = results
    else:
        # 2d: the "z" column is really the first result variable
        zone.xy = xyz[:, :2]
        nresults = len(results_list) + 1
        nnodes_temp = xyz.shape[0]
        if nresults == 1:
            zone.nodal_results = xyz[:, 2].reshape(nnodes_temp, 1)
        else:
            inputs = [xyz[:, 2].reshape(nnodes_temp, 1), *results_list]
            zone.nodal_results = np.hstack(inputs)
        del nnodes_temp
        # drop the coordinate names from the variables; the remaining
        # names line up with the nodal_results columns
        zone.variables = [var for var in zone.variables if var not in ['X', 'Y', 'Z']]
def read_zone_block(lines, iline, xyz, results, nresults, zone_type,
                    sline, nnodes, log):
    """a zone can be structured or unstructred

    Reads BLOCK-packed data: all x values, then all y, all z, then each
    result variable in turn.  Fills ``xyz`` and ``results`` in place and
    returns ``(iline, line, sline)`` positioned after the data.
    """
    #print('***', iline, sline)
    # read all data
    #result = sline
    #iresult = len(sline)
    #nresult = len(sline)
    result = []
    iresult = 0
    nresult = 0
    # total number of floats expected: 3 coords + nresults per node
    nnodes_max = (3 + nresults) * nnodes
    #print('nnodes_max =', nnodes_max)
    while nresult < nnodes_max:  # changed from iresult to nresult
        #print('zb', iline, sline, len(sline))
        result += sline
        nresult += len(sline)
        # NOTE(review): this 'break' is disabled, so the loop always reads
        # one sline past the tally before the while condition stops it
        if iresult >= nnodes_max:
            log.debug('breaking...')
            #break
        iline, line, sline = get_next_sline(lines, iline)
        if iresult == 0:
            log.debug('zone_type=%s sline=%s' % (zone_type, sline))
        iresult += len(sline)
    #print('len', iresult, nresult, len(result))

    #print(result, len(result))
    # every token must look like a float (has a decimal point)
    for i, value in enumerate(result):
        assert '.' in value, 'i=%i value=%s' % (i, value)
    assert len(result) == nnodes_max, 'len(result)=%s expected=%s' % (len(result), nnodes_max)

    #-----------------
    # pack data: slice the flat token list variable-by-variable
    for ires in range(3 + nresults):
        i0 = ires * nnodes
        i1 = (ires + 1) * nnodes #+ 1
        if len(result[i0:i1]) != nnodes:
            msg = 'ires=%s len=%s nnodes=%s' % (
                ires, len(result[i0:i1]), nnodes)
            raise RuntimeError(msg)
        if ires in [0, 1, 2]:
            # the first 3 variables are the coordinates
            log.debug('ires=%s nnodes=%s len(result)=%s' % (ires, nnodes, len(result)))
            xyz[:, ires] = result[i0:i1]
        else:
            results[:, ires - 3] = result[i0:i1]
    # setup
    #iline, line, sline = get_next_sline(lines, iline)
    return iline, line, sline
def read_unstructured_elements(lines, iline, sline, elements, nelements):
    """Fill ``elements`` (nelements x nnodes_per_element) from integer rows.

    One element per line; returns ``(iline, line, sline)`` positioned
    after the connectivity block.
    """
    # connectivity must be integers, not floats
    assert '.' not in sline[0], sline

    ielem = 0
    for ielem in range(nelements):
        try:
            elements[ielem, :] = sline
        except (IndexError, ValueError):
            raise RuntimeError('i=%s sline=%s' % (ielem, str(sline)))
        iline, line, sline = get_next_sline(lines, iline)
    return iline, line, sline
def read_point(lines, iline, xyz, results, zone_type, line, sline, nnodes, nvars, log):
    """a POINT grid is a structured grid

    Reads one row per node (x, y, z followed by ``nvars - 3`` result
    values), handling rows that wrap over multiple physical lines.
    Fills ``xyz`` and ``results`` in place and returns
    ``(iline, line, sline)`` positioned after the data.
    """
    log.debug(f'start of POINT (structured); nnodes={nnodes} nvars={nvars} zone_type={zone_type}')
    for inode in range(nnodes):
        iline, sline = get_next_nsline(lines, iline, sline, nvars)
        #print(iline, inode, sline)

        #if inode == 0:
            #log.debug('zone_type=%s sline=%s' %(zone_type, sline))
        if not len(sline[3:]) == len(results[inode, :]):
            # bugfix: report the result tokens (sline[3:]); the old message
            # printed sline[:3] while labeling it sline[3:]
            msg = 'sline[3:]=%s results[inode, :]=%s' % (sline[3:], results[inode, :])
            raise RuntimeError(msg)
        try:
            xyz[inode, :] = sline[:3]
            results[inode, :] = sline[3:]
        except ValueError:
            msg = 'i=%s line=%r\n' % (inode, line)
            msg += 'sline = %s' % str(sline)
            print(msg)
            raise

        iline, line, sline = get_next_sline(lines, iline)
        #log.debug(sline)
    log.debug('end of POINT')
    return iline, line, sline
def read_block(lines, iline, xyz, results, zone_type, line, sline, nnodes, nvars, log):
    """
    BLOCK format is similar to PLOT3D in that you read all the X values before the Ys,
    Zs, and results.  The alternative format is POINT, which reads them on a per node
    basis.

    .. warning:: incomplete — the tokens are collected but never stored into
                 ``xyz``/``results``; the function always raises at the end.
    """
    log.debug('start of BLOCK')
    #print('nnodes =', nnodes)
    #print('nvars =', nvars)
    ndata = nnodes * nvars
    #print('ndata =', ndata)
    # shadows the 'results' array argument with a token list on purpose?
    # NOTE(review): looks unintentional — the caller's array is never filled
    results = []
    while len(results) < ndata:
        sline = split_line(line)
        results += sline
        #print('block:', iline, sline, len(results))
        if len(sline) == 0:
            # NOTE(review): bare raise with no active exception ->
            # "RuntimeError: No active exception to re-raise"
            raise
        iline, line, sline = get_next_sline(lines, iline)
        #log.debug(sline)
    #print(len(results))
    assert len(results) == ndata, 'len(results)=%s expected=%s' % (len(results), ndata)
    log.debug('end of BLOCK')
    #TODO: save results
    raise RuntimeError('not done...save results')
    return iline
def get_next_line(lines, iline):
    """Return ``(iline, line)`` for the next meaningful line.

    Blank lines and ``#`` comment lines are skipped (up to a small cap of
    ~10 consecutive gaps); the returned ``iline`` points one past the line
    returned.  ``line`` is ``None`` once the input is exhausted.
    """
    try:
        current = lines[iline].strip()
    except IndexError:
        return iline, None
    iline += 1

    gap_count = 0
    max_gap = 10
    while not current or current.startswith('#'):
        try:
            current = lines[iline].strip()
        except IndexError:
            return iline, None
        iline += 1
        if gap_count > max_gap:
            break
        gap_count += 1
    return iline, current
def split_line(line):
    """splits a comma or space separated line into tokens"""
    # turning commas into spaces first makes one split handle both styles
    return line.replace(',', ' ').split()
def get_next_sline(lines, iline):
    """Read the next meaningful line and return it split into tokens.

    Returns ``(iline, line, sline)``; both ``line`` and ``sline`` are
    ``None`` when the input is exhausted.
    """
    iline, line = get_next_line(lines, iline)
    if line is None:
        return iline, None, None
    return iline, line, split_line(line)
def get_next_nsline(lines, iline, sline, nvars):
    """Accumulate tokens until ``sline`` holds exactly ``nvars`` values.

    A long data row may be wrapped over several physical lines; keep
    pulling lines and extending ``sline`` until the expected token count
    is reached.  Returns ``(iline, sline)``.
    """
    while len(sline) != nvars:  # the row was wrapped onto more lines
        iline, unused_line, extra_tokens = get_next_sline(lines, iline)
        assert len(extra_tokens) > 0, extra_tokens
        sline += extra_tokens
    assert len(sline) == nvars, 'iline=%i sline=%s nvars=%s' % (iline, sline, nvars)
    return iline, sline
def _read_header_lines(lines, iline, line, log):
    """
    reads a tecplot header

    Scans up to 30 lines, collecting the TITLE line and the
    VARIABLES/ZONE lines, and stops at the first line that starts with a
    digit or '-' (i.e. the start of numeric data).

    Returns
    -------
    iline : int
        position after the header
    title_line : str
        the raw TITLE= line ('' if absent)
    header_lines : List[str]
        the raw VARIABLES/ZONE lines
    line : str
        the first non-header (data) line

    Examples
    --------
    **Example 1**

    TITLE     = "tecplot geometry and solution file"
    VARIABLES = "x"
    "y"
    "z"
    "rho"
    "u"
    "v"
    "w"
    "p"
    ZONE T="\"processor 1\""
    n=522437, e=1000503, ZONETYPE=FEBrick
    DATAPACKING=BLOCK

    **Example 2**

    title="Force and Momment Data for forces"
    variables="Iteration"
    "C_L","C_D","C_M_x","C_M_y","C_M_z""C_x","C_y","C_z","C_Lp","C_Dp", "C_Lv", "C_Dv""C_M_xp"
    "C_M_yp","C_M_zp","C_M_xv","C_M_yv""C_M_zv","C_xp","C_yp","C_zp","C_xv","C_yv""C_zv
    "Mass flow","<greek>r</greek>","u"
    "p/p<sub>0</sub>","T","p<sub>t</sub>/p<sub>0</sub>"
    "T<sub>t</sub>","Mach"
    "Simulation Time"
    zone,t="forces"
    """
    i = 0
    title_line = ''
    #variables_line = ''
    # which section (TITLE / VARIABLES / ZONE) the current line belongs to;
    # continuation lines inherit the previous active_key
    active_key = None

    vars_found = []
    header_lines = []
    #print('-----------------------------')
    #for iii, linei in enumerate(lines):
        #if iii > 10:
            #break
        #print(linei)
    #print('-----------------------------')
    while i < 30:
        #print(iline, i, line.strip())
        #self.n = 0
        if len(line) == 0 or line[0] == '#':
            # skip blanks/comments without touching the parser state
            line = lines[iline].strip()
            iline += 1
            i += 1
            continue

        if line[0].isdigit() or line[0] == '-':
            # numeric data starts here -> the header is over
            #print(line)
            log.debug('breaking after finding header lines...')
            break

        uline = line.upper()
        uline2 = uline.replace(' ', '')
        if 'TITLE=' in uline2:
            title_line += line
            vars_found.append('TITLE')
            active_key = 'TITLE'
        elif 'VARIABLES' in uline2:
            vars_found.append('VARIABLES')
            #variables_line += line
            active_key = 'VARIABLES'
        else:
            #if 'ZONE T' in line:
                #vars_found.append('ZONE T')
            if 'ZONE' in uline2:
                vars_found.append('ZONE')
                active_key = 'ZONE'
            #if 'ZONE N' in uline:
                #vars_found.append('N')
            if 'ZONETYPE' in uline2:
                vars_found.append('ZONETYPE')
                active_key = 'ZONE'
            if 'DATAPACKING' in uline2:
                vars_found.append('DATAPACKING')
                active_key = 'ZONE'

        #print(active_key, line)
        if active_key in ['ZONE', 'VARIABLES']:
            header_lines.append(line.strip())

        #if len(vars_found) == 5:
            #break

        #if active_key
        i += 1
        line = lines[iline].strip()
        iline += 1

    log.debug('vars_found = %s' % vars_found)
    #print('header_lines', header_lines)
    #print("title = %r" % title_line)
    #print("variables_line = %r" % variables_line)
    return iline, title_line, header_lines, line
def main():  # pragma: no cover
    """Scratch driver: merge per-processor Tecplot volume files, then cut a
    y=0 slice and write it to ``slice.plt``.
    """
    from pyNastran.converters.tecplot.utils import merge_tecplot_files

    dirname = r'Z:\Temporary_Transfers\steve\output\time20000'
    fnames = [os.path.join(dirname, fname) for fname in os.listdir(dirname)]

    # None -> don't write the merged model back out as a single file
    model = merge_tecplot_files(fnames, None)
    model.extract_y_slice(0.0, tol=0.014, slice_filename='slice.plt')
    return
def main2():  # pragma: no cover
    """tests slicing"""
    model = Tecplot()
    fnames = ['slice.plt']
    for islice, tecplot_filename in enumerate(fnames):
        model.read_tecplot(tecplot_filename)
        model.write_tecplot('processor_%i.plt' % islice)
# script entry point: run the merge/slice driver when executed directly
if __name__ == '__main__': # pragma: no cover
    main()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,632
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/bdf_interface/hdf5_loader.py
|
"""Defines various helper functions for loading a HDF5 BDF file"""
from __future__ import annotations
from itertools import count
from typing import TYPE_CHECKING
import numpy as np
import h5py
from pyNastran.bdf.bdf import DMIAX, MDLPRM
from pyNastran.utils.dict_to_h5py import _cast, _cast_array, cast_string, cast_strings
from pyNastran.bdf.bdf_interface.encoding import decode_lines
from pyNastran.bdf.case_control_deck import CaseControlDeck
from pyNastran.bdf.bdf_interface.add_card import CARD_MAP
from pyNastran.bdf.bdf_interface.hdf5_exporter import (
dict_int_obj_attrs, scalar_obj_keys, LIST_OBJ_KEYS)
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
# BDF attributes stored as plain dicts in the HDF5 file; the commented-out
# names were intentionally dropped from serialization (internal lookup
# tables that are rebuilt at load time, not user data)
dict_attrs = [
    # required
    'params',

    # removed
    #'_solmap_to_value',
    #'card_count',
    #'_card_parser',
    #'_card_parser_prepare',
    #'_slot_to_type_map',
    #'_type_to_id_map',
    #'_type_to_slot_map',
]
def load_bdf_from_hdf5_file(h5_file, model):
    """
    Loads an h5 file object into a BDF object

    Parameters
    ----------
    h5_file : H5File()
        an h5py file object
    model : BDF()
        the BDF file to put the data into

    Notes
    -----
    Dispatches each top-level h5 group to a dedicated loader (``mapper``),
    a generic class-based loader (``generic_mapper``), or an inline branch;
    unknown keys raise RuntimeError.
    """
    # the encoding is read first because every other group needs it to
    # decode its bytes entries
    encoding = cast_string(h5_file['minor_attributes']['encoding'], 'latin1')
    model._encoding = encoding
    assert isinstance(encoding, str), f'encoding={encoding!r}; type={type(encoding)}'
    model.get_encoding()
    keys = h5_file.keys()
    # group name -> loader(model, group, encoding)
    mapper = {
        'elements' : hdf5_load_elements,
        'plotels' : hdf5_load_plotels,
        'properties' : hdf5_load_properties,
        'coords' : hdf5_load_coords,
        'tables' : hdf5_load_tables,
        'methods' : hdf5_load_methods,
        'masses' : hdf5_load_masses,
        'materials' : hdf5_load_materials,
        'spcs' : hdf5_load_spcs,
        'spcadds' : hdf5_load_spcadds,
        'mpcs' : hdf5_load_mpcs,
        'mpcadds' : hdf5_load_mpcadds,
        'pval' : hdf5_load_pval,
        'loads' : hdf5_load_loads,
        'load_combinations' : hdf5_load_load_combinations,
        'dloads' : hdf5_load_dloads,
        'dload_entries' : hdf5_load_dload_entries,
        'bcs' : hdf5_load_bcs,
        'transfer_functions' : hdf5_load_transfer_functions,
        'dvgrids': hdf5_load_dvgrids,
        'nsms' : hdf5_load_nsms,
        'nsmadds' : hdf5_load_nsmadds,
        'frequencies' : hdf5_load_frequencies,
        'aelinks' : hdf5_load_aelinks,
        'desvars' : hdf5_load_desvars,
        'dmig' : hdf5_load_dmigs,
        'dmiax' : hdf5_load_dmigs,
        'dmij' : hdf5_load_dmigs,
        'dmik' : hdf5_load_dmigs,
        'dmiji' : hdf5_load_dmigs,
        'dmi' : hdf5_load_dmigs,
        'dti' : hdf5_load_dti,
        'dconstrs' : hdf5_load_dconstrs,
        'dresps' : hdf5_load_dresps,
        'usets' : hdf5_load_usets,
    }
    # group name -> loader(model, group, key, encoding); these share one
    # generic class-reconstruction path
    generic_mapper = {
        'rigid_elements' : hdf5_load_generic,
        'thermal_materials' : hdf5_load_generic,
        'creep_materials' : hdf5_load_generic,
        'hyperelastic_materials' : hdf5_load_generic,
        'flutters' : hdf5_load_generic,
        'trims' : hdf5_load_generic,
        'csschds' : hdf5_load_generic,
        'gusts' : hdf5_load_generic,
        'caeros' : hdf5_load_generic,
        'splines' : hdf5_load_generic,
        #'MATS1' : hdf5_load_generic,
        #'MATT1' : hdf5_load_generic,
        #'MATT2' : hdf5_load_generic,
        #'MATT3' : hdf5_load_generic,
        #'MATT4' : hdf5_load_generic,
        #'MATT5' : hdf5_load_generic,
        #'MATT8' : hdf5_load_generic,
        #'MATT9' : hdf5_load_generic,
    }
    #print('keys =', list(keys))
    for key in keys:
        #model.log.debug('loading %s' % key)
        group = h5_file[key]
        if key == 'nodes':
            # GRID cards are stored columnwise as parallel arrays
            grids = group['GRID']
            nids = _cast_array(grids['nid'])
            xyz = _cast_array(grids['xyz'])
            cp = _cast_array(grids['cp'])
            cd = _cast_array(grids['cd'])
            ps = _cast(grids['ps'])
            seid = _cast_array(grids['seid'])
            for nid, xyzi, cpi, cdi, psi, seidi in zip(nids, xyz, cp, cd, ps, seid):
                model.add_grid(nid, xyzi, cp=cpi, cd=cdi, ps=psi, seid=seidi, comment='')
            model.card_count['GRID'] = len(nids)
        elif key in mapper:
            func = mapper[key]
            func(model, group, encoding)
        elif key in generic_mapper:
            func = generic_mapper[key]
            func(model, group, key, encoding)
        elif key in dict_int_obj_attrs:
            #model.log.debug('  dict_int_obj')
            dkeys, values = load_cards_from_keys_values(
                key, group, encoding, model.log)
            _put_keys_values_into_dict(model, key, dkeys, values)
            card_type = values[0].type
            model.card_count[card_type] = len(dkeys)
        elif key in ['info', 'matrices'] or key.startswith('Subcase'): # op2
            continue
        elif key in ['cards_to_read']: # handled separately
            continue
        elif key == 'params':
            keys = list(group.keys())
            values = _load_cards_from_keys_values('params', group, keys, encoding, model.log)
            _put_keys_values_into_dict(model, 'params', keys, values, cast_int_keys=False)
            model.card_count['PARAM'] = len(keys)
        elif key == 'minor_attributes':
            _load_minor_attributes(key, group, model, encoding)
        #elif key in ['case_control_lines', 'executive_control_lines', 'system_command_lines']:
            #lst = _load_indexed_list_str(keyi, sub_group, encoding)
        elif key == 'active_filenames':
            if 'value' not in group:
                # NOTE(review): the loaded list is discarded by this
                # `continue` (model.active_filenames is never set on this
                # path) -- confirm whether that is intended
                lst = _load_indexed_list_str(key, group, encoding)
                continue
            lst = _cast_array(group['value']).tolist()
            #else:
            #except KeyError:  # pragma: no cover
                #print('group', group)
                #print('group.keys()', list(group.keys()))
                #raise
            if isinstance(lst[0], str):
                pass
            else:
                # NOTE(review): `.encode` on non-str entries looks
                # suspicious (bytes have no .encode); presumably this was
                # meant to be `.decode` -- confirm
                lst = [line.encode(encoding) for line in lst]
            setattr(model, key, lst)
        elif key in LIST_OBJ_KEYS:
            #model.log.debug('  list_obj')
            #model.log.info('  key = %s' % key)
            #model.log.info('  group = %s' % group)
            #model.log.info('  group.keys() = %s' % list(group.keys()))
            keys = _cast(group['keys'])
            values = group['values']
            lst = [None] * len(keys)
            for keyi in values.keys():
                ikey = int(keyi)
                class_obj_hdf5 = values[keyi]
                card_type = cast_string(class_obj_hdf5['type'], encoding)
                #print(card_type, class_obj_hdf5)
                class_instance = _load_from_class(class_obj_hdf5, card_type, encoding)
                lst[ikey] = class_instance
            _put_keys_values_into_list(model, key, keys, lst)
            #model.log.info('keys = %s' % keys)
            #model.log.info('values = %s' % values)
            #model.log.info('values.keys() = %s' % values.keys())
        # NOTE(review): `key in 'case_control_deck'` is a substring test,
        # not an equality test; it also matches e.g. key='case' -- likely
        # meant `key == 'case_control_deck'` (same below for 'mdlprm')
        elif key in 'case_control_deck':
            lines = []
            model.case_control_deck = CaseControlDeck(lines, log=model.log)
            model.case_control_deck.load_hdf5_file(group, encoding)
            str(model.case_control_deck)
        elif key in scalar_obj_keys: # these only have 1 value
            #model.log.debug('  scalar_obj')
            keys = list(group.keys())
            keys.remove('type')
            card_type = cast_string(group['type'], encoding)
            class_instance = _load_from_class(group, card_type, encoding)
            # presumably a sanity-check that the card can be written;
            # `write_card` is not defined in the visible imports -- verify
            write_card(class_instance)
            setattr(model, key, class_instance)
            model.card_count[card_type] = 1
        #elif key in scalar_keys:
            #value = _cast(group)
            #try:
                #setattr(model, key, value)
            #except AttributeError:
                #model.log.warning('cant set %r as %s' % (key, value))
                #raise
        #elif key in list_keys:
            #value = _cast(group)
            #try:
                #setattr(model, key, value)
            #except AttributeError:
                #model.log.warning('cant set %r as %s' % (key, value))
                #raise
        elif key in 'mdlprm':
            lines = []
            model.mdlprm = MDLPRM({'HDF5': 1})
            model.mdlprm.load_hdf5_file(group, encoding)
            str(model.mdlprm)
        else:
            model.log.warning('skipping hdf5 load for %s' % key)
            raise RuntimeError('skipping hdf5 load for %s' % key)
    # finally, restore the set of cards the reader was allowed to parse
    cards_to_read = _cast(h5_file['cards_to_read'])
    cards_to_read = [key.decode(encoding) for key in cards_to_read]
    model.cards_to_read = set(list(cards_to_read))
def _load_minor_attributes(unused_key: str, group, model: BDF,
                           encoding: str) -> None:
    """Loads the 'minor_attributes' group: deck text lines, reject cards,
    string/scalar settings, and the ENDDATA flag.

    Parameters
    ----------
    unused_key : str
        the group name ('minor_attributes'); unused
    group : h5py Group
        the minor_attributes group
    model : BDF
        the model being populated
    encoding : str
        encoding used to decode bytes entries

    """
    keys_attrs = group.keys()
    # attributes stored as lists of text lines
    list_attrs = {'case_control_lines', 'executive_control_lines',
                  'system_command_lines', 'active_filenames'}
    # attributes stored as single strings
    str_attrs = {'nastran_format', 'include_dir'}
    #skip_attrs = []
    for keyi in keys_attrs:
        sub_group = group[keyi]
        #model.log.debug('  %s' % keyi)
        if keyi in list_attrs:
            lst = _cast(sub_group)
            # entries may come back as bytes; normalize to str
            if isinstance(lst[0], str):
                pass
            else:
                lst = decode_lines(lst, encoding)
            assert isinstance(lst[0], str), type(lst[0])
            setattr(model, keyi, lst)
            continue
        elif keyi == 'reject_lines':
            # each rejected card is stored as [comment, line0, line1, ...]
            # under an integer-string key giving its original position
            reject_keys = list(sub_group.keys())
            lst = [None] * len(reject_keys)
            for reject_key in reject_keys:
                reject_key_int = int(reject_key)
                h5_value = sub_group[reject_key]
                value = _cast(h5_value)
                lst[reject_key_int] = value
                comment = value[0].decode(encoding)
                card_lines = value[1:]
                card_lines = decode_lines(card_lines, encoding)
                try:
                    line0 = card_lines[0]
                except IndexError:
                    # C:\Program Files\Siemens\NX 12.0\NXNASTRAN\nxn12\nast\del\gentim1.dat
                    print(value)
                    print(card_lines)
                    raise
                # card name = first field, trimmed to 8 chars, without the
                # large-field '*' marker
                card_name_field0 = line0.split(',', 1)[0].split('\t', 1)[0]
                card_name = card_name_field0[:8].rstrip().upper().rstrip('*')
                assert isinstance(comment, str), type(comment)
                ## TODO: swap out
                #model.add_card(card_lines, card_name, comment=comment,
                               #ifile=None, is_list=True, has_none=True)
                model.reject_card_lines(card_name, card_lines, comment=comment)
            continue
        elif keyi == 'reject_cards':
            # reject *cards* are stored field-by-field and re-added so the
            # model can re-reject (or now parse) them
            reject_keys = list(sub_group.keys())
            for ireject in sub_group.keys():
                reject_card = _cast(sub_group[ireject])
                if not isinstance(reject_card, list):
                    reject_card = reject_card.tolist()
                fields = decode_lines(reject_card, encoding)
                #fields = [field if field != 'nan' else None for field in fields]
                card_name = fields[0]
                model.add_card(fields, card_name, comment='', ifile=None,
                               is_list=True, has_none=True)
            continue
        elif keyi in str_attrs:
            value = cast_string(sub_group, encoding)
            #print(f'adding key={keyi!r} value={value!r}')
            assert isinstance(value, str), value
            try:
                setattr(model, keyi, value)
            except RuntimeError:  # pragma: no cover
                # e.g. an unsupported nastran_format is best-effort logged
                model.log.error('cant set minor_attributes/%s as %s' % (keyi, value))
            except AttributeError:  # pragma: no cover
                model.log.warning('cant set minor_attributes/%s as %s' % (keyi, value))
                raise
            continue
        elif keyi == 'is_enddata':
            model.card_count['ENDDATA'] = 1
            continue
        # fallback: any other entry is a plain scalar/array attribute
        value = _cast(sub_group)
        try:
            setattr(model, keyi, value)
        except AttributeError:  # pragma: no cover
            model.log.warning('cant set minor_attributes/%s as %s' % (keyi, value))
            raise
    return
def _load_indexed_list(key, group, unused_encoding):
    """Read an h5py group of index-named datasets into a flat list.

    Parameters
    ----------
    key : str
        the attribute name being loaded (unused here; kept for the
        caller's API)
    group : h5py Group
        group whose sub-datasets are iterated in ``group.keys()`` order
        (presumably '0', '1', ... written in order -- TODO confirm)
    unused_encoding : str
        unused

    Returns
    -------
    lst : list
        the casted dataset values

    """
    # fix: the loop variable used to be named ``key``, shadowing the
    # parameter of the same name
    lst = []
    for index_key in group.keys():
        lst.append(_cast(group[index_key]))
    #print('_load_indexed_list: %s' % lst)
    return lst
def _load_indexed_list_str(key, group, encoding):
    """Read an h5py group of index-named datasets into a list of str.

    Parameters
    ----------
    key : str
        the attribute name being loaded (passed through to
        ``_load_indexed_list``)
    group : h5py Group
        group whose sub-datasets are iterated in ``group.keys()`` order
    encoding : str
        encoding used to decode bytes entries

    Returns
    -------
    lst : list[str]
        the decoded values

    """
    lst = _load_indexed_list(key, group, encoding)
    # bug fix: this used to test ``isinstance(value, str)``, but ``value``
    # was never defined (it only existed in the commented-out debug code
    # above it), so every call raised NameError; test the first loaded
    # entry instead, mirroring _load_minor_attributes
    if not isinstance(lst[0], str):
        lst = decode_lines(lst, encoding)
    assert isinstance(lst[0], str), type(lst[0])
    return lst
def hdf5_load_coords(model, coords_group, encoding):
    """loads the coords from an HDF5 file

    CORD2x and CORD1x cards are stored columnwise as parallel arrays;
    any other coordinate card type is rebuilt generically from its
    serialized attributes.
    """
    for card_type in coords_group.keys():
        coords = coords_group[card_type]
        if card_type in ['CORD2R', 'CORD2C', 'CORD2S']:
            if card_type == 'CORD2R':
                func = model.add_cord2r
            elif card_type == 'CORD2C':
                func = model.add_cord2c
            elif card_type == 'CORD2S':
                func = model.add_cord2s

            # CORD2x: defined by three points (e1/e2/e3) in frame rid
            cids = _cast_array(coords['cid'])
            rids = _cast_array(coords['rid'])
            e1s = _cast_array(coords['e1'])
            e2s = _cast_array(coords['e2'])
            e3s = _cast_array(coords['e3'])
            for cid, rid, origin, zaxis, xzplane in zip(
                    cids, rids, e1s, e2s, e3s):
                func(cid, origin, zaxis, xzplane, rid=rid, comment='')
        elif card_type in ['CORD1R', 'CORD1C', 'CORD1S']:
            if card_type == 'CORD1R':
                func = model.add_cord1r
            elif card_type == 'CORD1C':
                func = model.add_cord1c
            elif card_type == 'CORD1S':
                func = model.add_cord1s

            # CORD1x: defined by three grid points
            cids = _cast_array(coords['cid'])
            nodes = _cast_array(coords['nodes'])
            for cid, (n1, n2, n3) in zip(cids, nodes):
                func(cid, n1, n2, n3, comment='')
        else:
            # generic fallback: rebuild each coord object from its class
            cids, values = load_cards_from_keys_values(
                'coords/%s' % card_type,
                coords, encoding, model.log)
            _put_keys_values_into_dict(model, 'coords', cids, values)
        model.card_count[card_type] = len(cids)
def hdf5_load_tables(model: BDF, group, encoding: str) -> None:
    """Load the TABLEx cards into ``model.tables``."""
    for card_type in group.keys():
        table_ids, tables = load_cards_from_keys_values(
            'tables/%s' % card_type, group[card_type], encoding, model.log)
        _put_keys_values_into_dict(model, 'tables', table_ids, tables)
        model.card_count[card_type] = len(table_ids)
def hdf5_load_methods(model: BDF, group, encoding: str) -> None:
    """Load the eigenvalue-method cards (e.g. EIGRL) into ``model.methods``."""
    for card_type in group.keys():
        method_ids, methods = load_cards_from_keys_values(
            'methods/%s' % card_type, group[card_type], encoding, model.log)
        _put_keys_values_into_dict(model, 'methods', method_ids, methods)
        model.card_count[card_type] = len(method_ids)
def hdf5_load_masses(model: BDF, group, encoding: str) -> None:
    """loads the masses

    CONM2/CMASS2 are stored columnwise as parallel arrays; all other mass
    cards are rebuilt generically from their serialized attributes.
    """
    for card_type in group.keys():
        masses = group[card_type]
        if card_type == 'CONM2':
            eid = _cast_array(masses['eid'])
            nid = _cast_array(masses['nid'])
            cid = _cast_array(masses['cid'])
            X = _cast_array(masses['X'])
            I = _cast_array(masses['I'])
            mass = _cast_array(masses['mass'])
            for eidi, nidi, cidi, Xi, Ii, massi in zip(eid, nid, cid, X, I, mass):
                model.add_conm2(eidi, nidi, massi, cid=cidi, X=Xi, I=Ii, comment='')
        elif card_type == 'CMASS2':
            eid = _cast_array(masses['eid'])
            mass = _cast_array(masses['mass'])
            nodes = _cast_array(masses['nodes']).tolist()
            components = _cast(masses['components'])
            for eidi, massi, nids, (c1, c2) in zip(eid, mass, nodes, components):
                model.add_cmass2(eidi, massi, nids, c1, c2, comment='')
        else:
            # generic fallback for the remaining mass cards:
            #model.add_cmass1(eid, pid, nids, c1=0, c2=0, comment='')
            #model.add_cmass3(eid, pid, nids, comment='')
            #model.add_cmass4(eid, mass, nids, comment='')
            #model.add_conm1(eid, nid, mass_matrix, cid=0, comment='')
            eid, values = load_cards_from_keys_values(
                'masses/%s' % card_type,
                masses, encoding, model.log)
            _put_keys_values_into_dict(model, 'masses', eid, values)
        model.card_count[card_type] = len(eid)
def hdf5_load_materials(model: BDF, group, encoding: str) -> None:
    """loads the materials

    MAT1/MAT2/MAT3/MAT8/MAT9 are stored columnwise as parallel arrays;
    other material cards are rebuilt generically from their serialized
    attributes.
    """
    for card_type in group.keys():
        sub_group = group[card_type]
        if card_type == 'MAT1':
            # isotropic material
            mid = _cast_array(sub_group['mid'])
            E = _cast_array(sub_group['E'])
            G = _cast_array(sub_group['G'])
            nu = _cast_array(sub_group['nu'])
            rho = _cast_array(sub_group['rho'])
            a = _cast_array(sub_group['A'])
            tref = _cast_array(sub_group['tref'])
            ge = _cast_array(sub_group['ge'])
            St = _cast_array(sub_group['St'])
            Sc = _cast_array(sub_group['Sc'])
            Ss = _cast_array(sub_group['Ss'])
            mcsid = _cast_array(sub_group['mcsid'])
            for midi, Ei, Gi, nui, rhoi, ai, trefi, gei, Sti, Sci, Ssi, mcsidi in zip(
                    mid, E, G, nu, rho, a, tref, ge, St, Sc, Ss, mcsid):
                model.add_mat1(midi, Ei, Gi, nui, rho=rhoi, a=ai, tref=trefi,
                               ge=gei, St=Sti, Sc=Sci, Ss=Ssi, mcsid=mcsidi, comment='')
        elif card_type == 'MAT2':
            # anisotropic shell material; G is the (G11..G23) 6-tuple and
            # a is the (a1, a2, a3) thermal expansion triple
            mid = _cast(sub_group['mid'])
            G = _cast_array(sub_group['G'])
            rho = _cast_array(sub_group['rho'])
            a = _cast_array(sub_group['A'])
            tref = _cast_array(sub_group['tref'])
            ge = _cast_array(sub_group['ge'])
            St = _cast_array(sub_group['St'])
            Sc = _cast_array(sub_group['Sc'])
            Ss = _cast_array(sub_group['Ss'])
            mcsid = _cast_array(sub_group['mcsid'])
            for (midi, (G11, G22, G33, G12, G13, G23), rhoi, (a1i, a2i, a3i),
                 trefi, gei, Sti, Sci, Ssi, mcsidi) in zip(
                     mid, G, rho, a, tref, ge, St, Sc, Ss, mcsid):
                # -1 is the serialized placeholder for "no mcsid"
                if mcsidi == -1:
                    mcsidi = None
                model.add_mat2(midi, G11, G12, G13, G22, G23, G33, rho=rhoi,
                               a1=a1i, a2=a2i, a3=a3i, tref=trefi, ge=gei,
                               St=Sti, Sc=Sci, Ss=Ssi, mcsid=mcsidi, comment='')
        elif card_type == 'MAT3':
            # orthotropic axisymmetric material
            mid = _cast_array(sub_group['mid'])
            ex = _cast_array(sub_group['Ex'])
            eth = _cast_array(sub_group['Eth'])
            ez = _cast_array(sub_group['Ez'])
            nuxth = _cast_array(sub_group['Nuxth'])
            nuzx = _cast_array(sub_group['Nuzx'])
            nuthz = _cast_array(sub_group['Nuthz'])
            gxz = _cast_array(sub_group['Gzx'])
            ax = _cast_array(sub_group['Ax'])
            ath = _cast_array(sub_group['Ath'])
            az = _cast_array(sub_group['Az'])
            rho = _cast_array(sub_group['rho'])
            tref = _cast_array(sub_group['tref'])
            ge = _cast_array(sub_group['ge'])
            for (midi, exi, ethi, ezi, nuxthi, nuzxi, nuthzi,
                 rhoi, gzxi, axi, athi, azi, trefi, gei) in zip(
                     mid, ex, eth, ez, nuxth, nuzx, nuthz, rho, gxz, ax, ath, az, tref, ge):
                model.add_mat3(midi, exi, ethi, ezi, nuxthi, nuthzi, nuzxi, rho=rhoi,
                               gzx=gzxi, ax=axi, ath=athi, az=azi, tref=trefi, ge=gei, comment='')
        elif card_type == 'MAT8':
            # orthotropic shell (composite ply) material
            mid = _cast_array(sub_group['mid'])
            e11 = _cast_array(sub_group['E11'])
            e22 = _cast_array(sub_group['E22'])
            nu12 = _cast_array(sub_group['Nu12'])
            g12 = _cast_array(sub_group['G12'])
            g1z = _cast_array(sub_group['G1z'])
            g2z = _cast_array(sub_group['G2z'])
            a1 = _cast_array(sub_group['A1'])
            a2 = _cast_array(sub_group['A2'])
            tref = _cast_array(sub_group['tref'])
            ge = _cast_array(sub_group['ge'])
            rho = _cast_array(sub_group['rho'])
            xt = _cast_array(sub_group['Xt'])
            xc = _cast_array(sub_group['Xc'])
            yt = _cast_array(sub_group['Yt'])
            yc = _cast_array(sub_group['Yc'])
            s = _cast_array(sub_group['S'])
            f12 = _cast_array(sub_group['F12'])
            strn = _cast_array(sub_group['strn'])
            for (midi, e11i, e22i, nu12i, g12i, g1zi, g2zi, rhoi, a1i, a2i, trefi,
                 xti, xci, yti, yci, si, gei, f12i, strni) in zip(
                     mid, e11, e22, nu12, g12, g1z, g2z, rho, a1, a2, tref,
                     xt, xc, yt, yc, s, ge, f12, strn):
                model.add_mat8(midi, e11i, e22i, nu12i, g12=g12i, g1z=g1zi, g2z=g2zi, rho=rhoi,
                               a1=a1i, a2=a2i, tref=trefi, Xt=xti, Xc=xci, Yt=yti, Yc=yci,
                               S=si, ge=gei, F12=f12i, strn=strni, comment='')
        elif card_type == 'MAT9':
            ## TODO: add G
            # NOTE: the Gij stiffness terms are not yet serialized, so the
            # reconstructed MAT9 cards get all-zero stiffness entries
            mid = _cast_array(sub_group['mid'])
            a = _cast_array(sub_group['A'])
            tref = _cast_array(sub_group['tref'])
            ge = _cast_array(sub_group['ge'])
            rho = _cast_array(sub_group['rho'])
            for midi, ai, trefi, gei, rhoi in zip(mid, a, tref, ge, rho):
                model.add_mat9(
                    midi,
                    G11=0., G12=0., G13=0., G14=0., G15=0., G16=0.,
                    G22=0., G23=0., G24=0., G25=0., G26=0.,
                    G33=0., G34=0., G35=0., G36=0.,
                    G44=0., G45=0., G46=0.,
                    G55=0., G56=0.,
                    G66=0.,
                    rho=rhoi, A=ai, tref=trefi, ge=gei, comment='')
        else:
            # generic fallback for the remaining material cards:
            #model.add_mat4(mid, k, cp=0.0, rho=1.0, H=None, mu=None, hgen=1.0,
                           #ref_enthalpy=None, tch=None, tdelta=None, qlat=None, comment='')
            #model.add_mat5(mid, kxx=0., kxy=0., kxz=0., kyy=0., kyz=0., kzz=0.,
                           #cp=0., rho=1., hgen=1., comment='')
            #model.add_mat10(mid, bulk, rho, c, ge=0.0, gamma=None,
                            #table_bulk=None, table_rho=None, table_ge=None,
                            #table_gamma=None, comment='')
            #model.add_mat11(mid, e1, e2, e3, nu12, nu13, nu23, g12, g13, g23,
                            #rho=0.0, a1=0.0, a2=0.0, a3=0.0, tref=0.0, ge=0.0, comment='')
            mid, values = load_cards_from_keys_values(
                'materials/%s' % card_type,
                sub_group, encoding, model.log)
            _put_keys_values_into_dict(model, 'materials', mid, values)
        model.card_count[card_type] = len(mid)
def hdf5_load_spcs(model: BDF, group, encoding: str) -> None:
    """Load the single-point-constraint cards into ``model.spcs``."""
    # the 'keys' dataset duplicates the SPC set ids; the real data lives
    # in the sibling subgroups named by those ids
    set_ids = [k for k in group.keys() if k != 'keys']
    #spc_ids = _cast(group['keys'])
    for spc_id in set_ids:
        ispc_id = int(spc_id)
        cards_group = group[spc_id]
        for card_type in cards_group.keys():
            card_ids, cards = load_cards_from_keys_values(
                'spcs/%s/%s' % (spc_id, card_type),
                cards_group[card_type], encoding, model.log)
            _put_keys_values_into_dict_list(model, 'spcs', ispc_id, card_ids, cards)
            model.card_count[card_type] = len(card_ids)
def hdf5_load_spcadds(model: BDF, group, encoding: str) -> None:
    """loads the spcadds (SPCADD cards)

    The 'keys' dataset duplicates the SPC set ids; the real data lives in
    the sibling subgroups named by those ids.
    """
    keys = list(group.keys())
    keys.remove('keys')
    #spc_ids = _cast(group['keys'])
    for spc_id in keys:
        ispc_id = int(spc_id)
        cards_group = group[spc_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            lkeys, values = load_cards_from_keys_values(
                'spcadds/%s/%s' % (spc_id, card_type),
                sub_group, encoding, model.log)
            _put_keys_values_into_dict_list(model, 'spcadds', ispc_id, lkeys, values)
            # consistency fix: every sibling loader (hdf5_load_spcs/mpcs/
            # mpcadds/...) records the card count; spcadds was the only
            # one that did not
            model.card_count[card_type] = len(lkeys)
def hdf5_load_mpcs(model: BDF, group, encoding: str) -> None:
    """Load the multi-point-constraint cards into ``model.mpcs``."""
    # the 'keys' dataset duplicates the MPC set ids; the real data lives
    # in the sibling subgroups named by those ids
    set_ids = [k for k in group.keys() if k != 'keys']
    #mpc_ids = _cast(group['keys'])
    for mpc_id in set_ids:
        impc_id = int(mpc_id)
        cards_group = group[mpc_id]
        for card_type in cards_group.keys():
            card_ids, cards = load_cards_from_keys_values(
                'mpcs/%s/%s' % (mpc_id, card_type),
                cards_group[card_type], encoding, model.log)
            _put_keys_values_into_dict_list(model, 'mpcs', impc_id, card_ids, cards)
            model.card_count[card_type] = len(card_ids)
def hdf5_load_mpcadds(model: BDF, group, encoding: str) -> None:
    """loads the mpcadds (MPCADD cards)

    The 'keys' dataset duplicates the MPC set ids; the real data lives in
    the sibling subgroups named by those ids.
    """
    keys = list(group.keys())
    keys.remove('keys')
    #spc_ids = _cast(group['keys'])
    for mpc_id in keys:
        impc_id = int(mpc_id)
        cards_group = group[mpc_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            lkeys, values = load_cards_from_keys_values(
                'mpcadds/%s/%s' % (mpc_id, card_type),
                sub_group, encoding, model.log)
            # bug fix: the dict used to be keyed by the *string* mpc_id
            # (the int conversion was computed but marked unused); every
            # sibling loader (spcadds/mpcs/...) keys by the integer set id
            _put_keys_values_into_dict_list(model, 'mpcadds', impc_id, lkeys, values)
            model.card_count[card_type] = len(lkeys)
def hdf5_load_pval(model: BDF, group, encoding: str) -> None:
    """loads the pval (p-adaptivity PVAL cards)

    The 'keys' dataset duplicates the ADAPT ids; the real data lives in
    the sibling subgroups named by those ids.
    """
    keys = list(group.keys())
    keys.remove('keys')
    for adapt_id in keys:
        adapt_idi = int(adapt_id)
        cards_group = group[adapt_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            #if card_type == 'TEMP':  # this has a weird dictionary structure
                #sid = sub_group.keys()
                #for index in sid:
                    #cardi = sub_group[index]
                    #nodes = _cast(cardi['node']).tolist()
                    #temp = _cast(cardi['temperature']).tolist()
                    #temperatures = {nid : tempi for (nid, tempi) in zip(nodes, temp)}
                    #model.add_temp(iload_id, temperatures, comment='')
            #else:
            sid, values = load_cards_from_keys_values(
                'pval/%s/%s' % (adapt_idi, card_type),
                sub_group, encoding, model.log)
            #for value in values:
                #print(value)
            _put_keys_values_into_dict_list(model, 'pval', adapt_idi, sid, values)
            model.card_count[card_type] = len(sid)
def hdf5_load_loads(model: BDF, group, encoding: str) -> None:
    """loads the loads

    FORCE/MOMENT are stored columnwise as parallel arrays; TEMP uses a
    per-card subgroup layout; everything else is rebuilt generically.
    """
    keys = list(group.keys())
    keys.remove('keys')
    for load_id in keys:
        iload_id = int(load_id)
        cards_group = group[load_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            if card_type in ['FORCE', 'MOMENT']:
                if card_type == 'FORCE':
                    func = model.add_force
                else:
                    func = model.add_moment
                sid = _cast_array(sub_group['sid'])
                node = _cast_array(sub_group['node'])
                cid = _cast_array(sub_group['cid'])
                mag = _cast_array(sub_group['mag'])
                xyz = _cast_array(sub_group['xyz'])
                for (sidi, nodei, magi, xyzi, cidi) in zip(sid, node, mag, xyz, cid):
                    func(sidi, nodei, magi, xyzi, cid=cidi, comment='')
            elif card_type == 'TEMP': # this has a weird dictionary structure
                # each subgroup is one TEMP card with parallel node/
                # temperature arrays
                sid = sub_group.keys()
                for index in sid:
                    cardi = sub_group[index]
                    nodes = _cast_array(cardi['node']).tolist()
                    temp = _cast_array(cardi['temperature']).tolist()
                    temperatures = {nid : tempi for (nid, tempi) in zip(nodes, temp)}
                    model.add_temp(iload_id, temperatures, comment='')
            else:
                # generic fallback, e.g.:
                #model.add_force1(sid, node, mag, g1, g2, comment='')
                sid, values = load_cards_from_keys_values(
                    'loads/%s/%s' % (load_id, card_type),
                    sub_group, encoding, model.log)
                #for value in values:
                    #print(value)
                _put_keys_values_into_dict_list(model, 'loads', iload_id, sid, values)
            model.card_count[card_type] = len(sid)
def hdf5_load_load_combinations(model: BDF, group, encoding: str) -> None:
    """Load the LOAD-combination cards into ``model.load_combinations``."""
    # the 'keys' dataset duplicates the load set ids; the real data lives
    # in the sibling subgroups named by those ids
    set_ids = [k for k in group.keys() if k != 'keys']
    for load_id in set_ids:
        iload_id = int(load_id)
        cards_group = group[load_id]
        for card_type in cards_group.keys():
            card_ids, cards = load_cards_from_keys_values(
                'load_combinations/%s/%s' % (load_id, card_type),
                cards_group[card_type], encoding, model.log)
            _put_keys_values_into_dict_list(
                model, 'load_combinations', iload_id, card_ids, cards)
            model.card_count[card_type] = len(card_ids)
def hdf5_load_nsms(model: BDF, group, encoding: str) -> None:
    """loads the nsms (nonstructural-mass cards)

    The 'keys' dataset duplicates the NSM set ids; the real data lives in
    the sibling subgroups named by those ids.
    """
    keys = list(group.keys())
    keys.remove('keys')
    for nsm_id in keys:
        insm_id = int(nsm_id)
        cards_group = group[nsm_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            # fix: the inner result used to rebind ``keys``, shadowing the
            # list of NSM set ids being iterated; use ``lkeys`` like the
            # sibling loaders (hdf5_load_spcs/mpcs/frequencies/...)
            lkeys, values = load_cards_from_keys_values(
                'nsms/%s/%s' % (nsm_id, card_type),
                sub_group, encoding, model.log)
            _put_keys_values_into_dict_list(model, 'nsms', insm_id, lkeys, values)
            model.card_count[card_type] = len(lkeys)
def hdf5_load_nsmadds(model: BDF, group, encoding: str) -> None:
    """loads the nsmadds (NSMADD cards) from an HDF5 group"""
    keys = list(group.keys())
    keys.remove('keys')  # bookkeeping dataset, not an nsm id
    for nsm_id in keys:
        insm_id = int(nsm_id)
        cards_group = group[nsm_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            lkeys, values = load_cards_from_keys_values(
                'nsmadds/%s/%s' % (nsm_id, card_type),
                sub_group, encoding, model.log)
            _put_keys_values_into_dict_list(model, 'nsmadds', insm_id, lkeys, values)
            # bug fix: count the cards just loaded (lkeys), not the outer
            # list of nsm ids (keys); matches hdf5_load_nsms/dloads/etc.
            model.card_count[card_type] = len(lkeys)
def hdf5_load_frequencies(model: BDF, group, encoding: str) -> None:
    """loads the frequencies (e.g., FREQ/FREQ1) from an HDF5 group"""
    freq_ids = list(group.keys())
    freq_ids.remove('keys')  # bookkeeping dataset, not a frequency id
    for freq_id in freq_ids:
        ifreq_id = int(freq_id)
        cards_group = group[freq_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            fkeys, values = load_cards_from_keys_values(
                f'frequencies/{freq_id}/{card_type}',
                sub_group, encoding, model.log)
            _put_keys_values_into_dict_list(
                model, 'frequencies', ifreq_id, fkeys, values)
            model.card_count[card_type] = len(fkeys)
def hdf5_load_aelinks(model: BDF, group, encoding: str) -> None:
    """loads the aelinks (AELINK cards) from an HDF5 group"""
    naelinks = 0
    add_methods = model._add_methods
    for aelink_id in group.keys():
        unused_iaelink_id = int(aelink_id)
        jlinks_group = group[aelink_id]
        jlink_keys = jlinks_group.keys()
        # subgroup names are 0-based positions within this aelink set,
        # so the list is pre-sized and filled by index
        aelink = [None] * len(jlink_keys)
        for jlink in jlink_keys:
            aelinki = _load_class(jlink, jlinks_group[jlink], 'AELINK', encoding)
            aelink[int(jlink)] = aelinki
            naelinks += 1
        for aelinki in aelink:
            add_methods._add_aelink_object(aelinki)
    model.card_count['AELINK'] = naelinks
def hdf5_load_dloads(model: BDF, group, encoding: str) -> None:
    """loads the dloads (e.g., DLOAD) from an HDF5 group"""
    dload_ids = list(group.keys())
    dload_ids.remove('keys')  # bookkeeping dataset, not a dload id
    for dload_id in dload_ids:
        idload_id = int(dload_id)
        cards_group = group[dload_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            lkeys, values = load_cards_from_keys_values(
                f'dloads/{dload_id}/{card_type}',
                sub_group, encoding, model.log)
            _put_keys_values_into_dict_list(model, 'dloads', idload_id, lkeys, values)
            model.card_count[card_type] = len(lkeys)
def hdf5_load_dload_entries(model: BDF, group, encoding: str) -> None:
    """loads the dload_entries (e.g., TLOAD1/RLOAD1) from an HDF5 group"""
    dload_ids = list(group.keys())
    dload_ids.remove('keys')  # bookkeeping dataset, not a dload id
    for dload_id in dload_ids:
        idload_id = int(dload_id)
        cards_group = group[dload_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            lkeys, values = load_cards_from_keys_values(
                f'dload_entries/{dload_id}/{card_type}',
                sub_group, encoding, model.log)
            _put_keys_values_into_dict_list(
                model, 'dload_entries', idload_id, lkeys, values)
            model.card_count[card_type] = len(lkeys)
def hdf5_load_bcs(model: BDF, group, encoding: str) -> None:
    """loads the bcs (thermal boundary conditions) from an HDF5 group"""
    bc_ids = list(group.keys())
    bc_ids.remove('keys')  # bookkeeping dataset, not a bc id
    for bc_id in bc_ids:
        ibc_id = int(bc_id)
        cards_group = group[bc_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            lkeys, values = load_cards_from_keys_values(
                f'bcs/{bc_id}/{card_type}',
                sub_group, encoding, model.log)
            _put_keys_values_into_dict_list(model, 'bcs', ibc_id, lkeys, values)
            model.card_count[card_type] = len(lkeys)
def hdf5_load_transfer_functions(model: BDF, group, encoding: str) -> None:
    """loads the transfer_functions (TF cards) from an HDF5 group"""
    tf_ids = list(group.keys())
    tf_ids.remove('keys')  # bookkeeping dataset, not a tf id
    for tf_id in tf_ids:
        itf_id = int(tf_id)
        cards_group = group[tf_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            lkeys, values = load_cards_from_keys_values(
                f'transfer_functions/{tf_id}/{card_type}',
                sub_group, encoding, model.log)
            _put_keys_values_into_dict_list(
                model, 'transfer_functions', itf_id, lkeys, values)
            model.card_count[card_type] = len(lkeys)
def hdf5_load_dvgrids(model: BDF, group, encoding: str) -> None:
    """loads the dvgrids (DVGRID cards) from an HDF5 group"""
    opt_ids = list(group.keys())
    opt_ids.remove('keys')  # bookkeeping dataset, not an optimization id
    for opt_id in opt_ids:
        iopt_id = int(opt_id)
        cards_group = group[opt_id]
        for card_type in cards_group.keys():
            sub_group = cards_group[card_type]
            lkeys, values = load_cards_from_keys_values(
                f'dvgrids/{opt_id}/{card_type}',
                sub_group, encoding, model.log)
            _put_keys_values_into_dict_list(model, 'dvgrids', iopt_id, lkeys, values)
            model.card_count[card_type] = len(lkeys)
def hdf5_load_desvars(model: BDF, group, encoding: str) -> None:
    """loads the desvars (DESVAR cards) from an HDF5 group"""
    for card_type in group.keys():
        sub_group = group[card_type]
        # guard clause: only DESVAR is stored columnwise in this group
        if card_type != 'DESVAR':  # pragma: no cover
            raise RuntimeError('card_type=%s in hdf5_load_desvars' % card_type)
        desvar = _cast_array(sub_group['desvar'])
        label = _cast(sub_group['label'])
        xinit = _cast_array(sub_group['xinit'])
        xlb = _cast_array(sub_group['xlb'])
        xub = _cast_array(sub_group['xub'])
        delx = _cast_array(sub_group['delx'])
        ddval = _cast_array(sub_group['ddval'])
        rows = zip(desvar, label, xinit, xlb, xub, delx, ddval)
        for desvari, labeli, xiniti, xlbi, xubi, delxi, ddvali in rows:
            labeli = labeli.decode(encoding)
            assert isinstance(labeli, str), labeli
            model.add_desvar(desvari, labeli, xiniti, xlb=xlbi, xub=xubi,
                             delx=delxi, ddval=ddvali, comment='')
        model.card_count[card_type] = len(desvar)
def hdf5_load_dmigs(model: BDF, group, unused_encoding: str) -> None:
    """loads the dmigs (DMIG/DMIJ/DMIJI/DMIK/DMI/DMIAX matrices)

    Each subgroup is one matrix, keyed by its name; the matrix class is
    dispatched to the matching ``_load_*`` helper.
    """
    keys = group.keys()
    if len(keys) == 0:
        #model.log.warning('skipping loading %s' % group)
        raise RuntimeError('error loading %s' % group)
        #return
    for name in keys:
        sub_group = group[name]
        #print('group', group)
        #print('sub_group', sub_group)
        # NOTE(review): the class type is read from the *parent* group's
        # attrs (not sub_group's), so every matrix in this group shares one
        # type -- confirm this matches the HDF5 export side
        class_type = group.attrs['type']
        if class_type == 'DMIG' and name == 'UACCEL':
            # DMIG,UACCEL stores load sequences, not GCi/GCj columns
            _load_dmig_uaccel(model, sub_group)
        elif class_type == 'DMI':
            _load_dmi(model, name, sub_group)
        elif class_type == 'DMIAX':
            _load_dmiax(model, name, sub_group)
        else:
            # DMIG / DMIJ / DMIJI / DMIK share one loader
            _load_dmig(model, name, sub_group, class_type)
    # one entry per matrix name; class_type carries over from the loop
    model.card_count[class_type] = len(keys)
def _load_dmig_uaccel(model: BDF, sub_group):
    """loads the DMIG,UACCEL enforced-acceleration matrix"""
    keysi = list(sub_group.keys())
    tin = _cast(sub_group['tin'])
    keysi.remove('tin')
    ncol = None
    if 'ncol' in keysi:
        keysi.remove('ncol')
        ncol = _cast(sub_group['ncol'])

    # every remaining key is a load-sequence id -> [nid, dof, value] triples
    load_sequences = {}
    for idi in keysi:
        sub_groupi = sub_group[idi]
        dofs = _cast(sub_groupi['dofs'])
        nids = _cast(sub_groupi['nids'])
        values = _cast(sub_groupi['values'])
        triples = [[nid, dof, value]
                   for nid, dof, value in zip(nids, dofs, values)]
        load_sequences[int(idi)] = triples
    dmig_uaccel = model.add_dmig_uaccel(tin, ncol, load_sequences, comment='')
    str(dmig_uaccel)  # smoke-test the card writer
def _load_dmi(model: BDF, name, sub_group):
    """loads a DMI (direct matrix input) card"""
    ncols = _cast(sub_group['ncols'])
    nrows = _cast(sub_group['nrows'])
    matrix_form = _cast(sub_group['matrix_form'])
    tin = _cast(sub_group['tin'])
    tout = _cast(sub_group['tout'])
    GCi = _cast(sub_group['GCi'])
    GCj = _cast(sub_group['GCj'])
    Real = _cast(sub_group['Real'])
    # the imaginary part is only stored for complex matrices
    Complex = _cast(sub_group['Complex']) if 'Complex' in sub_group else None

    # matrix_form doubles as the card's form/ifo field
    model.add_dmi(name, matrix_form, tin, tout, nrows, ncols, GCj, GCi,
                  Real, Complex=Complex, comment='')
def _load_dmig(model, name, sub_group, class_type):
    """loads a DMIG/DMIJ/DMIJI/DMIK matrix"""
    class_obj = CARD_MAP[class_type]
    ncols = _cast(sub_group['ncols']) if 'ncols' in sub_group else None
    polar = _cast(sub_group['polar'])
    matrix_form = _cast(sub_group['matrix_form'])
    tin = _cast(sub_group['tin'])
    tout = _cast(sub_group['tout'])
    GCi = _cast(sub_group['GCi'])
    GCj = _cast(sub_group['GCj'])
    Real = _cast(sub_group['Real'])
    # the imaginary part is only stored for complex matrices
    Complex = _cast(sub_group['Complex']) if 'Complex' in sub_group else None

    # matrix_form doubles as the card's ifo field
    dmig = class_obj(name, matrix_form, tin, tout, polar, ncols,
                     GCj, GCi, Real, Complex=Complex, comment='', finalize=True)
    assert class_type in ['DMIG', 'DMIK', 'DMIJ', 'DMIJI'], class_type
    # e.g. 'DMIG' -> model.dmigs[name]
    slot_name = class_type.lower() + 's'
    getattr(model, slot_name)[name] = dmig
    str(dmig)  # smoke-test the card writer
def _load_dmiax(model, name, sub_group):
    """loads a DMIAX (axisymmetric direct matrix input)

    Parameters
    ----------
    model : BDF
        the model to load into
    name : str
        the matrix name
    sub_group : h5py Group
        the group holding the flattened DMIAX datasets
    """
    ncols = None
    if 'ncols' in sub_group:
        ncols = _cast(sub_group['ncols'])
    matrix_form = _cast(sub_group['matrix_form'])
    tin = _cast(sub_group['tin'])
    tout = _cast(sub_group['tout'])
    gcni = _cast(sub_group['GCNi_j'])
    gcnj = _cast(sub_group['GCNj'])
    i_none_flags = _cast(sub_group['i_none_flags'])
    j_none_flags = _cast(sub_group['j_none_flags'])

    # rebuild GCNj; a set flag means the harmonic (nj) was None
    dmiax_GCNj = []
    for GCNj, is_none_flag_j in zip(gcnj, j_none_flags):
        gj, cj, nj = GCNj
        if is_none_flag_j:
            nj = None
        dmiax_GCNj.append((gj, cj, nj))

    # rebuild GCNi, grouping rows by their column index j
    dmiax_GCNi = []
    j_old = -1
    gcni_group = []
    # bug fix: the per-row flag comes from the zip; previously
    # i_none_flags[k] was indexed with k stuck at 0, so only the first
    # row's flag was ever consulted
    for GCNi_j, is_none_flag_i in zip(gcni, i_none_flags):
        gi, ci, ni, j = GCNi_j
        if is_none_flag_i:
            ni = None
        if j != j_old:
            # new column -> start a new group of (g, c, n) rows
            j_old = j
            gcni_group = []
            dmiax_GCNi.append(gcni_group)
        gcni_group.append((gi, ci, ni))
    #print('GCNj =', dmiax_GCNj)
    #print('GCNi =', dmiax_GCNi)

    Real = _cast(sub_group['Real'])
    Complex = None
    if 'Complex' in sub_group:
        Complex = _cast(sub_group['Complex'])
    dmiax = DMIAX(name, matrix_form, tin, tout, ncols,
                  dmiax_GCNj, dmiax_GCNi, Real, Complex=Complex)
    model.dmiax[name] = dmiax
    str(dmiax)  # smoke-test the card writer
def hdf5_load_dconstrs(model, group, encoding):
    """loads the dconstrs (DCONSTR/DCONADD cards) from an HDF5 group"""
    keys = group.keys()
    if len(keys) == 0:
        #model.log.warning('skipping loading %s' % group)
        raise RuntimeError('error loading %s' % group)

    add_methods = model._add_methods
    for card_type in keys:
        sub_group = group[card_type]
        if card_type == 'DCONSTR':
            # columnwise storage: one dataset per DCONSTR field
            oid = _cast(sub_group['oid'])
            dresp_id = _cast(sub_group['dresp_id'])
            lid = _cast(sub_group['lid'])
            uid = _cast(sub_group['uid'])
            lowfq = _cast(sub_group['lowfq'])
            highfq = _cast(sub_group['highfq'])
            for oidi, dresp_idi, lidi, uidi, lowfqi, highfqi in zip(
                    oid, dresp_id, lid, uid, lowfq, highfq):
                model.add_dconstr(oidi, dresp_idi, lid=lidi, uid=uidi,
                                  lowfq=lowfqi, highfq=highfqi, comment='')
            # bug fix: count the DCONSTR cards loaded, not the number of
            # card-type subgroups in the parent group
            model.card_count[card_type] = len(oid)
        elif card_type == 'DCONADD':
            # DCONADDs are stored one subgroup per card
            sub_keys = sub_group.keys()
            for key in sub_keys:
                value = sub_group[key]
                dconadd = _load_class(key, value, card_type, encoding)
                add_methods._add_dconstr_object(dconadd)
            model.card_count[card_type] = len(sub_keys)
        else:
            raise RuntimeError('error loading %s' % card_type)
def hdf5_load_dti(model, group, encoding):
    """loads the dti (DTI tables)

    ``group['keys']`` holds the table names; ``group['values'][name]`` holds
    one subgroup per record index, whose contents may be an indexed list,
    a flat dataset, or a group of scalar/array datasets.
    """
    group_keys = group.keys()
    if len(group_keys) == 0:
        #model.log.warning('skipping loading %s' % group)
        raise RuntimeError('error loading %s' % group)
    names = cast_strings(group['keys'], encoding)
    values = group['values']
    for name in names:
        sub_group = values[name]
        records = sub_group.keys()
        fields = {}
        #print('records', records)
        for irecord in records:
            sub_groupi = sub_group[irecord]
            #print(sub_group, sub_groupi)
            if 'keys' in sub_groupi:
                # record stored as an order-preserving indexed list
                lst = _load_indexed_list(irecord, sub_groupi, encoding)
                #print('indexe_lst', lst)
                lst2 = [val.decode(encoding) if isinstance(val, bytes) else val for val in lst]
            else:
                if isinstance(sub_groupi, h5py._hl.dataset.Dataset):
                    # record stored as one flat dataset
                    #print('dataset')
                    #print(sub_group, sub_groupi)
                    lst = _cast(sub_groupi).tolist()
                    #print('lst =', lst)
                    lst2 = [val.decode(encoding) if isinstance(val, bytes) else val for val in lst]
                else:
                    # record stored as a group of per-field datasets
                    #print(sub_group, sub_groupi, len(sub_groupi.keys()))
                    keys = sub_groupi.keys()
                    lst = []
                    for key in keys:
                        sub_groupii = sub_groupi[key]
                        if len(sub_groupii.shape) == 0:
                            # 0-d dataset: a lone scalar
                            # h5py between 2.8.0 and 3.1.0 (probably 3.0)
                            # changed str to bytes
                            scalar_value = np.array(sub_groupii).tolist()
                            # str/bytes/float
                            if isinstance(scalar_value, str):
                                pass
                            elif isinstance(scalar_value, bytes):
                                scalar_value = scalar_value.decode(encoding)
                            elif np.isnan(scalar_value):
                                # NaN was the on-disk stand-in for a blank field
                                scalar_value = None
                            lst.append(scalar_value)
                        else:
                            lsti = _cast(sub_groupii)
                            #assert isinstance(lsti, int, float, str), lsti
                            lst.append(lsti)
                    #lst = _cast(sub_groupi)
                    #print(lst)
                    lst2 = lst
            if name == 'UNITS':
                # DTI,UNITS records hold a single value, not a list
                fields[irecord] = lst2[0]
            else:
                fields[irecord] = lst2
        assert len(fields) > 0, fields
        model.add_dti(name, fields)
    model.card_count['DTI'] = len(names)
def hdf5_load_usets(model, group, encoding):
    """loads the usets (USET/USET1-style sets) from an HDF5 group"""
    set_names = group.keys()
    if len(set_names) == 0:
        #model.log.warning('skipping loading %s' % group)
        raise RuntimeError('error loading %s' % group)
    add_methods = model._add_methods
    for name in set_names:
        sub_group = group[name]
        for key in sub_group.keys():
            sub_groupi = sub_group[key]
            # each entry records its own card type (e.g., USET vs USET1)
            card_type = _cast(sub_groupi['type'])
            class_obj = _load_class(key, sub_groupi, card_type, encoding)
            add_methods._add_uset_object(class_obj)
            if card_type in model.card_count:
                model.card_count[card_type] += 1
            else:
                model.card_count[card_type] = 1
def hdf5_load_dresps(model, group, encoding):
    """loads the dresps (DRESP1/DRESP2 design responses)

    DRESP1 atta/attb fields are stored as bytes and decoded back into
    int/float/str/None by inspecting their content; DRESP2 params are
    stored per-card in indexed subgroups.
    """
    keys = list(group.keys())
    if len(keys) == 0:
        #model.log.warning('skipping loading %s' % group)
        raise RuntimeError('error loading %s' % group)

    for class_type in group.keys():
        sub_group = group[class_type]
        if class_type == 'DRESP1':
            unused_keys_group = list(sub_group.keys())
            #print('keys_group', keys_group)
            #'atta', u'attb', u'dresp_id', u'label', u'region', u'response_type'
            dresp_id = _cast_array(sub_group['dresp_id'])
            atta = _cast(sub_group['atta'])
            #print('atta =', atta)
            attb = _cast(sub_group['attb'])
            label = _cast_array(sub_group['label'])
            region = _cast_array(sub_group['region'])
            response_type = _cast_array(sub_group['response_type'])
            property_type = _cast_array(sub_group['property_type'])
            atti = []
            for (i, dresp_idi, labeli, response_typei, property_typei, regioni,
                 attai, attbi) in zip(count(), dresp_id, label, response_type, property_type,
                                      region, atta, attb):
                # per-card subgroup named by its 0-based position
                drespi_group = sub_group[str(i)]
                labeli = labeli.decode(encoding)
                response_typei = response_typei.decode(encoding)
                # property_type: b'' -> blank; digits -> id; otherwise a name
                if property_typei == b'':
                    property_typei = None
                elif property_typei.isdigit():
                    property_typei = int(property_typei)
                else:
                    property_typei = property_typei.decode(encoding)
                # -1 was the on-disk stand-in for a blank region
                if regioni == -1:
                    regioni = None
                #else:
                    #regioni = regioni.decode(encoding)

                # int, float, str, blank
                if attai == b'':
                    attai = None
                elif b'.' in attai:
                    attai = float(attai)
                elif attai.isdigit():
                    attai = int(attai)
                else:
                    attai = attai.decode(encoding)

                # int, float, str, blank
                if attbi == b'':
                    attbi = None
                elif b'.' in attbi:
                    attbi = float(attbi)
                elif attbi.isdigit():
                    attbi = int(attbi)
                else:
                    attbi = attbi.decode(encoding)

                atti = []
                if 'atti' in drespi_group:
                    atti = _cast_array(drespi_group['atti']).tolist()
                model.add_dresp1(dresp_idi, labeli, response_typei, property_typei, regioni,
                                 attai, attbi, atti, validate=False, comment='')

        elif class_type == 'DRESP2':
            dresp_id = _cast_array(sub_group['dresp_id'])
            label = _cast(sub_group['label'])
            dequation = _cast(sub_group['dequation'])
            dequation_str = _cast(sub_group['func'])
            #dequation_str = _cast(sub_group['dequation_str'])
            region = _cast_array(sub_group['region'])
            method = _cast(sub_group['method'])
            c123 = _cast(sub_group['c123'])

            for (i, dresp_idi, labeli, dequationi, dequation_stri, regioni, methodi, c123i) in zip(
                    count(), dresp_id, label, dequation, dequation_str, region, method, c123):
                c1, c2, c3 = c123i
                # -1 was the on-disk stand-in for a blank region
                if regioni == -1:
                    regioni = None
                #paramsi = {(0, u'DESVAR'): [1, 2, 3]}
                # params live in a per-card subgroup: param_keys plus one
                # 'values' dataset per parameter index j
                paramsi = {}
                dresp_groupi = sub_group[str(i)]
                param_keys = _cast(dresp_groupi['param_keys'])
                #print('param_keys', param_keys)
                for j, param_key_j in enumerate(param_keys):
                    param_values = _cast(dresp_groupi[str(j)]['values'])
                    param_key = param_key_j.decode(encoding)
                    #print('  param_values', (i, j), param_values)
                    param_values2 = [val.decode(encoding) if isinstance(val, bytes) else val
                                     for val in param_values]
                    paramsi[(j, param_key)] = param_values2
                model.log.debug('DRESP2 params = %s' % paramsi)

                # -1 means the equation was stored as a function string
                if dequationi == -1:
                    dequationi = dequation_stri.decode(encoding)
                labeli = labeli.decode(encoding)
                methodi = methodi.decode(encoding)
                model.add_dresp2(dresp_idi, labeli, dequationi, regioni, paramsi,
                                 method=methodi, c1=c1, c2=c2, c3=c3,
                                 validate=False, comment='')

        else:
            raise RuntimeError('error loading %s' % class_type)
        model.card_count[class_type] = len(dresp_id)
def hdf5_load_generic(model, group, name, encoding):
    """generic loader for simple dict-backed model slots (e.g., tables)"""
    for card_type in group.keys():
        sub_group = group[card_type]
        gkeys, values = load_cards_from_keys_values(
            f'{name}/{card_type}', sub_group, encoding, model.log)
        _put_keys_values_into_dict(model, name, gkeys, values)
        model.card_count[card_type] = len(gkeys)
def hdf5_load_properties(model, properties_group, encoding):
    """loads the properties from an HDF5 file

    Common property cards (PSHELL/PSOLID/PIHEX/PROD/PTUBE/PBAR) are stored
    columnwise and rebuilt through the typed ``model.add_*`` methods;
    everything else falls back to the generic key/value loader.
    """
    for card_type in properties_group.keys():
        properties = properties_group[card_type]
        if card_type == 'PSHELL':
            pid = _cast_array(properties['pid'])
            mids = _cast_array(properties['mids'])
            z = _cast_array(properties['z'])
            t = _cast_array(properties['t'])
            twelveIt3 = _cast_array(properties['twelveIt3'])
            tst = _cast_array(properties['tst'])
            nsm = _cast_array(properties['nsm'])
            for pidi, (mid1, mid2, mid3, mid4), (z1, z2), ti, twelveIt3i, tsti, nsmi in zip(
                    pid, mids, z, t, twelveIt3, tst, nsm):
                # NaN is the on-disk stand-in for a blank field
                if np.isnan(ti):
                    ti = None
                    raise RuntimeError('Differential shell thickness is not supported')
                if np.isnan(z1):
                    z1 = None
                if np.isnan(z2):
                    z2 = None
                model.add_pshell(pidi, mid1=mid1, t=ti, mid2=mid2, twelveIt3=twelveIt3i,
                                 mid3=mid3, tst=tsti, nsm=nsmi, z1=z1, z2=z2, mid4=mid4,
                                 comment='')
        elif card_type in ['PSOLID', 'PIHEX']:
            func = model.add_psolid if card_type == 'PSOLID' else model.add_pihex
            pid = _cast_array(properties['pid'])
            mid = _cast_array(properties['mid'])
            cordm = _cast_array(properties['cordm'])
            integ = _cast_array(properties['integ'])
            isop = _cast_array(properties['isop'])
            stress = _cast_array(properties['stress'])
            fctn = _cast_array(properties['fctn'])
            for pidi, midi, cordmi, integi, stressi, isopi, fctni in zip(
                    pid, mid, cordm, integ, stress, isop, fctn):
                # string fields round-trip as bytes; '' means blank
                integi = integi.decode(encoding)
                fctni = fctni.decode(encoding)
                isopi = isopi.decode(encoding)
                stressi = stressi.decode(encoding)
                if integi == '':
                    integi = None
                if fctni == '':
                    fctni = None
                if isopi == '':
                    isopi = None
                if stressi == '':
                    stressi = None
                func(pidi, midi, cordm=cordmi, integ=integi, stress=stressi,
                     isop=isopi, fctn=fctni, comment='')
        elif card_type == 'PROD':
            pid = _cast_array(properties['pid'])
            mid = _cast_array(properties['mid'])
            A = _cast_array(properties['A'])
            j = _cast_array(properties['J'])
            c = _cast_array(properties['c'])
            nsm = _cast_array(properties['nsm'])
            for pidi, midi, Ai, ji, ci, nsmi in zip(
                    pid, mid, A, j, c, nsm):
                model.add_prod(pidi, midi, Ai, j=ji, c=ci, nsm=nsmi, comment='')
        elif card_type == 'PTUBE':
            pid = _cast_array(properties['pid'])
            mid = _cast_array(properties['mid'])
            OD = _cast_array(properties['OD'])
            t = _cast_array(properties['t'])
            nsm = _cast_array(properties['nsm'])
            for pidi, midi, (OD1, OD2), ti, nsmi in zip(
                    pid, mid, OD, t, nsm):
                model.add_ptube(pidi, midi, OD1, t=ti, nsm=nsmi, OD2=OD2, comment='')
        elif card_type == 'PBAR':
            pid = _cast_array(properties['pid'])
            mid = _cast_array(properties['mid'])
            A = _cast_array(properties['A'])
            J = _cast_array(properties['J'])
            I = _cast_array(properties['I'])
            c = _cast_array(properties['c'])
            d = _cast_array(properties['d'])
            e = _cast_array(properties['e'])
            f = _cast_array(properties['f'])
            k = _cast_array(properties['k'])
            nsm = _cast_array(properties['nsm'])
            for (pidi, midi, Ai, Ji, (i1, i2, i12),
                 (c1, c2), (d1, d2), (e1, e2), (f1, f2), (k1, k2), nsmi) in zip(
                     pid, mid, A, J, I,
                     c, d, e, f, k, nsm):
                # bug fix: `k1 == np.nan` is always False (NaN never compares
                # equal to itself); use np.isnan to restore blank K1/K2 fields
                if np.isnan(k1):
                    k1 = None
                if np.isnan(k2):
                    k2 = None
                model.add_pbar(pidi, midi, A=Ai, i1=i1, i2=i2, i12=i12, j=Ji, nsm=nsmi,
                               c1=c1, c2=c2, d1=d1, d2=d2, e1=e1, e2=e2,
                               f1=f1, f2=f2, k1=k1, k2=k2, comment='')
        else:
            # generic fallback for property cards without a columnwise format
            pid, values = load_cards_from_keys_values(
                'properties/%s' % card_type,
                properties, encoding, model.log)
            _put_keys_values_into_dict(model, 'properties', pid, values)
        model.card_count[card_type] = len(pid)

    # smoke-test that every loaded property can be written back out
    for prop in model.properties.values():
        write_card(prop)
def _put_keys_values_into_dict(model, name: str, keys, values, cast_int_keys: bool=True) -> None:
    """add something like an element to a dictionary

    Parameters
    ----------
    model : BDF
        the model to load into
    name : str
        the model attribute to fill (e.g., 'properties')
    keys / values : iterables
        parallel ids and card objects
    cast_int_keys : bool; default=True
        cast keys to int unless the slot uses string/tuple keys
    """
    # smoke-test that each card can be written back out
    for value in values:
        write_card(value)

    slot = getattr(model, name)
    card_count = model.card_count
    # 'dmig', 'dmik', 'dmij', 'dmiji', 'dmi', 'dmiax'
    if cast_int_keys and name not in ['dscreen', 'dti', 'aecomps', 'seconct', 'sebndry']:
        try:
            keys = [int(key) for key in keys]
        except ValueError:  # pragma: no cover
            # If this hits, you need probably have a non-integer key
            # (e.g., a tuple of 2 ints) and need to skip the above
            # caster and figure out the right way to cast it.
            #
            # This could be a string (in which case you just pass the
            # initial check above and then use the normal adder below)
            # similar to 'dscreen'.
            #
            # Another possibility is you have a (int_a, int_b) tuple key.
            # Follow the pattern for 'seconct'.
            print('name =', name)
            print('keys = ', keys)
            print('values = ', values)
            raise

    # refactor: the tuple-key and plain-key branches previously duplicated
    # the whole counting loop; only the key conversion differs
    use_tuple_keys = name in ('seconct', 'sebndry')
    for key, value in zip(keys, values):
        if use_tuple_keys:
            key = tuple(key)
        slot[key] = value
        #print('  *%s %s' % (value.type, key))
        card_type = value.type
        if card_type not in card_count:
            card_count[card_type] = 0
        card_count[card_type] += 1
        model._type_to_id_map[card_type].append(key)
def _put_keys_values_into_list(model, name, keys, values):
    """add something like an MKAERO1 to a list"""
    # smoke-test that each card can be written back out
    for value in values:
        try:
            write_card(value)
        except RuntimeError:
            print(value)
            raise

    slot = getattr(model, name)
    card_count = model.card_count
    type_to_id_map = model._type_to_id_map
    for key, value in zip(keys, values):
        slot.append(value)
        card_type = value.type
        if card_type not in card_count:
            card_count[card_type] = 0
        card_count[card_type] += 1
        type_to_id_map[card_type].append(key)
def _put_keys_values_into_dict_list(model: Any, name: str, idi: int,
                                    keys: np.ndarray,
                                    values: List[Any]):
    """add someting like an SPC into a dictionary that has a list"""
    # smoke-test that each card can be written back out
    for value in values:
        write_card(value)

    slot = getattr(model, name)
    idi = int(idi)
    # all cards for this id share one list
    slot_list = slot.setdefault(idi, [])

    card_count = model.card_count
    for key, value in zip(keys, values):
        slot_list.append(value)
        card_type = value.type
        if card_type not in card_count:
            card_count[card_type] = 0
        card_count[card_type] += 1
        model._type_to_id_map[card_type].append(key)
def load_cards_from_keys_values(name, properties, encoding, log):
    """loads the common 'keys'/'values' HDF5 layout into card objects

    Returns
    -------
    keys : the card ids
    value_objs : the instantiated cards, in the same order
    """
    try:
        keys = _cast(properties['keys'])
    except KeyError:  # pragma: no cover
        print('name = %s' % name)
        print(properties)
        raise
    value_group = properties['values']
    value_objs = _load_cards_from_keys_values(name, value_group, keys, encoding, log)
    return keys, value_objs
def _load_cards_from_keys_values(unused_name, values, keys, encoding, unused_log):
    """instantiates one card object per (key, values-subgroup) pair"""
    value_objs = []
    for key, keyi in zip(keys, values.keys()):
        value = values[keyi]
        # each subgroup records its own card type
        card_type = _cast(value['type'])
        value_objs.append(_load_class(key, value, card_type, encoding))
    return value_objs
def _load_class(key: str, value, card_type: str, encoding: str):
    """instantiates one card object from its HDF5 group

    Parameters
    ----------
    key : str
        the card's key (only used in error messages)
    value : h5py Group
        one dataset/subgroup per card attribute
    card_type : str / bytes
        the card name (e.g., 'GRID'); bytes are decoded first
    encoding : str
        used to decode bytes fields back to str

    Returns
    -------
    class_instance : the loaded card object
    """
    if isinstance(card_type, bytes):
        card_type = card_type.decode(encoding)
    keys_to_read = list(value.keys())
    class_obj = CARD_MAP[card_type]  # see add_card.py ~line 200
    #print(f'--{card_type}--')
    if hasattr(class_obj, '_init_from_empty'):
        class_instance = class_obj._init_from_empty()
    else:
        try:
            class_instance = class_obj()
        except TypeError:  # pragma: no cover
            print('error loading %r' % card_type)
            print(class_obj)
            raise

    _properties = []
    if hasattr(class_obj, '_properties'):
        _properties = class_obj._properties
    #print('  keys_to_read = ', keys_to_read)
    for key_to_cast in keys_to_read:
        if key_to_cast in _properties:
            # derived attributes; the class rebuilds these itself
            continue

        try:
            valuei = _get_casted_value(value, key_to_cast, encoding)
        except AssertionError:
            print('error loading %r' % card_type)
            print(_properties)
            print(key, key_to_cast)
            raise

        if isinstance(valuei, np.ndarray):
            valuei = valuei.tolist()
            # bug fix: guard against empty lists before peeking at [0]
            # (previously raised IndexError on an empty field list)
            if valuei and isinstance(valuei[0], bytes):
                valuei = [valueii.decode(encoding) for valueii in valuei]
        elif isinstance(valuei, list) and valuei and isinstance(valuei[0], bytes):
            valuei = [valueii.decode(encoding) for valueii in valuei]
        elif isinstance(valuei, bytes):
            raise TypeError(f'class={card_type} key={key_to_cast} value={valuei} must be a string (not bytes)')

        try:
            setattr(class_instance, key_to_cast, valuei)
            #print('  set %s to %s' % (key_to_cast, valuei))
        except AttributeError:  # pragma: no cover
            print('error loading %r' % card_type)
            print(_properties)
            print(key, key_to_cast, valuei)
            raise

    if hasattr(class_instance, '_finalize_hdf5'):
        class_instance._finalize_hdf5(encoding)
    #else:
        #print('no %s' % class_instance.type)
    str(class_instance)  # smoke-test the card writer
    return class_instance
def _cast_encoding(value_h5, encoding: str):
    """casts an h5py dataset and decodes any bytes it contains"""
    out = _cast(value_h5)
    if isinstance(out, bytes):
        return out.decode(encoding)
    if isinstance(out, list):
        return [val.decode(encoding) if isinstance(val, bytes) else val
                for val in out]
    # int / float / np.ndarray (and anything else) pass through untouched
    return out
def _get_casted_value(value, key_to_cast: str, encoding: str) -> Any:
    """casts a single card attribute out of its HDF5 group

    A plain dataset (or an empty group) casts directly; a non-empty group
    is rebuilt as a list, one entry per subkey, where each entry may itself
    be a dataset or an indexed-list subgroup.
    """
    value_h5 = value[key_to_cast]
    if isinstance(value_h5, h5py._hl.dataset.Dataset):
        #print('A', key_to_cast)
        valuei = _cast_encoding(value_h5, encoding)
    else:
        #print('B', key_to_cast)
        h5_keys = list(value_h5.keys())
        if len(h5_keys) == 0:
            # empty group; cast it like a dataset
            valuei = _cast_encoding(value_h5, encoding)
        else:
            #print('h5_keys =', h5_keys)
            lst = []
            for h5_key in h5_keys:
                slot_h5 = value_h5[h5_key]
                if isinstance(slot_h5, h5py._hl.dataset.Dataset):
                    valueii = _cast(slot_h5)
                elif isinstance(slot_h5, h5py._hl.group.Group):
                    # nested group: an order-preserving indexed list
                    valueii = _load_indexed_list(h5_key, slot_h5, encoding)
                else:  # pragma: no cover
                    print(key_to_cast, h5_key)
                    print(slot_h5, type(slot_h5))
                    raise NotImplementedError()

                #print(f'key={key_to_cast}; valueii={valueii}; type={type(valueii)}')
                # normalize to int/float/str (decoding any bytes)
                valueii = to_list_int_float_str(valueii, encoding)
                lst.append(valueii)
            valuei = lst
            #valuei = None
        #else:
            #try:
                #valuei = _cast(value_h5)
            #except AttributeError:
                #print(value, key_to_cast, value.keys())
                #print(value_h5, value_h5.keys())
                #raise
            #valuei = None

    #print(f'key={key_to_cast}; valuei={valuei}; type={type(valuei)}')
    #assert not isinstance(valuei, bytes), f'key={key_to_cast}; valuei={valuei}; type={type(valuei)}'
    return valuei
def to_list_int_float_str(valueii: Any, encoding: str) -> Any:
    """normalizes an HDF5-loaded value to int/float/str or a list of them

    bytes are decoded to str; numpy arrays become (decoded) lists;
    int/float/str pass through untouched.
    """
    if isinstance(valueii, (int, float, str)):
        return valueii
    if isinstance(valueii, bytes):
        return valueii.decode(encoding)
    if isinstance(valueii, np.ndarray):
        out = valueii.tolist()
        if isinstance(out[0], bytes):
            out = [item.decode(encoding) if isinstance(item, bytes) else item
                   for item in out]
        return out
    if isinstance(valueii, list):
        if valueii and isinstance(valueii[0], bytes):
            return [item.decode(encoding) if isinstance(item, bytes) else item
                    for item in valueii]
        return valueii
    print(valueii)
    raise NotImplementedError(type(valueii))
def _load_from_class(value, card_type: str, encoding: str):
    """generic loader that only requires an ``_init_from_empty`` method"""
    assert isinstance(card_type, str), card_type
    class_obj = CARD_MAP[card_type]  # see add_card.py ~line 200
    if hasattr(class_obj, '_init_from_empty'):
        class_instance = class_obj._init_from_empty()
    else:
        try:
            class_instance = class_obj()
        except TypeError:  # pragma: no cover
            print('error loading %r' % card_type)
            print(class_obj)
            raise

    # derived attributes listed in _properties are rebuilt by the class
    skip_keys = getattr(class_obj, '_properties', [])
    for key_to_cast in list(value.keys()):
        if key_to_cast in skip_keys:
            continue
        valuei = _get_casted_value(value, key_to_cast, encoding)
        try:
            setattr(class_instance, key_to_cast, valuei)
        except AttributeError:  # pragma: no cover
            print('error loading %r' % card_type)
            print(skip_keys)
            print(key_to_cast, valuei)
            raise

    if hasattr(class_instance, '_finalize_hdf5'):
        class_instance._finalize_hdf5(encoding)
    return class_instance
def hdf5_load_elements(model, elements_group, encoding):
"""loads the elements from an HDF5 file"""
for card_type in elements_group.keys():
elements = elements_group[card_type]
if card_type == 'CTETRA':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids in zip(eids, pids, nodes):
model.add_ctetra(eid, pid, nids, comment='')
elif card_type == 'CPENTA':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids in zip(eids, pids, nodes):
model.add_cpenta(eid, pid, nids, comment='')
elif card_type == 'CPYRAM':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids in zip(eids, pids, nodes):
model.add_cpyram(eid, pid, nids, comment='')
elif card_type == 'CHEXA':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids in zip(eids, pids, nodes):
model.add_chexa(eid, pid, nids, comment='')
elif card_type == 'CROD':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids in zip(eids, pids, nodes):
model.add_crod(eid, pid, nids, comment='')
elif card_type == 'CTUBE':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids in zip(eids, pids, nodes):
model.add_ctube(eid, pid, nids, comment='')
elif card_type == 'CONROD':
eids = _cast_array(elements['eid'])
mids = _cast_array(elements['mid'])
nodes = _cast_array(elements['nodes']).tolist()
A = _cast_array(elements['A'])
J = _cast_array(elements['J'])
c = _cast_array(elements['c'])
nsm = _cast_array(elements['nsm'])
for eid, mid, nids, ai, ji, ci, nsmi in zip(eids, mids, nodes, A, J, c, nsm):
model.add_conrod(eid, mid, nids, A=ai, j=ji, c=ci, nsm=nsmi, comment='')
elif card_type == 'CBAR':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
g0 = _cast_array(elements['g0'])
x = _cast_array(elements['x'])
offt = _cast_array(elements['offt'])
wa = _cast_array(elements['wa'])
wb = _cast_array(elements['wb'])
pa = _cast_array(elements['pa'])
pb = _cast_array(elements['pb'])
for eid, pid, nids, xi, g0i, offti, pai, pbi, wai, wbi in zip(
eids, pids, nodes, x, g0, offt, pa, pb, wa, wb):
if g0i == -1:
g0i = None
if xi[0] == np.nan:
xi = [None, None, None]
model.add_cbar(eid, pid, nids, xi, g0i, offt=offti.decode(encoding),
pa=pai, pb=pbi, wa=wai, wb=wbi, comment='')
elif card_type == 'CBEAM':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
g0 = _cast_array(elements['g0'])
x = _cast_array(elements['x'])
bit = _cast_array(elements['bit'])
offt = _cast_array(elements['offt'])
sa = _cast_array(elements['sa'])
sb = _cast_array(elements['sb'])
wa = _cast_array(elements['wa'])
wb = _cast_array(elements['wb'])
pa = _cast_array(elements['pa'])
pb = _cast_array(elements['pb'])
for eid, pid, nids, xi, g0i, offti, biti, pai, pbi, wai, wbi, sai, sbi in zip(
eids, pids, nodes, x, g0, offt, bit, pa, pb, wa, wb, sa, sb):
if g0i == -1:
g0i = None
if xi[0] == np.nan:
xi = [None, None, None]
if biti == np.nan:
offti = offti.decode(encoding)
else:
offti = None
model.add_cbeam(eid, pid, nids, xi, g0i, offt=offti, bit=biti,
pa=pai, pb=pbi, wa=wai, wb=wbi, sa=sai, sb=sbi, comment='')
elif card_type in ['CELAS1', 'CDAMP1']:
func = model.add_celas1 if card_type == 'CELAS1' else model.add_cdamp1
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
components = _cast(elements['components'])
for eid, pid, nids, (c1, c2) in zip(eids, pids, nodes, components):
func(eid, pid, nids, c1=c1, c2=c2, comment='')
elif card_type == 'CELAS2':
eids = _cast_array(elements['eid'])
k = _cast_array(elements['K'])
ge = _cast_array(elements['ge'])
s = _cast_array(elements['s'])
nodes = _cast_array(elements['nodes']).tolist()
components = _cast(elements['components'])
for eid, ki, nids, (c1, c2), gei, si in zip(eids, k, nodes, components, ge, s):
model.add_celas2(eid, ki, nids, c1=c1, c2=c2, ge=gei, s=si, comment='')
elif card_type == 'CDAMP2':
eids = _cast_array(elements['eid'])
b = _cast_array(elements['B'])
nodes = _cast_array(elements['nodes']).tolist()
components = _cast(elements['components'])
for eid, bi, nids, (c1, c2) in zip(eids, b, nodes, components):
nids = list([nid if nid != 0 else None for nid in nids])
model.add_cdamp2(eid, bi, nids, c1=c1, c2=c2, comment='')
elif card_type in ['CELAS3', 'CDAMP3', 'CDAMP5', 'CVISC']:
if card_type == 'CELAS3':
func = model.add_celas3
elif card_type == 'CDAMP3':
func = model.add_cdamp3
elif card_type == 'CDAMP5':
func = model.add_cdamp5
elif card_type == 'CVISC':
func = model.add_cvisc
else:
raise NotImplementedError(card_type)
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids in zip(eids, pids, nodes):
nids = list([nid if nid != 0 else None for nid in nids])
model.add_celas3(eid, pid, nids, comment='')
elif card_type == 'CELAS4':
eids = _cast_array(elements['eid'])
k = _cast_array(elements['K'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, ki, nids in zip(eids, k, nodes):
nids = list([nid if nid != 0 else None for nid in nids])
model.add_celas4(eid, ki, nids, comment='')
elif card_type == 'CDAMP4':
eids = _cast_array(elements['eid'])
b = _cast_array(elements['B'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, bi, nids in zip(eids, b, nodes):
nids = list([nid if nid != 0 else None for nid in nids])
model.add_cdamp4(eid, bi, nids, comment='')
elif card_type == 'CBUSH':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
g0 = _cast_array(elements['g0'])
x = _cast_array(elements['x']).tolist()
cid = _cast_array(elements['cid'])
ocid = _cast_array(elements['ocid'])
s = _cast_array(elements['s'])
si = _cast_array(elements['si']).tolist()
for eid, pid, nids, xi, g0i, cidi, s2, ocidi, si2 in zip(
eids, pids, nodes, x, g0, cid, s, ocid, si):
nids = list([nid if nid != 0 else None for nid in nids])
if g0i == -1:
g0i = None
#if xi[0] == np.nan:
#xi = [None, None, None]
if cidi == -1:
cidi = None
if si2[0] == np.nan:
si2 = [None, None, None]
elem = model.add_cbush(eid, pid, nids, xi, g0i, cid=cidi, s=s2, ocid=ocidi, si=si2,
comment='')
write_card(elem)
elif card_type == 'CGAP':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
g0 = _cast_array(elements['g0'])
x = _cast_array(elements['x']).tolist()
cid = _cast_array(elements['cid'])
for eid, pid, nids, xi, g0i, cidi in zip(
eids, pids, nodes, x, g0, cid):
nids = list([nid if nid != 0 else None for nid in nids])
if g0i == -1:
g0i = None
#if xi[0] == np.nan:
#xi = [None, None, None]
if cidi == -1:
cidi = None
elem = model.add_cgap(eid, pid, nids, xi, g0i, cid=cidi, comment='')
#write_card(elem)
elif card_type == 'CBUSH1D':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
cid = _cast_array(elements['cid'])
for eid, pid, nids, cidi in zip(eids, pids, nodes, cid):
nids = list([nid if nid != 0 else None for nid in nids])
if cidi == -1:
cidi = None
model.add_cbush1d(eid, pid, nids, cid=cidi, comment='')
elif card_type == 'CBUSH2D':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
cid = _cast_array(elements['cid'])
sptid = _cast_array(elements['sptid'])
plane = _cast_array(elements['plane']).tolist()
for eid, pid, nids, cidi, planei, sptidi in zip(eids, pids, nodes, cid, plane, sptid):
planei = planei.decode(encoding)
model.add_cbush2d(eid, pid, nids, cid=cidi, plane=planei, sptid=sptidi, comment='')
elif card_type in ['CTRIA3', 'CTRIAR']:
func = model.add_ctria3 if card_type == 'CTRIA3' else model.add_ctriar
# TODO: doesn't support tflag
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
mcids = _cast_array(elements['mcid'])
zoffsets = _cast_array(elements['zoffset'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, mcid, theta, zoffset in zip(
eids, pids, nodes, mcids, thetas, zoffsets):
if mcid == -1:
theta_mcid = theta
else:
theta_mcid = mcid
model.add_ctria3(eid, pid, nids, zoffset=zoffset, theta_mcid=theta_mcid,
tflag=0, T1=None, T2=None, T3=None, comment='')
elif card_type in ['CQUAD4', 'CQUADR']:
func = model.add_cquad4 if card_type == 'CQUAD4' else model.add_cquadr
# TODO: doesn't support tflag
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
mcids = _cast_array(elements['mcid'])
zoffsets = _cast_array(elements['zoffset'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, mcid, theta, zoffset in zip(
eids, pids, nodes, mcids, thetas, zoffsets):
if mcid == -1:
theta_mcid = theta
else:
theta_mcid = mcid
func(eid, pid, nids, zoffset=zoffset, theta_mcid=theta_mcid,
tflag=0, T1=None, T2=None, T3=None, T4=None, comment='')
elif card_type == 'CTRIA6':
# TODO: doesn't support tflag
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
mcids = _cast_array(elements['mcid'])
zoffsets = _cast_array(elements['zoffset'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, mcid, theta, zoffset in zip(
eids, pids, nodes, mcids, thetas, zoffsets):
if mcid == -1:
theta_mcid = theta
else:
theta_mcid = mcid
nids = list([nid if nid != 0 else None for nid in nids])
model.add_ctria6(eid, pid, nids, zoffset=zoffset, theta_mcid=theta_mcid,
tflag=0, T1=None, T2=None, T3=None, comment='')
elif card_type == 'CQUAD8':
# TODO: doesn't support tflag
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
mcids = _cast_array(elements['mcid'])
zoffsets = _cast_array(elements['zoffset'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, mcid, theta, zoffset in zip(
eids, pids, nodes, mcids, thetas, zoffsets):
if mcid == -1:
theta_mcid = theta
else:
theta_mcid = mcid
nids = list([nid if nid != 0 else None for nid in nids])
model.add_cquad8(eid, pid, nids, zoffset=zoffset, theta_mcid=theta_mcid,
tflag=0, T1=None, T2=None, T3=None, T4=None, comment='')
elif card_type == 'CQUAD':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
mcids = _cast_array(elements['mcid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, mcid, theta in zip(eids, pids, nodes, mcids, thetas):
if mcid == -1:
theta_mcid = theta
else:
theta_mcid = mcid
nids = list([nid if nid != 0 else None for nid in nids])
model.add_cquad(eid, pid, nids, theta_mcid=theta_mcid, comment='')
elif card_type == 'CSHEAR':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids in zip(eids, pids, nodes):
model.add_cshear(eid, pid, nids, comment='')
elif card_type == 'CTRIAX':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
mcids = _cast_array(elements['mcid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, mcid, theta in zip(eids, pids, nodes, mcids, thetas):
if mcid == -1:
theta_mcid = theta
else:
theta_mcid = mcid
model.add_ctriax(eid, pid, nids, theta_mcid=theta_mcid, comment='')
elif card_type in ['CTRAX3', 'CTRAX6']:
if card_type == 'CTRAX3':
func = model.add_ctrax3
else:
func = model.add_ctrax6
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, theta in zip(eids, pids, nodes, thetas):
func(eid, pid, nids, theta=theta, comment='')
elif card_type == 'CTRIAX6':
eids = _cast_array(elements['eid'])
mids = _cast_array(elements['mid'])
thetas = _cast_array(elements['theta'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, mid, nids, theta in zip(eids, mids, nodes, thetas):
nids = list([nid if nid != 0 else None for nid in nids])
model.add_ctriax6(eid, mid, nids, theta=theta, comment='')
elif card_type == 'CQUADX':
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
mcids = _cast_array(elements['mcid'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, theta, mcid in zip(eids, pids, nodes, thetas, mcids):
if mcid == -1:
theta_mcid = theta
else:
theta_mcid = mcid
nids = [None if nid == 0 else nid
for nid in nids]
model.add_cquadx(eid, pid, nids, theta_mcid=theta_mcid, comment='')
elif card_type in ['CQUADX4', 'CQUADX8']:
if card_type == 'CQUADX4':
func = model.add_cquadx4
else:
func = model.add_cquadx8
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, theta in zip(eids, pids, nodes, thetas):
func(eid, pid, nids, theta=theta, comment='')
elif card_type in ['CPLSTN3', 'CPLSTN4',
'CPLSTS3', 'CPLSTS4']:
func_map = {
'CPLSTN3' : model.add_cplstn3,
'CPLSTN4' : model.add_cplstn4,
'CPLSTS3' : model.add_cplsts3,
'CPLSTS4' : model.add_cplsts4,
}
func = func_map[card_type]
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, theta in zip(eids, pids, nodes, thetas):
func(eid, pid, nids, theta=theta, comment='')
elif card_type in ['CPLSTN6', 'CPLSTN8',
'CPLSTS6', 'CPLSTS8']:
func_map = {
'CPLSTN6' : model.add_cplstn6,
'CPLSTN8' : model.add_cplstn8,
'CPLSTS6' : model.add_cplsts6,
'CPLSTS8' : model.add_cplsts8,
}
func = func_map[card_type]
eids = _cast_array(elements['eid'])
pids = _cast_array(elements['pid'])
thetas = _cast_array(elements['theta'])
nodes = _cast_array(elements['nodes']).tolist()
for eid, pid, nids, theta in zip(eids, pids, nodes, thetas):
func(eid, pid, nids, theta=theta, comment='')
else:
eids, values = load_cards_from_keys_values(
'elements/%s' % card_type,
elements, encoding, model.log)
_put_keys_values_into_dict(model, 'elements', eids, values)
#model.add_cdamp4(eid, b, nids, comment='')
#model.add_cbush2d(eid, pid, nids, cid=0, plane='XY', sptid=None, comment='')
#model.add_cfast(eid, pid, Type, ida, idb, gs=None, ga=None, gb=None,
#xs=None, ys=None, zs=None, comment='')
#model.add_cmass1(eid, pid, nids, c1=0, c2=0, comment='')
#model.add_cmass2(eid, mass, nids, c1, c2, comment='')
#model.add_cmass3(eid, pid, nids, comment='')
#model.add_cmass4(eid, mass, nids, comment='')
#model.log.debug(card_type)
model.card_count[card_type] = len(eids)
def hdf5_load_plotels(model, elements_group, unused_encoding):
    """Load the PLOTEL cards out of an HDF5 group into the model.

    Only the 'PLOTEL' card type is supported; any other key is an error.
    Updates ``model.card_count`` for each loaded card type.
    """
    for card_type in elements_group.keys():
        elements = elements_group[card_type]
        if card_type != 'PLOTEL':  # pragma: no cover
            raise RuntimeError('card_type=%s in hdf5_load_plotels' % card_type)
        eids = _cast_array(elements['eid'])
        nodes = _cast_array(elements['nodes']).tolist()
        for eid, nids in zip(eids, nodes):
            model.add_plotel(eid, nids, comment='')
        model.card_count[card_type] = len(eids)
def write_card(elem):  # pragma: no cover
    """verifies that the card was built correctly near where the card was made"""
    try:
        elem.write_card(size=8, is_double=False)
        return
    except RuntimeError:
        # small-field write failed; fall through and retry with large-field
        pass
    except Exception:
        print(elem.get_stats())
        raise
    elem.write_card(size=16, is_double=False)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,633
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/gui/qt_files/gui_attributes.py
|
"""
defines GuiAttributes, which defines Gui getter/setter methods
and is inherited from many GUI classes
"""
import os
import sys
import traceback
from collections import OrderedDict
from typing import List, Dict, Any, Optional
import numpy as np
import vtk
from qtpy import QtGui
from qtpy.QtWidgets import QMainWindow
try:
import matplotlib
IS_MATPLOTLIB = True
except ImportError:
IS_MATPLOTLIB = False
import pyNastran
from pyNastran.gui.gui_objects.settings import Settings
from pyNastran.gui.qt_files.tool_actions import ToolActions
from pyNastran.gui.qt_files.view_actions import ViewActions
from pyNastran.gui.qt_files.group_actions import GroupActions
from pyNastran.gui.qt_files.mouse_actions import MouseActions
from pyNastran.gui.qt_files.load_actions import LoadActions
from pyNastran.gui.qt_files.mark_actions import MarkActions
from pyNastran.gui.menus.legend.legend_object import LegendObject
from pyNastran.gui.menus.highlight.highlight_object import HighlightObject, MarkObject
from pyNastran.gui.menus.preferences.preferences_object import PreferencesObject
IS_CUTTING_PLANE = False
if IS_MATPLOTLIB:
from pyNastran.gui.menus.cutting_plane.cutting_plane_object import CuttingPlaneObject
from pyNastran.gui.menus.cutting_plane.shear_moment_torque_object import ShearMomentTorqueObject
IS_CUTTING_PLANE = True
from pyNastran.gui.menus.clipping.clipping_object import ClippingObject
from pyNastran.gui.menus.camera.camera_object import CameraObject
from pyNastran.gui.menus.edit_geometry_properties.edit_geometry_properties_object import (
EditGeometryPropertiesObject)
from pyNastran.gui.utils.vtk.gui_utils import remove_actors_from_gui
from pyNastran.gui.utils.vtk.vtk_utils import (
numpy_to_vtk_points, create_vtk_cells_of_constant_element_type)
from pyNastran.bdf.cards.base_card import deprecated
from pyNastran.utils import print_bad_path
IS_TESTING = 'test' in sys.argv[0]
IS_OFFICIAL_RELEASE = 'dev' not in pyNastran.__version__
class GeometryObject:
    """
    Placeholder for a future geometry creation/modification menu object;
    currently it only stores the parent gui and provides a no-op ``show()``.
    """
    def __init__(self, parent):
        # parent is the gui object this menu object is attached to
        self.gui = parent
    def show(self):
        """no-op; the geometry menu is not implemented yet"""
        pass
    #def create(self):
        #"""
        #Create
        #- Point
        #- Line
        #- Surface
        #- Solid
        #- Coord
        #Modify
        #Delete
        #"""
        #pass
    #def create_point(self):
        #pass
    #def crate_surface(self):
        #pass
    #def create_coord(self):
        #pass
    #def modify(self):
        #pass
class GuiAttributes:
"""All methods in this class must not require VTK"""
    def __init__(self, **kwds):
        """
        These variables are common between the GUI and
        the batch mode testing that fakes the GUI.

        Parameters (via **kwds)
        -----------------------
        inputs : dict
            command-line style inputs; reads 'debug' and (optionally) 'test'
        res_widget : ResultsSidebar-like object
            the results sidebar widget (may be a mock in batch mode)
        """
        inputs = kwds['inputs']
        res_widget = kwds['res_widget']
        self.dev = False
        self.log = None  # it hasn't been initialized yet
        self.log_widget = None
        self._log_messages = []
        self._performance_mode = False
        #self.performance_mode = True
        # totally broken for solids
        self.make_contour_filter = False
        # helper objects that split the gui's logic out of this class
        self.settings = Settings(self)
        self.tool_actions = ToolActions(self)
        self.view_actions = ViewActions(self)
        self.group_actions = GroupActions(self)
        self.mouse_actions = MouseActions(self)
        self.load_actions = LoadActions(self)
        self.mark_actions = MarkActions(self)
        # menu/dialog objects
        self.legend_obj = LegendObject(self)
        self.camera_obj = CameraObject(self)
        self.clipping_obj = ClippingObject(self)
        self.highlight_obj = HighlightObject(self)
        self.mark_obj = MarkObject(self)
        self.preferences_obj = PreferencesObject(self)
        if IS_MATPLOTLIB:
            self.cutting_plane_obj = CuttingPlaneObject(self)
            self.shear_moment_torque_obj = ShearMomentTorqueObject(self)
        self.edit_geometry_properties_obj = EditGeometryPropertiesObject(self)
        self.geometry_obj = GeometryObject(self)
        self.min_max_actors = []
        self.glyph_scale_factor = 1.0
        self.html_logging = False
        # the result type being currently shown
        # for a Nastran NodeID/displacement, this is 'node'
        # for a Nastran ElementID/PropertyID, this is 'element'
        self.result_location = None
        self.obj_names = []
        self.case_keys = []
        self.res_widget = res_widget
        self._show_flag = True
        self.observers = {}
        # the gui is actually running
        # we set this to False when testing
        self.is_gui = True
        # testing enables additional checks
        # it's different than if we're just running tests
        if 'test' in inputs:
            self.is_testing_flag = inputs['test']
        else:
            self.is_testing_flag = False
        # just initializing the variable
        self.is_groups = False
        self._logo = None
        self._script_path = None
        self._icon_path = ''
        self.title = None
        self.min_value = None
        self.max_value = None
        self.blue_to_red = False
        self._is_axes_shown = True
        self.nvalues = 9
        #-------------
        # window variables
        self._modify_groups_window_shown = False
        #self._label_window = None
        #-------------
        # inputs dict
        self.is_edges = False
        self.is_edges_black = self.is_edges
        #self.format = ''
        debug = inputs['debug']
        self.debug = debug
        assert debug in [True, False], 'debug=%s' % debug
        #-------------
        # format
        self.format = None
        self.format_class_map = {}
        self.supported_formats = []
        self.fmts = []
        self.infile_name = None
        self.out_filename = None
        # file
        self.menu_bar_format = None
        self.dirname = ''
        self.last_dir = ''  # last visited directory while opening file
        self._default_python_file = None
        #-------------
        # internal params
        self.ncases = 0
        self.icase = 0
        self.icase_disp = None
        self.icase_vector = None
        self.icase_fringe = None
        self.nnodes = 0
        self.nelements = 0
        self.model_type = None
        self.tools = []
        self.checkables = []
        self.actions = {}
        self.modules = OrderedDict()
        # actor_slots
        self.text_actors = {}
        self.geometry_actors = OrderedDict()
        self.alt_grids = {}  #additional grids
        # coords
        self.transform = {}
        self.axes = {}
        #geom = Geom(color, line_thickness, etc.)
        #self.geometry_properties = {
        #    'name' : Geom(),
        #}
        self.model_data = ModelData(self)
        self.num_user_points = 0
        self._is_displaced = False
        self._is_forces = False
        self._is_fringe = False
        self._xyz_nominal = None
        self.nvalues = 9
        # per-model slots, keyed on self.name ('main' by default); see the
        # grid/grid_mapper/geom_actor/nid_map/eid_map properties below
        self.nid_maps = {}
        self.eid_maps = {}
        self.name = 'main'
        self.models = {}
        #if not isinstance(res_widget, MockResWidget):
        #if qt_version == 5:
        #super(QMainWindow, self).__init__()
        self.main_grids = {}
        self.main_grid_mappers = {}
        self.main_geometry_actors = {}
        self.main_edge_mappers = {}
        self.main_edge_actors = {}
        # distinct RGB (0.0-1.0) colors cycled through for actors
        self.color_order = [
            (1.0, 0.145098039216, 1.0),
            (0.0823529411765, 0.0823529411765, 1.0),
            (0.0901960784314, 1.0, 0.941176470588),
            (0.501960784314, 1.0, 0.0941176470588),
            (1.0, 1.0, 0.117647058824),
            (1.0, 0.662745098039, 0.113725490196)
        ]
        # all-black color transfer function (presumably for black edge
        # rendering; see is_edges_black -- TODO confirm)
        self.color_function_black = vtk.vtkColorTransferFunction()
        self.color_function_black.AddRGBPoint(0.0, 0.0, 0.0, 0.0)
        self.color_function_black.AddRGBPoint(1.0, 0.0, 0.0, 0.0)
@property
def geometry_properties(self):
return self.model_data.geometry_properties
@geometry_properties.setter
def geometry_properties(self, geometry_properties):
self.model_data.geometry_properties = geometry_properties
@property
def groups(self):
return self.model_data.groups
@groups.setter
def groups(self, groups: Dict[str, Any]):
self.model_data.groups = groups
@property
def group_active(self):
return self.model_data.group_active
@group_active.setter
def group_active(self, group_active: str):
self.model_data.group_active = group_active
@property
def follower_nodes(self):
return self.model_data.follower_nodes
@follower_nodes.setter
def follower_nodes(self, follower_nodes):
self.model_data.follower_nodes = follower_nodes
@property
def follower_functions(self):
return self.model_data.follower_functions
@follower_functions.setter
def follower_functions(self, follower_functions):
self.model_data.follower_functions = follower_functions
@property
def label_actors(self):
return self.model_data.label_actors
@label_actors.setter
def label_ids(self, label_actors: List[Any]):
self.model_data.label_actors = label_actors
@property
def label_ids(self):
return self.model_data.label_ids
@label_ids.setter
def label_ids(self, label_ids: List[int]):
self.model_data.label_ids = label_ids
@property
def label_scale(self) -> float:
return self.model_data.label_scale
@property
def label_scale(self, label_scale: float):
self.model_data.label_scale = label_scale
@property
def result_cases(self):
return self.model_data.result_cases
@result_cases.setter
def result_cases(self, result_cases: Dict[int, Any]):
self.model_data.result_cases = result_cases
@property
def performance_mode(self):
"""get the performance mode"""
return self._performance_mode
@performance_mode.setter
def performance_mode(self, performance_mode):
"""
Set the performance mode. If performance mode flips
to False, we dump the log buffer.
"""
if not performance_mode and self._log_messages:
msg = ''.join(self._log_messages)
#setUpdatesEnabled(False)
#TxtBrows.append(SomeBigHTMLString)
self._log_msg(msg)
#setUpdatesEnabled(True)
self._log_messages = []
self._performance_mode = performance_mode
    def start_stop_performance_mode(func):
        """
        Decorator that suppresses logging for the wrapped method.
        If we started with logging suppressed, we won't unsuppress
        logging at the end of the function (so nested decorated calls
        don't flush the buffer early).
        """
        def new_func(self, *args, **kwargs):
            """The actual function exec'd by the decorated function."""
            # remember the caller's mode so we only toggle it if *we* set it
            performance_mode_initial = self.performance_mode
            if not performance_mode_initial:
                self.performance_mode = True
            try:
                n = func(self, *args, **kwargs)
            except Exception:
                # restore the mode (flushing the buffered log) even on failure
                if not performance_mode_initial:
                    self.performance_mode = False
                raise
            if not performance_mode_initial:
                self.performance_mode = False
            return n
        return new_func
#-------------------------------------------------------------------
# deprecated attributes
    def deprecated(self, old_name: str, new_name: str, deprecated_version: Optional[List[int]]) -> None:
        """
        Throws a deprecation message and crashes if past a specific version.

        Parameters
        ----------
        old_name : str
            the old function name
        new_name : str
            the new function name
        deprecated_version : List[int] / None
            the version the method was first deprecated in
        """
        # delegate to the shared bdf implementation; levels=[0] reports
        # only the immediate caller in the traceback
        deprecated(old_name, new_name, deprecated_version, levels=[0])
#-------------------------------------------------------------------
# geom
def clear_actor(self, actor_name):
if actor_name in self.gui.alt_grids:
del self.alt_grids[actor_name]
if actor_name in self.geometry_actors:
actor = self.geometry_actors[actor_name]
self.rend.RemoveActor(actor)
del self.geometry_actors[actor_name]
@property
def grid(self):
"""gets the active grid"""
#print('get grid; %r' % self.name)
return self.main_grids[self.name]
@grid.setter
def grid(self, grid):
"""sets the active grid"""
#print('set grid; %r' % self.name)
self.main_grids[self.name] = grid
@property
def grid_mapper(self):
"""gets the active grid_mapper"""
return self.main_grid_mappers[self.name]
@grid_mapper.setter
def grid_mapper(self, grid_mapper):
"""sets the active grid_mapper"""
self.main_grid_mappers[self.name] = grid_mapper
@property
def geom_actor(self):
"""gets the active geom_actor"""
return self.main_geometry_actors[self.name]
@geom_actor.setter
def geom_actor(self, geom_actor):
"""sets the active geom_actor"""
self.main_geometry_actors[self.name] = geom_actor
#-------------------------------------------------------------------
# edges
@property
def edge_mapper(self):
return self.main_edge_mappers[self.name]
@edge_mapper.setter
def edge_mapper(self, edge_mapper):
self.main_edge_mappers[self.name] = edge_mapper
@property
def edge_actor(self):
"""gets the active edge_actor"""
return self.main_edge_actors[self.name]
@edge_actor.setter
def edge_actor(self, edge_actor):
"""sets the active edge_actor"""
self.main_edge_actors[self.name] = edge_actor
def set_glyph_scale_factor(self, scale):
"""sets the glyph scale factor"""
if scale == np.nan:
self.log.error('cannot set loads scale factor because no 1D, 2D, or 3D elements exist')
return
self.glyph_scale_factor = scale
self.glyphs.SetScaleFactor(scale)
@property
def nid_map(self):
"""gets the node_id map"""
return self.nid_maps[self.name]
@nid_map.setter
def nid_map(self, nid_map):
"""sets the node_id map"""
self.nid_maps[self.name] = nid_map
@property
def eid_map(self):
"""gets the element_id map"""
try:
return self.eid_maps[self.name]
except Exception:
msg = 'KeyError: key=%r; keys=%s' % (self.name, list(self.eid_maps.keys()))
raise KeyError(msg)
@eid_map.setter
def eid_map(self, eid_map):
"""sets the element_id map"""
self.eid_maps[self.name] = eid_map
#-------------------------------------------------------------------
    def set_point_grid(self, name, nodes, elements, color,
                       point_size=5, opacity=1., add=True):
        """
        Makes a POINT grid

        Parameters
        ----------
        name : str
            the name of the alternate grid to create
        nodes : (nnodes, 3) float ndarray
            the xyz locations
        elements : ndarray
            the connectivity
            NOTE(review): cells are created with etype=9 (vtkQuad), which
            looks copied from set_quad_grid - confirm for point data
        color : (float, float, float)
            RGB color, each 0.0 to 1.0
        point_size : int; default=5
            the point size
        opacity : float; default=1.
            the actor opacity
        add : bool; default=True
            add the actor to the gui immediately
        """
        self.create_alternate_vtk_grid(name, color=color, point_size=point_size,
                                       opacity=opacity, representation='point')
        nnodes = nodes.shape[0]
        if nnodes == 0:
            # nothing to draw
            return
        assert isinstance(nodes, np.ndarray), type(nodes)
        points = numpy_to_vtk_points(nodes)
        grid = self.alt_grids[name]
        grid.SetPoints(points)
        etype = 9  # vtk.vtkQuad().GetCellType()
        create_vtk_cells_of_constant_element_type(grid, elements, etype)
        if add:
            self._add_alt_actors({name : self.alt_grids[name]})
            #if name in self.geometry_actors:
            self.geometry_actors[name].Modified()
    def set_quad_grid(self, name, nodes, elements, color,
                      line_width=5, opacity=1., representation='wire', add=True):
        """
        Makes a CQUAD4 grid

        Parameters
        ----------
        name : str
            the name of the alternate grid to create
        nodes : (nnodes, 3) float ndarray
            the xyz locations
        elements : (nquads, 4) ndarray
            the quad connectivity
        color : (float, float, float)
            RGB color, each 0.0 to 1.0
        line_width : int; default=5
            the line width
        opacity : float; default=1.
            the actor opacity
        representation : str; default='wire'
            how the actor is drawn
        add : bool; default=True
            add the actor to the gui immediately
        """
        self.create_alternate_vtk_grid(name, color=color, line_width=line_width,
                                       opacity=opacity, representation=representation)
        nnodes = nodes.shape[0]
        nquads = elements.shape[0]
        # empty input -> nothing to draw
        if nnodes == 0:
            return
        if nquads == 0:
            return
        #print('adding quad_grid %s; nnodes=%s nquads=%s' % (name, nnodes, nquads))
        assert isinstance(nodes, np.ndarray), type(nodes)
        points = numpy_to_vtk_points(nodes)
        grid = self.alt_grids[name]
        grid.SetPoints(points)
        etype = 9  # vtk.vtkQuad().GetCellType()
        create_vtk_cells_of_constant_element_type(grid, elements, etype)
        if add:
            self._add_alt_actors({name : self.alt_grids[name]})
            #if name in self.geometry_actors:
            self.geometry_actors[name].Modified()
def _add_alt_actors(self, grids_dict, names_to_ignore=None):
if names_to_ignore is None:
names_to_ignore = ['main']
names = set(list(grids_dict.keys()))
names_old = set(list(self.geometry_actors.keys()))
names_old = names_old - set(names_to_ignore)
#print('names_old1 =', names_old)
#names_to_clear = names_old - names
#self._remove_alt_actors(names_to_clear)
#print('names_old2 =', names_old)
#print('names =', names)
for name in names:
grid = grids_dict[name]
self.tool_actions._add_alt_geometry(grid, name)
def _remove_alt_actors(self, names=None):
if names is None:
names = list(self.geometry_actors.keys())
names.remove('main')
for name in names:
actor = self.geometry_actors[name]
self.rend.RemoveActor(actor)
del actor
@property
def displacement_scale_factor(self):
"""
# dim_max = max_val * scale
# scale = dim_max / max_val
# 0.25 added just cause
scale = self.displacement_scale_factor / tnorm_abs_max
"""
#scale = dim_max / tnorm_abs_max * 0.25
scale = self.settings.dim_max * 0.25
return scale
def set_script_path(self, script_path):
"""Sets the path to the custom script directory"""
self._script_path = script_path
def set_icon_path(self, icon_path):
"""Sets the path to the icon directory where custom icons are found"""
self._icon_path = icon_path
def form(self):
formi = self.res_widget.get_form()
return formi
def get_form(self):
return self._form
    def set_form(self, formi):
        """caches the form and pushes it to the results sidebar widget

        NOTE(review): ``data`` is built but never used - dead code?
        NOTE(review): ``key = list(self.case_keys)[0]`` raises IndexError
        for an empty model - confirm callers guard against that.
        NOTE(review): ``'centroid' if location else 'nodal'`` keys off the
        *truthiness* of ``location`` (any non-empty string is True); confirm
        it shouldn't compare against a specific location value instead.
        """
        self._form = formi
        data = []
        for key in self.case_keys:
            assert isinstance(key, int), key
            unused_obj, (i, unused_name) = self.result_cases[key]
            form_tuple = (i, [])
            data.append(form_tuple)
        self.res_widget.update_results(formi, self.name)
        key = list(self.case_keys)[0]
        location = self.get_case_location(key)
        method = 'centroid' if location else 'nodal'
        data2 = [(method, None, [])]
        self.res_widget.update_methods(data2)
    def _remove_old_geometry(self, geom_filename):
        """Clears out the state of the previously loaded model.

        Parameters
        ----------
        geom_filename : str / None
            the new geometry filename; None/'' means "nothing to load"

        Returns
        -------
        skip_reading : bool
            True if the caller should skip loading (no filename, or dev mode)
        """
        skip_reading = False
        # in dev mode, leave the old state alone
        if self.dev:
            return skip_reading
        self.eid_map = {}
        self.nid_map = {}
        params_to_delete = (
            'case_keys', 'icase', 'isubcase_name_map',
            'result_cases', 'eid_map', 'nid_map',
        )
        if geom_filename is None or geom_filename == '':
            skip_reading = True
            return skip_reading
        else:
            self.turn_text_off()
            self.grid.Reset()
            self.model_data.result_cases = OrderedDict()
            self.ncases = 0
            # best-effort cleanup of per-model attributes; some are
            # properties and may refuse deletion
            for param in params_to_delete:
                if hasattr(self, param):  # TODO: is this correct???
                    try:
                        delattr(self, param)
                    except AttributeError:
                        msg = 'cannot delete %r; hasattr=%r' % (param, hasattr(self, param))
                        self.log.warning(msg)
            skip_reading = False
        #self.scalar_bar_actor.VisibilityOff()
        self.scalar_bar_actor.Modified()
        return skip_reading
#---------------------------------------------------------------------------
def _create_load_file_dialog(self, qt_wildcard, title, default_filename=None):
wildcard_level, fname = self.load_actions.create_load_file_dialog(
qt_wildcard, title, default_filename=default_filename)
return wildcard_level, fname
@start_stop_performance_mode
def on_run_script(self, python_file=False):
    """
    Pulldown for running a python script.

    Parameters
    ----------
    python_file : str / None / False; default=False
        the script path; None/False pops a file dialog

    Returns
    -------
    is_passed : bool
        did the script execute without raising
    """
    is_passed = False
    if python_file in [None, False]:
        title = 'Choose a Python Script to Run'
        wildcard = "Python (*.py)"
        infile_name = self._create_load_file_dialog(
            wildcard, title, self._default_python_file)[1]
        if not infile_name:
            return is_passed  # user clicked cancel

        #python_file = os.path.join(script_path, infile_name)
        python_file = os.path.join(infile_name)
    if not os.path.exists(python_file):
        msg = 'python_file = %r does not exist' % python_file
        self.log_error(msg)
        return is_passed

    with open(python_file, 'r') as python_file_obj:
        txt = python_file_obj.read()
    is_passed = self._execute_python_code(txt, show_msg=False)
    if not is_passed:
        return is_passed
    # remember the last successful script for the next dialog default
    self._default_python_file = python_file
    self.log_command('self.on_run_script(%r)' % python_file)
    print('self.on_run_script(%r)' % python_file)
    return is_passed
def _execute_python_code(self, txt, show_msg=True):
    """
    Executes arbitrary python code in this method's scope.

    SECURITY NOTE: ``exec`` runs the text with full privileges; only
    user-chosen local scripts should ever reach this method.

    Parameters
    ----------
    txt : str
        the python source to execute
    show_msg : bool; default=True
        echo the code to the command log before running it

    Returns
    -------
    is_passed : bool
        True if the code ran without raising
    """
    is_passed = False
    if len(txt) == 0:
        return is_passed
    if show_msg:
        self.log_command(txt)
    try:
        exec(txt)
    except TypeError as error:
        # log the stack plus the offending source/type for debugging
        self.log_error('\n' + ''.join(traceback.format_stack()))
        #traceback.print_exc(file=self.log_error)
        self.log_error(str(error))
        self.log_error(str(txt))
        self.log_error(str(type(txt)))
        return is_passed
    except Exception as error:
        #self.log_error(traceback.print_stack(f))
        self.log_error('\n' + ''.join(traceback.format_stack()))
        #traceback.print_exc(file=self.log_error)
        self.log_error(str(error))
        self.log_error(str(txt))
        return is_passed
    is_passed = True
    return is_passed
#---------------------------------------------------------------------------
def reset_labels(self, reset_minus1=True):
    """
    Wipe all labels and regenerate the key slots based on the case keys.
    This is used when changing the model.

    Parameters
    ----------
    reset_minus1 : bool; default=True
        also reset the special -1 (no-case) slot
    """
    self._remove_labels()
    # NOTE(review): this overrides the parameter, making the else branch
    # unreachable -- looks like a debug leftover; confirm before removing
    reset_minus1 = True
    # new geometry
    if reset_minus1:
        self.model_data.label_actors = {-1 : []}
    else:
        for idi in self.label_actors:
            if idi == -1:
                continue
            self.label_actors[idi] = []
    self.label_ids = {}

    #self.case_keys = [
        #(1, 'ElementID', 1, 'centroid', '%.0f'),
        #(1, 'Region', 1, 'centroid', '%.0f')
    #]
    # one empty label slot per result case
    for icase in self.case_keys:
        #result_name = self.get_result_name(icase)
        self.label_actors[icase] = []
        self.label_ids[icase] = set()
    #print(self.label_actors)
    #print(self.label_ids)
def _remove_labels(self):
"""
Remove all labels from the current result case.
This happens when the user explictly selects the clear label button.
"""
if len(self.label_actors) == 0:
self.log.warning('No actors to remove')
return
# existing geometry
for icase, actors in self.label_actors.items():
if icase == -1:
continue
for actor in actors:
self.rend.RemoveActor(actor)
del actor
self.label_actors[icase] = []
self.label_ids[icase] = set()
def clear_labels(self):
    """Clears the labels of the currently active result case only."""
    if not self.label_actors:
        self.log.warning('No actors to clear')
        return

    icase = self.icase
    if icase not in self.label_actors:
        self.log.warning('No actors to clear')
        return

    # drop the actors from the renderer, then reset the bookkeeping
    remove_actors_from_gui(self, self.label_actors[icase], render=True)
    self.label_actors[icase] = []
    self.label_ids[icase] = set()
def resize_labels(self, case_keys=None, show_msg=True):
    """
    Hides the label actors for the requested result cases.

    TODO: not done...

    Parameters
    ----------
    case_keys : List[int]; default=None
        the result cases whose labels to touch; None -> all
    show_msg : bool; default=True
        log the command
    """
    if case_keys is None:
        names = 'None) # None -> all'
        case_keys = sorted(self.label_actors.keys())
    else:
        placeholders = '%s,' * len(case_keys)
        names = '[' + placeholders[:-1] + '])'

    nhidden = 0
    for icase in case_keys:
        for label_actor in self.label_actors[icase]:
            label_actor.VisibilityOff()
            nhidden += 1
    if nhidden and show_msg:
        self.log_command('resize_labels(%s)' % names)
#---------------------------------------------------------------------------
def on_update_clipping(self, min_clip=None, max_clip=None):
self.clipping_obj.on_update_clipping(min_clip=min_clip, max_clip=max_clip)
#---------------------------------------------------------------------------
def hide_legend(self):
"""hides the legend"""
self.scalar_bar.VisibilityOff()
#self.scalar_bar.is_shown = False
self.legend_obj.hide_legend()
def show_legend(self):
"""shows the legend"""
self.scalar_bar.VisibilityOn()
#self.scalar_bar.is_shown = True
self.legend_obj.show_legend()
def clear_legend(self):
"""clears the legend"""
self._is_fringe = False
self.legend_obj.clear_legend()
def _set_legend_fringe(self, is_fringe):
self._is_fringe = is_fringe
self.legend_obj._set_legend_fringe(is_fringe)
def on_update_legend(self,
title='Title', min_value=0., max_value=1.,
scale=0.0, phase=0.0,
arrow_scale=1.,
data_format='%.0f',
is_low_to_high=True, is_discrete=True, is_horizontal=True,
nlabels=None, labelsize=None, ncolors=None, colormap=None,
is_shown=True, render=True):
"""
Updates the legend/model
Parameters
----------
scale : float
displacemnt scale factor; true scale
TODO: speed up by using existing values to skip update steps
"""
self.legend_obj.on_update_legend(
title=title, min_value=min_value, max_value=max_value,
scale=scale, phase=phase,
arrow_scale=arrow_scale,
data_format=data_format,
is_low_to_high=is_low_to_high, is_discrete=is_discrete, is_horizontal=is_horizontal,
nlabels=nlabels, labelsize=labelsize, ncolors=ncolors, colormap=colormap,
is_shown=is_shown, render=render)
def update_scalar_bar(self, title, min_value, max_value,
data_format,
nlabels=None, labelsize=None,
ncolors=None, colormap=None,
is_shown=True):
"""
Updates the Scalar Bar
Parameters
----------
title : str
the scalar bar title
min_value : float
the blue value
max_value :
the red value
data_format : str
'%g','%f','%i', etc.
nlabels : int (default=None -> auto)
the number of labels
labelsize : int (default=None -> auto)
the label size
ncolors : int (default=None -> auto)
the number of colors
colormap : varies
str : the name
ndarray : (N, 3) float ndarry
red-green-blue array
is_shown : bool
show the scalar bar
"""
if colormap is None:
colormap = self.settings.colormap
#print("update_scalar_bar min=%s max=%s" % (min_value, max_value))
self.scalar_bar.update(title, min_value, max_value, data_format,
nlabels=nlabels, labelsize=labelsize,
ncolors=ncolors, colormap=colormap,
is_low_to_high=self.legend_obj.is_low_to_high,
is_horizontal=self.legend_obj.is_horizontal_scalar_bar,
is_shown=is_shown)
def on_update_scalar_bar(self, title, min_value, max_value, data_format):
    """
    Updates the scalar bar title/range after validating the number format.

    Parameters
    ----------
    title : str
        the scalar bar title
    min_value / max_value : float
        the blue / red values
    data_format : str
        a %-style format string (e.g. '%i', '%8f', '%.2f', '%e')
    """
    self.title = str(title)
    self.min_value = float(min_value)
    self.max_value = float(max_value)

    # validate the format by applying it to a dummy value
    try:
        data_format % 1
    except Exception:
        # bug fix: the original message contained an unfilled %r placeholder;
        # interpolate the offending format into the message
        msg = (f"failed applying the data formatter format={data_format!r} and "
               "should be of the form: '%i', '%8f', '%.2f', '%e', etc.")
        self.log_error(msg)
        return
    #self.data_format = data_format
    self.log_command('on_update_scalar_bar(%r, %r, %r, %r)' % (
        title, min_value, max_value, data_format))
#---------------------------------------------------------------------------
def create_coordinate_system(self, coord_id: int, dim_max: float, label: str='',
origin=None, matrix_3x3=None,
coord_type: str='xyz'):
"""
Creates a coordinate system
Parameters
----------
coord_id : int
the coordinate system id
dim_max : float
the max model dimension; 10% of the max will be used for the coord length
label : str
the coord id or other unique label (default is empty to indicate the global frame)
origin : (3, ) ndarray/list/tuple
the origin
matrix_3x3 : (3, 3) ndarray
a standard Nastran-style coordinate system
coord_type : str
a string of 'xyz', 'Rtz', 'Rtp' (xyz, cylindrical, spherical)
that changes the axis names
.. todo:: coord_type is not supported ('xyz' ONLY)
.. todo:: Can only set one coordinate system
"""
self.tool_actions.create_coordinate_system(
coord_id, dim_max, label=label,
origin=origin, matrix_3x3=matrix_3x3,
coord_type=coord_type)
def create_global_axes(self, dim_max: float):
    """creates the global (cid=0) coordinate axes"""
    cid = 0
    self.tool_actions.create_coordinate_system(
        cid, dim_max, label='',
        origin=None, matrix_3x3=None, coord_type='xyz')
def create_corner_axis(self):
"""creates the axes that sits in the corner"""
self.tool_actions.create_corner_axis()
def get_corner_axis_visiblity(self):
"""gets the visibility of the corner axis"""
corner_axis = self.corner_axis
axes_actor = corner_axis.GetOrientationMarker()
is_visible = axes_actor.GetVisibility()
return is_visible
def set_corner_axis_visiblity(self, is_visible, render=True):
"""sets the visibility of the corner axis"""
corner_axis = self.corner_axis
axes_actor = corner_axis.GetOrientationMarker()
axes_actor.SetVisibility(is_visible)
if render:
self.Render()
def update_axes_length(self, dim_max):
    """
    Stores the driving model dimension (used for picking, coordinate
    systems, and label sizing) and rescales the coordinate axes.
    """
    self.settings.dim_max = dim_max
    axis_length = self.settings.dim_max * self.settings.coord_scale
    self.on_set_axes_length(axis_length)
def on_set_axes_length(self, dim=None):
    """
    Scales every coordinate-system actor to the given length
    (default: dim_max * coord_scale from the settings).
    """
    if dim is None:
        dim = self.settings.dim_max * self.settings.coord_scale
    for axis_actor in self.axes.values():
        axis_actor.SetTotalLength(dim, dim, dim)
#---------------------------------------------------------------------------
@property
def window_title(self):
    """the text shown in the main window's title bar"""
    return self.getWindowTitle()

@window_title.setter
def window_title(self, msg):
    # the base title prefix is deliberately not prepended
    #msg2 = "%s - " % self.base_window_title
    #msg2 += msg
    self.setWindowTitle(msg)
def build_fmts(self, fmt_order: List[str], stop_on_failure: bool=False):
    """
    Populates the supported formats (self.fmts / self.supported_formats).

    Parameters
    ----------
    fmt_order : List[str]
        format names ('nastran', 'cart3d', ...) in menu order
    stop_on_failure : bool; default=False
        raise immediately if a format's loader hook is missing

    Raises
    ------
    RuntimeError
        if no format could be loaded
    """
    fmts = []
    #assert 'h5nastran' in fmt_order
    for fmt in fmt_order:
        geom_results_funcs = 'get_%s_wildcard_geometry_results_functions' % fmt

        if fmt in self.format_class_map:
            cls = self.format_class_map[fmt](self)
            data = getattr(cls, geom_results_funcs)()
        elif hasattr(self, geom_results_funcs):
            data = getattr(self, geom_results_funcs)()
        else:
            msg = 'get_%s_wildcard_geometry_results_functions does not exist' % fmt
            if stop_on_failure:
                raise RuntimeError(msg)
            if not IS_OFFICIAL_RELEASE:
                if self.log is None:
                    print('***', msg)
                else:
                    self.log_error(msg)
            # bug fix: 'data' is undefined on this path; skip the format
            # instead of passing a stale/unbound value to _add_fmt
            continue
        _add_fmt(fmts, fmt, geom_results_funcs, data)

    if len(fmts) == 0:
        # bug fix: the original constructed the RuntimeError but never raised it
        raise RuntimeError('No formats...expected=%s' % fmt_order)
    self.fmts = fmts
    #print("fmts =", fmts)

    self.supported_formats = [fmt[0] for fmt in fmts]
    if not IS_TESTING:  # pragma: no cover
        print('supported_formats = %s' % self.supported_formats)
    #assert 'h5nastran' in self.supported_formats, self.supported_formats
@property
def model(self):
return self.models[self.name]
@model.setter
def model(self, model):
self.models[self.name] = model
def _reset_model(self, name: str):
    """
    Resets the grids; sets up alt_grids.

    Parameters
    ----------
    name : str
        the model/actor name being (re)loaded
    """
    # NOTE(review): 'main_grids' is never assigned in this chunk, so this
    # branch may never fire here -- confirm against the rest of the class
    if hasattr(self, 'main_grids') and name not in self.main_grids:
        # build a brand-new vtk pipeline: grid -> mapper -> actor -> renderer
        grid = vtk.vtkUnstructuredGrid()
        grid_mapper = vtk.vtkDataSetMapper()
        grid_mapper.SetInputData(grid)

        geom_actor = vtk.vtkLODActor()
        geom_actor.DragableOff()
        geom_actor.SetMapper(grid_mapper)
        self.rend.AddActor(geom_actor)

        self.name = 'main'
        self.models = {}
        self.grid = grid
        self.grid_mapper = grid_mapper
        self.geom_actor = geom_actor
        self.grid.Modified()

        # link the current "main" to the scalar bar
        scalar_range = self.grid_selected.GetScalarRange()
        self.grid_mapper.ScalarVisibilityOn()
        self.grid_mapper.SetScalarRange(scalar_range)
        self.grid_mapper.SetLookupTable(self.color_function)

        self.edge_actor = vtk.vtkLODActor()
        self.edge_actor.DragableOff()
        self.edge_mapper = vtk.vtkPolyDataMapper()

        # create the edges
        self.get_edges()
    else:
        # reuse the existing grid; just clear its contents
        self.grid.Reset()
        self.grid.Modified()

    # reset alt grids
    alt_names = self.alt_grids.keys()
    for alt_name in alt_names:
        self.alt_grids[alt_name].Reset()
        self.alt_grids[alt_name].Modified()
#---------------------------------------------------------------------------
@start_stop_performance_mode
def on_load_geometry(self, infile_name=None, geometry_format=None, name='main',
plot=True, raise_error=False):
"""
Loads a baseline geometry
Parameters
----------
infile_name : str; default=None -> popup
path to the filename
geometry_format : str; default=None
the geometry format for programmatic loading
name : str; default='main'
the name of the actor; don't use this
plot : bool; default=True
Should the baseline geometry have results created and plotted/rendered?
If you're calling the on_load_results method immediately after, set it to False
raise_error : bool; default=True
stop the code if True
"""
self.load_actions.on_load_geometry(
infile_name=infile_name, geometry_format=geometry_format,
name=name, plot=plot, raise_error=raise_error)
@start_stop_performance_mode
def on_load_results(self, out_filename=None):
"""
Loads a results file. Must have called on_load_geometry first.
Parameters
----------
out_filename : str / None
the path to the results file
"""
self.load_actions.on_load_results(out_filename=out_filename)
@start_stop_performance_mode
def on_load_custom_results(self, out_filename=None, restype=None, stop_on_failure: bool=False):
"""will be a more generalized results reader"""
self.load_actions.on_load_custom_results(
out_filename=out_filename, restype=restype, stop_on_failure=stop_on_failure)
@start_stop_performance_mode
def load_patran_nod(self, nod_filename):
"""reads a Patran formatted *.nod file"""
self.load_actions.load_patran_nod(nod_filename)
@start_stop_performance_mode
def load_batch_inputs(self, inputs):
    """
    Loads geometry/results/scripts from a parsed command-line ``inputs`` dict
    (keys: 'geomscript', 'format', 'input', 'output', 'postscript').
    """
    print('load_batch_inputs', inputs)
    geom_script = inputs['geomscript']
    if geom_script is not None:
        self.on_run_script(geom_script)

    # normalize a single format string to a list
    formats = inputs['format']
    if isinstance(formats, str):
        formats = [formats]
    if not formats:
        return
    input_filenames = inputs['input']
    results_filename = inputs['output']
    # results are plotted after loading, so skip plotting the bare geometry
    plot = True
    if results_filename:
        plot = False

    #print('input_filename =', input_filename)
    #print(formats)
    #print(input_filenames)
    assert len(formats) == len(input_filenames)
    if input_filenames is not None:
        # validate every input file before loading anything
        for form, input_filename in zip(formats, input_filenames):
            form = form.lower()
            if not os.path.exists(input_filename):
                msg = 'input filename: %s does not exist\n%s' % (
                    input_filename, print_bad_path(input_filename))
                self.log.error(msg)
                if self.html_logging:
                    print(msg)
                return
        # NOTE(review): if results_filename is None/empty this iteration
        # would raise -- verify the caller always supplies a list here
        for results_filenamei in results_filename:
            #print('results_filenamei =', results_filenamei)
            if results_filenamei is not None:
                if not os.path.exists(results_filenamei):
                    msg = '%s does not exist\n%s' % (
                        results_filenamei, print_bad_path(results_filenamei))
                    self.log.error(msg)
                    if self.html_logging:
                        print(msg)
                    return

        #unused_is_geom_results = input_filename == results_filename and len(input_filenames) == 1
        unused_is_geom_results = False
        is_failed = False
        print('input_filenames =', input_filenames)
        # the first file becomes the 'main' actor; later files use their filename
        for i, input_filename in enumerate(input_filenames):
            if i == 0:
                name = 'main'
            else:
                name = input_filename
            self.name = name
            #form = inputs['format'].lower()
            #if is_geom_results:
            #    is_failed = self.on_load_geometry_and_results(
            #        infile_name=input_filename, name=name, geometry_format=form,
            #        plot=plot, raise_error=True)
            #else:
            is_failed = self.on_load_geometry(
                infile_name=input_filename, name=name, geometry_format=form,
                plot=plot, raise_error=True)
        self.name = 'main'
        #print('keys =', self.nid_maps.keys())
        if is_failed:
            return
        if results_filename:  # and not is_geom_results
            self.on_load_results(results_filename)

    post_script = inputs['postscript']
    if post_script is not None:
        self.on_run_script(post_script)
    self.on_reset_camera()
    #self.log.debug('debug')
    #self.log.info('info')
    #self.log.warning('warning')
    #self.log.error('error')
    #self.log_debug('debug2')
    #self.log_info('info2')
    #self.log_warning('warning2')
    #self.log_command('command2')
    #self.log_error('error2')
    self.vtk_interactor.Modified()
#---------------------------------------------------------------------------
def on_increase_font_size(self):
"""used by the hidden_tools for Ctrl +"""
self.on_set_font_size(self.settings.font_size + 1)
def on_decrease_font_size(self):
"""used by the hidden_tools for Ctrl -"""
self.on_set_font_size(self.settings.font_size - 1)
def on_set_font_size(self, font_size: int, show_command: bool=True):
    """
    Changes the font size of the GUI and all helper windows.

    Parameters
    ----------
    font_size : int
        the new font size; clipped to a minimum of 6
    show_command : bool; default=True
        log the command

    Returns
    -------
    is_failed : bool
        True only if font_size is not an integer
    """
    is_failed = True
    if not isinstance(font_size, int):
        self.log_error('font_size=%r must be an integer; type=%s' % (
            font_size, type(font_size)))
        return is_failed
    # anything smaller than 6pt is unreadable
    if font_size < 6:
        font_size = 6
    if self.settings.font_size == font_size:
        # no change -> nothing to do
        return False
    self.settings.font_size = font_size
    font = QtGui.QFont()
    font.setPointSize(self.settings.font_size)
    self.setFont(font)

    if isinstance(self, QMainWindow):
        #self.toolbar.setFont(font)
        self.menu_file.setFont(font)
        self.menu_view.setFont(font)
        self.menu_window.setFont(font)
        self.menu_help.setFont(font)

    # propagate the new size to every helper window
    self.legend_obj.set_font_size(font_size)
    self.camera_obj.set_font_size(font_size)
    self.highlight_obj.set_font_size(font_size)
    self.mark_obj.set_font_size(font_size)
    self.clipping_obj.set_font_size(font_size)
    if self._modify_groups_window_shown:
        self._modify_groups_window.set_font_size(font_size)
    self.preferences_obj.set_font_size(font_size)
    if hasattr(self, 'cutting_plane_obj'):
        self.cutting_plane_obj.set_font_size(font_size)
    if hasattr(self, 'shear_moment_torque_obj'):
        self.shear_moment_torque_obj.set_font_size(font_size)
    self.edit_geometry_properties_obj.set_font_size(font_size)
    #self.menu_scripts.setFont(font)

    # bug fix: honor show_command instead of always logging
    if show_command:
        self.log_command('settings.on_set_font_size(%s)' % font_size)
    return False
def make_cutting_plane(self, data):
model_name = data['model_name']
unused_model = self.models[model_name]
cid_p1, p1 = data['p1']
cid_p2, p2 = data['p2']
unused_method, cid_zaxis, zaxis = data['zaxis']
unused_xyz1 = self.model.coords[cid_p1].transform_node_to_global(p1)
unused_xyz2 = self.model.coords[cid_p2].transform_node_to_global(p2)
unused_zaxis_xyz = self.model.coords[cid_zaxis].transform_node_to_global(zaxis)
#---------------------------------------------------------------------------
def get_result_by_xyz_cell_id(self, node_xyz, cell_id):
"""won't handle multiple cell_ids/node_xyz"""
out = self.mark_actions.get_result_by_xyz_cell_id(node_xyz, cell_id)
if out is None:
print('attrs.get_result_by_xyz_cell_id bug')
result_name, result_values, node_id, xyz = out
return result_name, result_values, node_id, xyz
def get_result_by_cell_id(self, cell_id, world_position, icase=None):
"""should handle multiple cell_ids"""
res_name, result_values, xyz = self.mark_actions.get_result_by_cell_id(
cell_id, world_position, icase=icase)
return res_name, result_values, xyz
def mark_elements(self, eids,
                  stop_on_failure: bool=False, show_command: bool=True):
    """
    Marks the elements (looked up by the ElementID case) with text labels.

    Parameters
    ----------
    eids : int / List[int]
        the element ids to mark
    stop_on_failure : bool; default=False
        crash if an element id is invalid
    show_command : bool; default=True
        log the command
    """
    icase_result = 1  # ElementID
    icase_to_apply = self.icase
    self.mark_elements_by_different_case(
        eids, icase_result, icase_to_apply,
        stop_on_failure=stop_on_failure, show_command=False)
    # bug fix: honor show_command instead of logging unconditionally
    if show_command:
        self.log_command(f'mark_elements(eids={eids})')
def mark_elements_by_case(self, eids,
                          stop_on_failure: bool=False, show_command: bool=True):
    """
    Marks the elements (using the currently active case) with text labels.

    Parameters
    ----------
    eids : int / List[int]
        the element ids to mark
    stop_on_failure : bool; default=False
        crash if an element id is invalid
    show_command : bool; default=True
        log the command
    """
    icase_result = self.icase
    icase_to_apply = self.icase
    self.mark_elements_by_different_case(
        eids, icase_result, icase_to_apply,
        stop_on_failure=stop_on_failure, show_command=False)
    # bug fix: honor show_command instead of logging unconditionally
    if show_command:
        self.log_command(f'mark_elements_by_case(eids={eids})')
def mark_elements_by_different_case(self, eids, icase_result: int, icase_to_apply: int,
stop_on_failure: bool=False, show_command: bool=False):
"""
Marks a series of elements with custom text labels
Parameters
----------
eids : int, List[int]
the elements to apply a message to
icase_result : int
the case to draw the result from
icase_to_apply : int
the key in label_actors to slot the result into
TODO: fix the following
correct : applies to the icase_to_apply
incorrect : applies to the icase_result
Examples
--------
.. code-block::
eids = [16563, 16564, 8916703, 16499, 16500, 8916699,
16565, 16566, 8916706, 16502, 16503, 8916701]
icase_result = 22
icase_to_apply = 25
self.mark_elements_by_different_case(eids, icase_result, icase_to_apply)
"""
self.mark_actions.mark_elements_by_different_case(
eids, icase_result, icase_to_apply,
stop_on_failure=stop_on_failure, show_command=show_command)
#def mark_max_elements(self, neids, show_command: bool=True):
#"""mark the elements by the top/btm x elements"""
#def mark_min_elements(self, neids, show_command: bool=True):
#"""mark the elements by the top/btm x elements"""
def mark_nodes(self, nids, icase, text):
"""
Marks a series of nodes with custom text labels
Parameters
----------
nids : int, List[int]
the nodes to apply a message to
icase : int
the key in label_actors to slot the result into
text : str, List[str]
the text to display
0 corresponds to the NodeID result
self.mark_nodes(1, 0, 'max')
self.mark_nodes(6, 0, 'min')
self.mark_nodes([1, 6], 0, 'max')
self.mark_nodes([1, 6], 0, ['max', 'min'])
"""
self.mark_actions.mark_nodes(nids, icase, text)
def create_annotation(self, text, x, y, z):
    """
    Creates a single text annotation at (x, y, z).

    Parameters
    ----------
    text : str
        the text to display
    x, y, z : float
        the position of the label

    Returns
    -------
    annotation : vtkBillboardTextActor3D
        the created annotation actor
    """
    return self.mark_actions.create_annotation(text, x, y, z)
#---------------------------------------------------------------------------
def on_update_geometry_properties_window(self, geometry_properties):
"""updates the EditGeometryProperties window"""
self.edit_geometry_properties_obj.on_update_geometry_properties_window(
geometry_properties)
@start_stop_performance_mode
def on_update_geometry_properties(self, out_data, name=None, write_log=True):
"""
Applies the changed properties to the different actors if
something changed.
Note that some of the values are limited. This prevents
points/lines from being shrunk to 0 and also the actor being
actually "hidden" at the same time. This prevents confusion
when you try to show the actor and it's not visible.
"""
self.edit_geometry_properties_obj.on_update_geometry_properties(
out_data, name=name, write_log=write_log)
@start_stop_performance_mode
def on_update_geometry_properties_override_dialog(self, geometry_properties):
"""
Update the goemetry properties and overwite the options in the
edit geometry properties dialog if it is open.
Parameters
-----------
geometry_properties : dict {str : CoordProperties or AltGeometry}
Dictionary from name to properties object. Only the names included in
``geometry_properties`` are modified.
"""
self.edit_geometry_properties_obj.on_update_geometry_properties_override_dialog(
geometry_properties)
#---------------------------------------------------------------------------
def update_text_actors(self, subcase_id, subtitle,
imin, min_value,
imax, max_value, label, location):
"""
Updates the text actors in the lower left
Max: 1242.3
Min: 0.
Subcase: 1 Subtitle:
Label: SUBCASE 1; Static
"""
self.tool_actions.update_text_actors(subcase_id, subtitle,
imin, min_value,
imax, max_value, label, location)
def create_text(self, position, label, text_size=18):
"""creates the lower left text actors"""
self.tool_actions.create_text(position, label, text_size=text_size)
def turn_text_off(self):
"""turns all the text actors off"""
self.tool_actions.turn_text_off()
def turn_text_on(self):
"""turns all the text actors on"""
self.tool_actions.turn_text_on()
@start_stop_performance_mode
def export_case_data(self, icases=None):
"""exports CSVs of the requested cases"""
self.tool_actions.export_case_data(icases=icases)
@start_stop_performance_mode
def on_load_user_geom(self, csv_filename=None, name=None, color=None):
"""
Loads a User Geometry CSV File of the form:
# id x y z
GRID, 1, 0.2, 0.3, 0.3
GRID, 2, 1.2, 0.3, 0.3
GRID, 3, 2.2, 0.3, 0.3
GRID, 4, 5.2, 0.3, 0.3
grid, 5, 5.2, 1.3, 2.3 # case insensitive
# ID, nodes
BAR, 1, 1, 2
TRI, 2, 1, 2, 3
# this is a comment
QUAD, 3, 1, 5, 3, 4
QUAD, 4, 1, 2, 3, 4 # this is after a blank line
#RESULT,4,CENTROID,AREA(%f),PROPERTY_ID(%i)
# in element id sorted order: value1, value2
#1.0, 2.0 # bar
#1.0, 2.0 # tri
#1.0, 2.0 # quad
#1.0, 2.0 # quad
#RESULT,NODE,NODEX(%f),NODEY(%f),NODEZ(%f)
# same difference
#RESULT,VECTOR3,GEOM,DXYZ
# 3xN
Parameters
----------
csv_filename : str (default=None -> load a dialog)
the path to the user geometry CSV file
name : str (default=None -> extract from fname)
the name for the user points
color : (float, float, float)
RGB values as 0.0 <= rgb <= 1.0
"""
self.tool_actions.on_load_user_geom(csv_filename=csv_filename, name=name, color=color)
@start_stop_performance_mode
def on_load_csv_points(self, csv_filename=None, name=None, color=None):
"""
Loads a User Points CSV File of the form:
1.0, 2.0, 3.0
1.5, 2.5, 3.5
Parameters
-----------
csv_filename : str (default=None -> load a dialog)
the path to the user points CSV file
name : str (default=None -> extract from fname)
the name for the user points
color : (float, float, float)
RGB values as 0.0 <= rgb <= 1.0
"""
is_failed = self.tool_actions.on_load_csv_points(
csv_filename=csv_filename, name=name, color=color)
return is_failed
#---------------------------------------------------------------------------
def create_groups_by_visible_result(self, nlimit=50):
"""
Creates group by the active result
This should really only be called for integer results < 50-ish.
"""
return self.group_actions.create_groups_by_visible_result(nlimit=nlimit)
def create_groups_by_property_id(self):
"""
Creates a group for each Property ID.
As this is somewhat Nastran specific, create_groups_by_visible_result exists as well.
"""
return self.group_actions.create_groups_by_property_id()
#---------------------------------------------------------------------------
def update_camera(self, code):
self.view_actions.update_camera(code)
def _update_camera(self, camera=None):
self.view_actions._update_camera(camera)
def on_pan_left(self, event):
self.view_actions.on_pan_left(event)
def on_pan_right(self, event):
self.view_actions.on_pan_right(event)
def on_pan_up(self, event):
self.view_actions.on_pan_up(event)
def on_pan_down(self, event):
self.view_actions.on_pan_down(event)
#------------------------------
def rotate(self, rotate_deg, render=True):
"""rotates the camera by a specified amount"""
self.view_actions.rotate(rotate_deg, render=render)
def on_rotate_clockwise(self):
"""rotate clockwise"""
self.view_actions.rotate(15.0)
def on_rotate_cclockwise(self):
"""rotate counter clockwise"""
self.view_actions.rotate(-15.0)
#------------------------------
def zoom(self, value):
return self.view_actions.zoom(value)
def on_increase_magnification(self):
"""zoom in"""
self.view_actions.on_increase_magnification()
def on_decrease_magnification(self):
"""zoom out"""
self.view_actions.on_decrease_magnification()
def set_focal_point(self, focal_point):
"""
Parameters
----------
focal_point : (3, ) float ndarray
The focal point
[ 188.25109863 -7. -32.07858658]
"""
self.view_actions.set_focal_point(focal_point)
def on_surface(self):
"""sets the main/toggle actors to surface"""
self.view_actions.on_surface()
def on_wireframe(self):
"""sets the main/toggle actors to wirefreme"""
self.view_actions.on_wireframe()
def on_take_screenshot(self, fname=None, magnify=None, show_msg=True):
"""
Take a screenshot of a current view and save as a file
Parameters
----------
fname : str; default=None
None : pop open a window
str : bypass the popup window
magnify : int; default=None
None : use self.settings.magnify
int : resolution increase factor
show_msg : bool; default=True
log the command
"""
self.tool_actions.on_take_screenshot(fname=fname, magnify=magnify, show_msg=show_msg)
def get_camera_data(self):
"""see ``set_camera_data`` for arguments"""
return self.camera_obj.get_camera_data()
def on_set_camera(self, name, show_log=True):
"""see ``set_camera_data`` for arguments"""
self.camera_obj.on_set_camera(name, show_log=show_log)
def on_set_camera_data(self, camera_data, show_log=True):
"""
Sets the current camera
Parameters
----------
camera_data : Dict[key] : value
defines the camera
position : (float, float, float)
where am I is xyz space
focal_point : (float, float, float)
where am I looking
view_angle : float
field of view (angle); perspective only?
view_up : (float, float, float)
up on the screen vector
clip_range : (float, float)
start/end distance from camera where clipping starts
parallel_scale : float
???
parallel_projection : bool (0/1)
flag?
TODO: not used
distance : float
distance to the camera
i_vector = focal_point - position
j'_vector = view_up
use:
i x j' -> k
k x i -> j
or it's like k'
"""
self.camera_obj.on_set_camera_data(camera_data, show_log=show_log)
@property
def IS_GUI_TESTING(self):
return 'test_' in sys.argv[0]
@property
def iren(self):
return self.vtk_interactor
@property
def render_window(self):
return self.vtk_interactor.GetRenderWindow()
#------------------------------
def get_xyz_cid0(self, model_name=None):
    """returns the node locations in the global (cid=0) frame"""
    return self.xyz_cid0
def get_element_ids(self, model_name=None, ids=None):
    """wrapper around element_ids; returns the full array or the subset at ``ids``"""
    eids = self.element_ids
    return eids if ids is None else eids[ids]
def get_node_ids(self, model_name=None, ids=None):
    """wrapper around node_ids; returns the full array or the subset at ``ids``"""
    nids = self.node_ids
    return nids if ids is None else nids[ids]
def get_reverse_node_ids(self, model_name=None, ids=None):
    """
    Returns the node ids EXCLUDING the requested indices.

    Parameters
    ----------
    model_name : str; default=None
        unused; kept for API symmetry with get_node_ids
    ids : index array / List[int]; default=None
        the indices to exclude; None -> nothing selected -> empty result

    Returns
    -------
    node_ids : ndarray
        all node ids except those at the excluded indices
    """
    if ids is None:
        return np.array([])
    # bug fix: the mask must be boolean -- an integer array of 0s/1s is
    # interpreted as fancy (positional) indexing, not as an exclusion mask
    include_ids = np.ones(self.node_ids.shape, dtype='bool')
    include_ids[ids] = False
    return self.node_ids[include_ids]
#------------------------------
# these are overwritten
def log_debug(self, msg):
    """turns logs into prints to aid debugging (only when self.debug is set)"""
    if self.debug:
        print('DEBUG: ', msg)
def log_info(self, msg):
    """turns logs into prints to aid debugging (only when self.debug is set)"""
    if self.debug:
        print('INFO: ', msg)
def log_error(self, msg):
    """turns logs into prints to aid debugging; errors always print"""
    #if self.debug:
    print('ERROR: ', msg)
def log_warning(self, msg):
    """turns logs into prints to aid debugging (only when self.debug is set)"""
    if self.debug:
        print('WARNING: ', msg)
def log_command(self, msg):
    """turns logs into prints to aid debugging (only when self.debug is set)"""
    if self.debug:
        print('COMMAND: ', msg)
def Render(self):  # pragma: no cover
    """no-op stand-in for the vtk render call; the real GUI overrides this"""
    pass
class ModelData:
    """Per-model GUI state: geometry properties, groups, labels, result cases."""
    def __init__(self, parent: GuiAttributes):
        # NOTE: parent is currently unused; kept for API compatibility
        self.geometry_properties = OrderedDict()

        self.groups = {}
        self.group_active = 'main'

        self.follower_nodes = {}
        self.follower_functions = {}

        self.label_actors = {-1 : []}
        self.label_ids = {}
        self.label_scale = 1.0  # in percent

        self.result_cases = {}

    def __repr__(self):
        return ('ModelData:\n'
                f'result_cases.keys() = {self.result_cases.keys()}')
def _add_fmt(fmts: List[str], fmt: str, geom_results_funcs, data):
"""
Adds a format
Parameters
----------
fmts : List[formats]
format : List[fmt, macro_name, geo_fmt, geo_func, res_fmt, res_func]
macro_name : ???
???
geo_fmt : ???
???
geo_func : ???
???
res_fmt : ???
???
res_func : ???
???
fmt : str
nastran, cart3d, etc.
geom_results_funcs : str
'get_nastran_wildcard_geometry_results_functions'
'get_cart3d_wildcard_geometry_results_functions'
data : function
the outputs from ``get_nastran_wildcard_geometry_results_functions()``
so 1 or more formats (macro_name, geo_fmt, geo_func, res_fmt, res_func)
"""
msg = 'macro_name, geo_fmt, geo_func, res_fmt, res_func = data\n'
msg += 'data = %s'
if isinstance(data, tuple):
assert len(data) == 5, msg % str(data)
macro_name, geo_fmt, geo_func, res_fmt, res_func = data
fmts.append((fmt, macro_name, geo_fmt, geo_func, res_fmt, res_func))
elif isinstance(data, list):
for datai in data:
assert len(datai) == 5, msg % str(datai)
macro_name, geo_fmt, geo_func, res_fmt, res_func = datai
fmts.append((fmt, macro_name, geo_fmt, geo_func, res_fmt, res_func))
else:
raise TypeError(data)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,634
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/dev/bdf_vectorized2/test/test_bdf.py
|
"""
``test_bdfv`` runs multiple checks on a BDF in order to make sure that:
- no data is lost on IO
- card field types are correct (e.g. node_ids are integers)
- various card methods (e.g. Area) work correctly
As such, ``test_bdfv`` is very useful for debugging models.
"""
import os
import sys
import traceback
import warnings
from typing import List
import numpy as np
warnings.simplefilter('always')
np.seterr(all='raise')
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.utils import check_path
from pyNastran.utils.arg_handling import argparse_to_dict, swap_key, update_message
from pyNastran.bdf.errors import (
#CrossReferenceError,
CardParseSyntaxError, DuplicateIDsError, MissingDeckSections)
from pyNastran.dev.bdf_vectorized2.bdf_vectorized import BDF, read_bdf
from pyNastran.bdf.test.test_bdf import (divide, get_matrix_stats,
compare_card_content, get_test_bdf_usage_args_examples)
from pyNastran.bdf.mesh_utils.skin_solid_elements import write_skin_solid_faces
import pyNastran.bdf.test
TEST_PATH = pyNastran.bdf.test.__path__[0]
class DisabledCardError(RuntimeError):
    """
    Raised when a deck uses a card the caller has explicitly disabled
    (e.g., CGEN); lets bdf_test.py auto-flag such decks as crashing
    and skip them.
    """
def run_lots_of_files(filenames, folder='', debug=False, xref=True, check=True,
                      punch=False, cid=None, nastran='', encoding=None,
                      size=None, is_double=None, post=None, sum_load=True, dev=True,
                      crash_cards=None, pickle_obj=True):
    """
    Runs multiple BDFs

    Parameters
    ----------
    folder : str
        the folder where the bdf_filename is
    filenames : List[str]
        the bdf files to analyze
    debug : bool, optional
        run with debug logging (default=False)
    xref : bool / str / List[bool/str], optional
        True : cross reference the model
        False : don't cross reference the model
        'safe' : do safe cross referencing
    check : bool / List[bool], optional
        validate cards for things like mass, area, etc. (default=True)
    punch : bool / List[bool], optional
        this is a PUNCH file (no executive/case control decks; default=False)
    cid : int / None, optional
        deprecated; accepted for backwards compatibility but ignored
        (``run_bdf`` has no cid parameter)
    size : int / List[int], optional
        The field width of the model (8/16)
    is_double : bool / List[bool], optional
        Is this a double precision model?
        True : size = 16
        False : size = {8, 16}
    nastran : str, optional
        the path to nastran (default=''; no analysis)
    post : int / List[int], optional
        the PARAM,POST,value to run
    sum_load : bool; default=True
        should the loads be summed
    dev : bool; default=True
        True : crashes if an Exception occurs
        False : doesn't crash; useful for running many tests
    crash_cards : List[str, str, ...]
        list of cards that are invalid and automatically crash the run
    pickle_obj : bool; default=True
        tests pickling

    Returns
    -------
    failed_files : List[str]
        the absolute paths of the decks that did not pass

    Examples
    --------
    All control lists must be the same length.
    You can run xref=True and xref=False with::
        run_lots_of_files(filenames, xref=[True, False])  # valid
    """
    filenames = sorted(set(filenames))

    # normalize the scalar/list control arguments into lists
    if size is None:
        sizes = [8]
    elif isinstance(size, integer_types):
        sizes = [size]
    else:
        sizes = size

    if is_double is None:
        # NOTE(review): historical default; 8 is truthy, so this presumably
        # should be [False] -- confirm before changing
        is_doubles = [8]
    elif isinstance(is_double, bool):
        is_doubles = [is_double]
    else:
        is_doubles = is_double

    if post is None:
        posts = [-1]
    elif isinstance(post, integer_types):
        posts = [post]
    else:
        posts = post

    print('posts=%s' % posts)
    # zip truncates to the shortest control list
    size_doubles_post = list(zip(sizes, is_doubles, posts))

    # skip non-BDF files and the scratch files written by previous runs
    filenames2 = [filename for filename in filenames
                  if (filename.endswith(('.bdf', '.dat', '.nas')) and
                      'pyNastran_crash' not in filename and
                      'skin_file' not in filename)]

    failed_files = []
    diff_cards = []
    npass = 1
    nfailed = 1
    for filename in filenames2:
        abs_filename = os.path.abspath(os.path.join(folder, filename))
        if folder != '':
            print("filename = %s" % abs_filename)
        is_passed = False
        try:
            for sizei, is_doublei, posti in size_doubles_post:
                # bug fix: ``cid`` is no longer forwarded; run_bdf has no
                # such parameter and passing it raised a TypeError
                fem1, fem2, diff_cards2 = run_bdf(folder, filename, debug=debug,
                                                  xref=xref, check=check, punch=punch,
                                                  encoding=encoding,
                                                  is_folder=True, dynamic_vars={},
                                                  nastran=nastran, size=sizei,
                                                  is_double=is_doublei,
                                                  nerrors=0,
                                                  post=posti, sum_load=sum_load, dev=dev,
                                                  crash_cards=crash_cards,
                                                  run_extract_bodies=False,
                                                  pickle_obj=pickle_obj)
                del fem1
                del fem2
                # bug fix: this previously read ``diff_cards += diff_cards``,
                # which doubled the accumulator instead of adding new results
                diff_cards += diff_cards2
            is_passed = True
        except KeyboardInterrupt:
            sys.exit('KeyboardInterrupt...sys.exit()')
        except DisabledCardError:
            # deck uses a card the caller explicitly disabled; don't swallow
            raise
        except SystemExit:
            sys.exit('sys.exit...')
        except Exception:
            # dev-mode batch run: log the traceback and keep going
            traceback.print_exc(file=sys.stdout)

        print('-' * 80)
        if is_passed:
            sys.stderr.write('%i %s' % (npass, abs_filename))
            npass += 1
        else:
            sys.stderr.write('*%s ' % nfailed + abs_filename)
            nfailed += 1
            failed_files.append(abs_filename)
        sys.stderr.write('\n')

    print('*' * 80)
    try:
        print("diff_cards1 = %s" % list(set(diff_cards)))
    except TypeError:
        # diff_cards contained unhashable entries; print it raw
        print("diff_cards2 = %s" % diff_cards)
    return failed_files
def run_bdf(folder, bdf_filename, debug=False, xref=True, check=True, punch=False,
            mesh_form='combined', is_folder=False, print_stats=False,
            encoding=None, sum_load=True, size=8, is_double=False,
            stop=False, nastran='', post=-1, dynamic_vars=None,
            quiet=False, dumplines=False, dictsort=False, run_extract_bodies=False,
            nerrors=0, dev=False, crash_cards=None, safe_xref=True, pickle_obj=False, safe=False,
            stop_on_failure=True):
    """
    Runs a single BDF through the read/write/compare pipeline.

    Parameters
    ----------
    folder : str
        the folder where the bdf_filename is
    bdf_filename : str
        the bdf file to analyze
    debug : bool, optional
        run with debug logging (default=False)
    xref : bool / str, optional
        True : cross reference the model
        False : don't cross reference the model
        'safe' : do safe cross referencing
    check : bool, optional
        validate cards for things like mass, area, etc.
    punch : bool, optional
        this is a PUNCH file (no executive/case control decks)
    mesh_form : str, optional, {'combined', 'separate'}
        'combined' : interspersed=True
        'separate' : interspersed=False
    is_folder : bool, optional
        attach the test path and the folder to the bdf_filename
    print_stats : bool, optional
        get a nicely formatted message of all the cards in the model
    sum_load : bool; default=True
        Sum the static loads (doesn't work for frequency-based loads)
    size : int, optional, {8, 16}
        The field width of the model
    is_double : bool, optional
        Is this a double precision model?
    stop : bool; default=False
        stop after reading the first BDF
    nastran : str, optional
        the path to nastran (default=''; no analysis)
    post : int, optional
        the PARAM,POST,value to run
    dynamic_vars : dict[str]=int / float / str / None
        support OpenMDAO syntax %myvar; max variable length=7
    quiet : bool; default=False
        suppresses print messages
    dumplines : bool; default=False
        writes pyNastran_dump.bdf
    dictsort : bool; default=False
        writes pyNastran_dict.bdf
    run_extract_bodies : bool; default=False
        isolate the fem bodies; typically 1 body; code is still buggy
    dev : bool; default=False
        True : crashes if an Exception occurs
        False : doesn't crash; useful for running many tests
    pickle_obj : bool; default=False
        tests pickling

    Returns
    -------
    fem1 : BDF
        the model as first read
    fem2 : BDF / None
        the model re-read from the written deck (None when stop=True)
    diff_cards : List[str] / None
        card types whose counts differ between fem1 and fem2
    """
    if not quiet:
        print('debug = %s' % debug)
    dynamic_vars = {} if dynamic_vars is None else dynamic_vars
    crash_cards = [] if crash_cards is None else crash_cards

    # TODO: why do we need this?
    bdf_model = str(bdf_filename)
    if not quiet:
        print("bdf_model = %s" % bdf_model)
    if is_folder:
        bdf_model = os.path.join(TEST_PATH, folder, bdf_filename)

    # write the round-trip deck alongside the input:  model.test_bdfv.bdf
    root, ext = os.path.splitext(bdf_model)
    out_model = '%s.test_bdfv%s' % (root, ext)

    return run_and_compare_fems(
        bdf_model, out_model, debug=debug, xref=xref, check=check,
        punch=punch, mesh_form=mesh_form,
        print_stats=print_stats, encoding=encoding,
        sum_load=sum_load, size=size, is_double=is_double,
        stop=stop, nastran=nastran, post=post,
        dynamic_vars=dynamic_vars,
        quiet=quiet, dumplines=dumplines, dictsort=dictsort,
        nerrors=nerrors, dev=dev, crash_cards=crash_cards,
        safe_xref=safe_xref,
        run_extract_bodies=run_extract_bodies, pickle_obj=pickle_obj,
        stop_on_failure=stop_on_failure,
    )
def run_and_compare_fems(
        bdf_model, out_model, debug=False, xref=True, check=True,
        punch=False, mesh_form='combined',
        print_stats=False, encoding=None,
        sum_load=True, size=8, is_double=False,
        stop=False, nastran='', post=-1, dynamic_vars=None,
        quiet=False, dumplines=False, dictsort=False,
        nerrors=0, dev=False, crash_cards=None,
        safe_xref=True, run_extract_bodies=False, pickle_obj=False,
        stop_on_failure=True,
):
    """
    Runs two fem models and compares them.

    Reads ``bdf_model`` into fem1, writes it to ``out_model``, re-reads
    that as fem2, then compares card counts between the two models.
    In dev mode, most exception types are logged and swallowed so a
    batch run can continue.

    Returns
    -------
    (fem1, fem2, diff_cards) : tuple
        fem2/diff_cards are None when ``stop=True``; diff_cards stays []
        when an exception was swallowed in dev mode
    """
    assert os.path.exists(bdf_model), '%r doesnt exist' % bdf_model
    fem1 = BDF(debug=debug, log=None)
    fem1.set_error_storage(nparse_errors=nerrors, stop_on_parsing_error=True,
                           nxref_errors=nerrors, stop_on_xref_error=True)
    if dynamic_vars:
        fem1.set_dynamic_syntax(dynamic_vars)
    if not quiet:
        fem1.log.info('starting fem1')
    sys.stdout.flush()
    fem2 = None
    diff_cards = []
    try:
        #nastran_cmd = 'nastran scr=yes bat=no old=no news=no '
        nastran_cmd = ''
        #try:
        fem1 = run_fem1(fem1, bdf_model, out_model, mesh_form, xref, punch, sum_load,
                        size, is_double,
                        run_extract_bodies=run_extract_bodies,
                        encoding=encoding, crash_cards=crash_cards, safe_xref=safe_xref,
                        pickle_obj=pickle_obj, stop=stop)
        if stop:
            # first-read-only mode: dump the card counts and bail out early
            if not quiet:
                print('card_count:')
                print('-----------')
                for card_name, card_count in sorted(fem1.card_count.items()):
                    print('key=%-8s value=%s' % (card_name, card_count))
            return fem1, None, None
        fem2 = run_fem2(bdf_model, out_model, xref, punch, sum_load, size, is_double, mesh_form,
                        encoding=encoding, debug=debug, quiet=quiet,
                        stop_on_failure=stop_on_failure)
        diff_cards = compare(fem1, fem2, xref=xref, check=check,
                             print_stats=print_stats, quiet=quiet)
        test_get_cards_by_card_types(fem2)
        #fem2.update_model_by_desvars(xref)
        #except Exception:
        #return 1, 2, 3
        # no-op unless nastran_cmd is non-empty
        run_nastran(bdf_model, nastran_cmd, post, size, is_double)
    except KeyboardInterrupt:
        sys.exit('KeyboardInterrupt...sys.exit()')
    except IOError:  # only temporarily uncomment this when running lots of tests
        if not dev:
            raise
    except CardParseSyntaxError:  # only temporarily uncomment this when running lots of tests
        if not dev:
            raise
        print('failed test because CardParseSyntaxError...ignoring')
    except MissingDeckSections:
        if not dev:
            raise
        print('failed test because MissingDeckSections...ignoring')
    except DuplicateIDsError as e:
        # only temporarily uncomment this when running lots of tests
        # mesh-adaptation cards (GRIDG/CGEN/SPCG) legitimately duplicate ids
        if 'GRIDG' in fem1.card_count or 'CGEN' in fem1.card_count or 'SPCG' in fem1.card_count:
            print('failed test because mesh adaption (GRIDG,CGEN,SPCG)...ignoring')
            print(e)
        elif not dev:
            raise
        else:
            print('failed test because DuplicateIDsError...ignoring')
    except DisabledCardError as e:
        if not dev:
            raise
    except RuntimeError as e:
        # only temporarily uncomment this when running lots of tests
        if not dev:
            raise
        if 'GRIDG' in fem1.card_count or 'CGEN' in fem1.card_count or 'SPCG' in fem1.card_count:
            print('failed test because mesh adaption (GRIDG,CGEN,SPCG)...ignoring')
            print(e)
        else:
            raise
    #except AttributeError:  # only temporarily uncomment this when running lots of tests
    #pass
    except SyntaxError as e:
        # only temporarily uncomment this when running lots of tests
        if not dev:
            raise
        if 'GRIDG' in fem1.card_count or 'CGEN' in fem1.card_count or 'SPCG' in fem1.card_count:
            print('failed test because mesh adaption (GRIDG,CGEN,SPCG)...ignoring')
            print(e)
        else:
            raise
    except KeyError as e:  # only temporarily uncomment this when running lots of tests
        if not dev:
            raise
        if 'GRIDG' in fem1.card_count or 'CGEN' in fem1.card_count or 'SPCG' in fem1.card_count:
            print('failed test because mesh adaption (GRIDG,CGEN,SPCG)...ignoring')
            print(e)
        else:
            raise
    #except AssertionError:  # only temporarily uncomment this when running lots of tests
    #pass
    except SystemExit:
        sys.exit('sys.exit...')
    except Exception:
        #exc_type, exc_value, exc_traceback = sys.exc_info()
        #print "\n"
        traceback.print_exc(file=sys.stdout)
        #print msg
        print("-" * 80)
        raise
    if not quiet:
        print("-" * 80)
    return (fem1, fem2, diff_cards)
def run_nastran(bdf_model, nastran, post=-1, size=8, is_double=False):
    """
    Verifies that a valid bdf was written by running nastran and parsing
    the OP2. Many cards do not support double precision and since there
    is no list, a test is necessary.

    Parameters
    ----------
    bdf_model : str
        the path to the deck to run
    nastran : str
        the nastran command line; '' disables the whole check
    post : int; default=-1
        the PARAM,POST,value to write before running
    size : int; default=8
        field width of the rewritten deck (8/16)
    is_double : bool; default=False
        write the rewritten deck in double precision
    """
    if not nastran:
        return
    from pyNastran.op2.op2 import read_op2
    dirname = os.path.dirname(bdf_model)
    basename = os.path.basename(bdf_model).split('.')[0]

    f04_model = os.path.join(dirname, 'out_%s.f04' % basename)
    f06_model = os.path.join(dirname, 'out_%s.f06' % basename)
    # bug fix: this previously used the '.f06' extension, so the
    # existence check below never actually looked for the OP2
    op2_model = os.path.join(dirname, 'out_%s.op2' % basename)
    log_model = os.path.join(dirname, 'out_%s.log' % basename)
    xdb_model = os.path.join(dirname, 'out_%s.xdb' % basename)
    pch_model = os.path.join(dirname, 'out_%s.pch' % basename)
    asm_model = os.path.join(dirname, 'out_%s.asm' % basename)
    master_model = os.path.join(dirname, 'out_%s.master' % basename)

    cwd = dirname
    bdf_model2 = os.path.join(cwd, 'out_%s.bdf' % basename)
    op2_model2 = os.path.join(cwd, 'out_%s.op2' % basename)
    print(bdf_model2)

    # make sure we're writing an OP2 by forcing PARAM,POST
    bdf = read_bdf(bdf_model, debug=False)
    if 'POST' in bdf.params:
        param_post = bdf.params['POST']
        param_post.update_values(value1=post)
    else:
        card = ['PARAM', 'POST', post]
        bdf.add_card(card, 'PARAM', is_list=True)
    bdf.write_bdf(bdf_model2, size=size, is_double=is_double)

    # only run nastran when the f06 doesn't already exist
    if not os.path.exists(f06_model):
        os.system(nastran + bdf_model2)
    # clean up scratch output; keep the bdf/f06/op2
    for fnamei in [f04_model, log_model, xdb_model, pch_model, asm_model, master_model]:
        if os.path.exists(fnamei):
            os.remove(fnamei)
    if not os.path.exists(op2_model):
        raise RuntimeError('%s failed' % op2_model)
    op2 = read_op2(op2_model2)
    print(op2.get_op2_stats())
def run_fem1(fem1, bdf_model, out_model, mesh_form, xref, punch, sum_load, size, is_double,
             run_extract_bodies=False, encoding=None, crash_cards=None, safe_xref=True,
             pickle_obj=False, stop=False):
    """
    Reads/writes the BDF

    Parameters
    ----------
    fem1 : BDF()
        The BDF object
    bdf_model : str
        The root path of the bdf filename
    out_model : str
        The path to the output bdf
    mesh_form : str {combined, separate}
        'combined' : interspersed=True
        'separate' : interspersed=False
    xref : bool
        The xref mode
    punch : bool
        punch flag
    sum_load : bool
        static load sum flag
    size : int, {8, 16}
        size flag
    is_double : bool
        double flag
    safe_xref : bool; default=False
        ???
    run_extract_bodies : bool; default=False
        isolate the fem bodies; typically 1 body; code is still buggy
    encoding : str; default=None
        the file encoding
    crash_cards : List[str] / None
        card names that force a DisabledCardError when present in the deck

    Returns
    -------
    fem1 : BDF()
        the model (possibly re-created by remake_model when pickling)
    """
    if crash_cards is None:
        crash_cards = []
    check_path(bdf_model, 'bdf_model')
    try:
        # .pch files are always punch decks regardless of the punch flag
        if '.pch' in bdf_model:
            fem1.read_bdf(bdf_model, xref=False, punch=True, encoding=encoding)
        else:
            fem1.read_bdf(bdf_model, xref=False, punch=punch, encoding=encoding)
        # bail out on decks the caller explicitly disabled
        for card in crash_cards:
            if card in fem1.card_count:
                raise DisabledCardError('card=%r has been disabled' % card)
        #fem1.geom_check(geom_check=True, xref=False)
        if not stop and not xref:
            # exercise the solid-skinning path; round-trip the skin deck
            skin_filename = 'skin_file.bdf'
            write_skin_solid_faces(fem1, skin_filename, size=16, is_double=False)
            if os.path.exists(skin_filename):
                read_bdf(skin_filename, log=fem1.log)
                os.remove(skin_filename)
        if xref:
            #if run_extract_bodies:
                #extract_bodies(fem1)
            # 1. testing that these methods work without xref
            #fem1._get_rigid()
            #get_dependent_nid_to_components(fem1)
            #fem1._get_maps(eids=None, map_names=None,
                           #consider_0d=True, consider_0d_rigid=True,
                           #consider_1d=True, consider_2d=True, consider_3d=True)
            #get_dependent_nid_to_components(fem1)
            # 1. testing that these methods work with xref
            fem1._get_rigid()
            #common_node_ids = list(fem1.nodes.keys())
            #fem1.get_rigid_elements_with_node_ids(common_node_ids)
            #for spc_id in set(list(fem1.spcadds.keys()) + list(fem1.spcs.keys())):
                #fem1.get_reduced_spcs(spc_id)
            #for mpc_id in set(list(fem1.mpcadds.keys()) + list(fem1.mpcs.keys())):
                #fem1.get_reduced_mpcs(mpc_id)
            #get_dependent_nid_to_components(fem1)
            #fem1._get_maps(eids=None, map_names=None,
                           #consider_0d=True, consider_0d_rigid=True,
                           #consider_1d=True, consider_2d=True, consider_3d=True)
            #get_dependent_nid_to_components(fem1)
            #fem1.get_pid_to_node_ids_and_elements_array(pids=None, etypes=None, idtype='int32',
                                                        #msg=' which is required by test_bdf')
            #fem1.get_property_id_to_element_ids_map(msg=' which is required by test_bdf')
            #fem1.get_material_id_to_property_ids_map(msg=' which is required by test_bdf')
            #fem1.get_element_ids_list_with_pids(pids=None)
            #fem1.get_element_ids_dict_with_pids(pids=None, stop_if_no_eids=False,
                                                #msg=' which is required by test_bdf')
            #fem1.get_node_id_to_element_ids_map()
            #fem1.get_node_id_to_elements_map()
            # verify the deck re-reads cleanly
            read_bdf(fem1.bdf_filename, encoding=encoding,
                     debug=fem1.debug, log=fem1.log)
            fem1 = remake_model(bdf_model, fem1, pickle_obj)
            #fem1.geom_check(geom_check=True, xref=True)
    except Exception:
        print("failed reading %r" % bdf_model)
        raise
    #out_model = bdf_model + '_out'
    #if cid is not None and xref:
        #fem1.resolve_grids(cid=cid)
    # write the deck in the requested layout (None skips the write)
    if mesh_form is None:
        pass
    elif mesh_form == 'combined':
        fem1.write_bdf(out_model, interspersed=True, size=size, is_double=is_double)
    elif mesh_form == 'separate':
        fem1.write_bdf(out_model, interspersed=False, size=size, is_double=is_double)
    else:
        msg = "mesh_form=%r; allowed_mesh_forms=['combined','separate']" % mesh_form
        raise NotImplementedError(msg)
    #fem1.write_as_ctria3(out_model)
    fem1._get_maps()
    #remove_unused_materials(fem1)
    #remove_unused(fem1)
    #units_to = ['m', 'kg', 's']
    #units_from = ['m', 'kg', 's']
    #convert(fem1, units_to, units=units_from)
    if xref:
        check_for_cd_frame(fem1)
        #try:
            #fem1.get_area_breakdown()
            #fem1.get_volume_breakdown()
        #except Exception:
            #if len(fem1.masses) > 0:
                #fem1.log.warning('no elements with area/volume found, but elements with mass were')
            #else:
                #fem1.log.warning('no elements found')
        #if len(fem1.elements) + len(fem1.masses) > 0:
            #try:
                #fem1.get_mass_breakdown()
            #except RuntimeError:
                #fem1.log.warning('no elements with mass found')
    return fem1
def remake_model(bdf_model, fem1, pickle_obj):
    """
    Round-trips the model through save/load when pickling is being tested;
    otherwise returns the model untouched.
    """
    if not pickle_obj:
        return fem1

    root = os.path.splitext(bdf_model)[0]
    obj_model = '%s.test_bdfv.obj' % root

    # save twice: once with the default settings, once keeping the xref
    fem1.save(obj_model)
    fem1.save(obj_model, unxref=False)
    fem1.get_bdf_stats()

    # reload into a fresh BDF to prove the pickle is self-contained
    reloaded = BDF(debug=fem1.debug, log=fem1.log)
    reloaded.load(obj_model)
    os.remove(obj_model)
    reloaded.get_bdf_stats()
    return reloaded
def check_for_cd_frame(fem1):
    """
    A cylindrical/spherical CD frame will cause problems with the
    grid point force transformation; warn on any non-rectangular frame.
    """
    point_cards = ('GRID', 'SPOINT', 'EPOINT', 'RINGAX')
    if not any(card_name in fem1.card_count for card_name in point_cards):
        return

    out = fem1.get_displacement_index_xyz_cp_cd(
        fdtype='float64', idtype='int32', sort_ids=True)
    unused_icd_transform, unused_icp_transform, unused_xyz_cp, nid_cp_cd = out

    bad_cds = []
    for cd in np.unique(nid_cp_cd[:, 2]):
        if cd == -1:
            # no CD frame assigned
            continue
        # coordRs work in op2 extraction
        if fem1.coords[cd].type not in ('CORD2R', 'CORD1R'):
            bad_cds.append(cd)
    if bad_cds:
        fem1.log.warning(
            'GRID-CD coords=%s can cause a problem in the OP2 results processing; '
            'be careful' % bad_cds)
def run_fem2(bdf_model, out_model, xref, punch,
             sum_load, size, is_double, mesh_form,
             encoding=None, debug=False, quiet=False,
             stop_on_failure=True):
    """
    Reads/writes the BDF to verify nothing has been lost

    Parameters
    ----------
    bdf_model : str
        the filename to run
    out_model : str
        the deck written by run_fem1; this is what actually gets read
    xref : bool
        xrefs
    punch : bool
        punches
    sum_load : bool
        sums static load
    size : int
        the field width (8/16)
    is_double : bool
        is this a double precision model
    mesh_form : str {combined, separate}
        'combined' : interspersed=True
        'separate' : interspersed=False
    debug : bool
        debugs
    quiet : bool
        suppress prints

    Returns
    -------
    fem2 : BDF()
        the re-read model
    """
    assert os.path.exists(bdf_model), bdf_model
    assert os.path.exists(out_model), out_model
    fem2 = BDF(debug=debug, log=None)
    if not quiet:
        fem2.log.info('starting fem2')
    sys.stdout.flush()
    try:
        fem2.read_bdf(out_model, xref=False, punch=punch, encoding=encoding)
    except Exception:
        print("failed reading %r" % out_model)
        raise
    out_model_2 = bdf_model + '_out2'
    if xref and sum_load:
        # flag PARAM,POST values the OP2 reader can't handle
        if 'POST' in fem2.params:
            value = fem2.params['POST'].values[0]
            if value >= 0:
                msg = 'PARAM,POST,%i is not supported by the OP2 reader' % value
                fem2.log.error(msg)
        else:
            # default = PARAM, POST, 0
            msg = 'PARAM,POST,0 is not supported by the OP2 reader'
            fem2.log.error(msg)
        # gather case-control info for (currently disabled) validation below
        p0 = np.array([0., 0., 0.])
        subcase_keys = fem2.case_control_deck.get_subcase_list()
        subcases = fem2.subcases
        sol_200_map = fem2.case_control_deck.sol_200_map
        sol_base = fem2.sol
        is_restart = False
        for line in fem2.system_command_lines:
            if line.strip().upper().startswith('RESTART'):
                is_restart = True
        #if not is_restart:
            #validate_case_control(fem2, p0, sol_base, subcase_keys, subcases, sol_200_map,
                                  #stop_on_failure=stop_on_failure)
    # write-and-delete to prove the second model also writes cleanly
    if mesh_form is not None:
        fem2.write_bdf(out_model_2, interspersed=False, size=size, is_double=is_double)
        os.remove(out_model_2)
    #fem2.write_as_ctria3(out_model_2)
    return fem2
def _assert_has_spc(subcase, fem):
"""
SPCs may be defined on SPC/SPC1 cards or may be defined on
the GRID PS field
"""
if 'SPC' not in subcase:
has_ps = False
for nid, node in fem.nodes.items():
if node.ps:
has_ps = True
break
assert subcase.has_parameter('SPC', 'STATSUB') or has_ps, subcase
def require_cards(card_names, log, soltype, sol, subcase):
    """
    Logs an error for every required card missing from the subcase.

    Returns
    -------
    nerrors : int
        the number of missing cards
    """
    missing = [card_name for card_name in card_names
               if card_name not in subcase]
    for card_name in missing:
        log.error('A %s card is required for %s - SOL %i\n%s' % (
            card_name, soltype, sol, subcase))
    return len(missing)
def test_get_cards_by_card_types(model: BDF) -> None:
"""
Verifies the ``model.get_cards_by_card_types`` method works
"""
# setup to remove hackish cards
card_types = list(model.card_count.keys())
removed_cards = []
for card_type in ['ENDDATA', 'INCLUDE', 'JUNK']:
if card_type in model.card_count:
removed_cards.append(card_type)
for removed_card in removed_cards:
card_types.remove(removed_card)
removed_cards = []
for card_type in card_types:
if card_type not in model.cards_to_read:
try:
removed_cards.append(card_type)
#print('removed %s' % card_type)
except ValueError:
msg = 'card_type=%s cant be removed' % card_type
raise ValueError(msg)
for removed_card in removed_cards:
card_types.remove(removed_card)
# we now have a list of card types we would like to extract
# we'll get the associated cards
card_dict = model.get_cards_by_card_types(card_types,
reset_type_to_slot_map=False)
for card_type, cards in card_dict.items():
for card in cards:
msg = 'this should never crash here...card_type=%s card.type=%s' % (
card_type, card.type)
if card_type != card.type:
raise RuntimeError(msg)
def compare_card_count(fem1: BDF, fem2: BDF,
                       print_stats: bool=False, quiet: bool=False) -> List[str]:
    """Checks that no cards from fem1 are lost when we write fem2"""
    cards1 = fem1.card_count
    cards2 = fem2.card_count

    # the reader is expected to normalize card names to upper case
    if any(key != key.upper() for key in cards1):
        raise RuntimeError('Proper capitalization wasnt determined')

    # get_bdf_stats is always exercised; only print it when asked
    stats = fem1.get_bdf_stats()
    if print_stats:
        print(stats)
        print(fem1.loads)
    return compute_ints(cards1, cards2, fem1, quiet=quiet)
def compute_ints(cards1, cards2, fem1, quiet=True):
    """
    computes the difference / ratio / inverse-ratio between
    fem1 and fem2 to verify the number of card are the same:

    Examples
    --------
    name   fem1  fem2  diff  ratio  1/ratio
    ====   ====  ====  ====  =====  =======
    GRID      1     1     1     1.       1.
    *SPOINT  10     1     9    10.      0.1

    The * indicates a change, which may or may not be a problem.
    """
    keys1 = set(cards1)
    keys2 = set(cards2)
    all_keys = keys1 | keys2
    diff_keys1 = list(all_keys - keys1)
    diff_keys2 = list(all_keys - keys2)
    if diff_keys1 or diff_keys2:
        print(' diff_keys1=%s diff_keys2=%s' % (diff_keys1, diff_keys2))

    for key in sorted(all_keys):
        value1 = cards1.get(key, 0)
        value2 = cards2.get(key, 0)
        diff = abs(value1 - value2)

        # '-' (unsupported card) takes precedence over '*' (count changed)
        if key not in fem1.cards_to_read:
            star = '-'
        elif diff and key not in ['INCLUDE']:
            star = '*'
        else:
            star = ' '

        factor1 = divide(value1, value2)
        factor2 = divide(value2, value1)
        msg = ''
        if not quiet or not star or factor1 != factor2:
            factor_msg = ''
            if factor1 != factor2:
                factor_msg = 'diff=%s factor1=%g factor2=%g' % (
                    diff, factor1, factor2)
            msg += ' %skey=%-7s value1=%-7s value2=%-7s' % (
                star, key, value1, value2) + factor_msg
        msg = msg.rstrip()
        if msg:
            print(msg)
    #return list_keys1 + list_keys2
    return diff_keys1 + diff_keys2
def compute(cards1, cards2, quiet=False):
    """
    Computes the difference between two dictionaries of card counts and
    prints the per-card comparison.

    Parameters
    ----------
    cards1 / cards2 : dict[str, int]
        card_name -> count for the two models
    quiet : bool; default=False
        suppresses the (unstarred) INCLUDE lines
    """
    card_keys1 = set(cards1.keys())
    card_keys2 = set(cards2.keys())
    all_keys = card_keys1.union(card_keys2)
    diff_keys1 = list(all_keys.difference(card_keys1))
    diff_keys2 = list(all_keys.difference(card_keys2))
    if diff_keys1 or diff_keys2:
        # bug fix: this message used to be built and then silently
        # overwritten inside the loop without ever being printed
        print('diff_keys1=%s diff_keys2=%s' % (diff_keys1, diff_keys2))
    for key in sorted(all_keys):
        msg = ''
        # bug fix: a key present only in cards2 previously left value1
        # undefined (the else branch assigned value2 instead of value1),
        # raising UnboundLocalError
        value1 = cards1.get(key, 0)
        value2 = cards2.get(key, 0)
        if key == 'INCLUDE':
            if not quiet:
                msg += ' key=%-7s value1=%-7s value2=%-7s' % (
                    key, value1, value2)
        else:
            msg += ' *key=%-7s value1=%-7s value2=%-7s' % (
                key, value1, value2)
        msg = msg.rstrip()
        if msg:
            print(msg)
def compare(fem1, fem2, xref=True, check=True, print_stats=True, quiet=False):
    """compares two fem objects: card counts, plus matrices when xref'd/checked"""
    diff_cards = compare_card_count(
        fem1, fem2, print_stats=print_stats, quiet=quiet)
    if xref and check:
        #get_element_stats(fem1, fem2, quiet=quiet)
        get_matrix_stats(fem1, fem2)
        #compare_card_content(fem1, fem2)
    return diff_cards
def test_bdfv_argparse(argv=None):
    """
    test_bdfv argument parser

    Parameters
    ----------
    argv : List[str] / None
        the command line arguments including the program name;
        None -> sys.argv

    Returns
    -------
    args2 : dict[str, Any]
        the parsed arguments, keyed by '--flag' / positional name
    """
    if argv is None:
        argv = sys.argv[1:]  # same as argparse
        #print('get_inputs; argv was None -> %s' % argv)
    else:
        # drop the pyNastranGUI; same as argparse
        argv = argv[1:]
    encoding = sys.getdefaultencoding()
    import argparse
    parent_parser = argparse.ArgumentParser()
    parent_parser.add_argument('BDF_FILENAME', help='path to BDF/DAT/NAS file',
                               type=str)
    parent_parser.add_argument('-v', '--version', action='version',
                               version=pyNastran.__version__)

    #nargs : str/int
    #   * : 0 or more
    #   + : one or more
    #   ? : optional
    #   int : int values
    # --------------------------------------------------------------------------
    # Options
    xref_safe_group = parent_parser.add_mutually_exclusive_group()
    xref_safe_group.add_argument(
        '-x', '--xref', action='store_false',
        help='disables cross-referencing and checks of the BDF (default=True -> on)')
    xref_safe_group.add_argument(
        '--safe', action='store_true',
        help='Use safe cross-reference (default=False)')
    parent_parser.add_argument(
        '-p', '--punch', action='store_true',
        help='disables reading the executive and case control decks in the BDF\n'
        '(default=False -> reads entire deck)')
    stop_check_group = parent_parser.add_mutually_exclusive_group()
    stop_check_group.add_argument(
        '-c', '--check', action='store_true',
        help='disables BDF checks.  Checks run the methods on \n'
        '                 every element/property to test them.  May fails if a \n'
        '                 card is fully not supported (default=False)')
    stop_check_group.add_argument('--stop', action='store_true',  # dev
                                  help='Stop after first read/write (default=False)\n')
    width_group = parent_parser.add_mutually_exclusive_group()
    width_group.add_argument(
        '-l', '--large', action='store_true',
        help='writes the BDF in large field, single precision format (default=False)')
    width_group.add_argument(
        '-d', '--double', action='store_true',
        help='writes the BDF in large field, double precision format (default=False)')
    parent_parser.add_argument(
        '-L', '--loads', action='store_false',
        help='Disables forces/moments summation for the different subcases (default=True)')
    parent_parser.add_argument('-e', '--nerrors', nargs=1, default=100,
                               help='Allow for cross-reference errors (default=100)')
    parent_parser.add_argument('--encoding', nargs=1, default=encoding,
                               help='the encoding method (default=%r)\n' % encoding)
    parent_parser.add_argument('-q', '--quiet', action='store_true',
                               help='prints debug messages (default=False)')
    # --------------------------------------------------------------------------
    #'Developer:\n'
    parent_parser.add_argument('--crash', nargs=1, type=str,
                               help='Crash on specific cards (e.g. CGEN,EGRID)')
    parent_parser.add_argument('--dumplines', action='store_true',
                               help='Writes the BDF exactly as read with the INCLUDEs processed\n'
                               '(pyNastran_dump.bdf)')
    parent_parser.add_argument('--dictsort', action='store_true',
                               help='Writes the BDF exactly as read with the INCLUDEs processed\n'
                               '(pyNastran_dict.bdf)')
    parent_parser.add_argument('--profile', action='store_true',
                               help='Profiles the code (default=False)\n')
    parent_parser.add_argument('--pickle', action='store_true',
                               help='Pickles the data objects (default=False)\n')
    parent_parser.add_argument('--hdf5', action='store_true',
                               help='Save/load the BDF in HDF5 format')
    # --------------------------------------------------------------------------
    # rebrand the shared test_bdf help text as test_bdfv
    # (the original called get_test_bdf_usage_args_examples twice and built an
    #  unused ``msg`` string; both removed)
    usage, args, examples = get_test_bdf_usage_args_examples(encoding)
    usage = usage.replace('test_bdf', 'test_bdfv')
    examples = examples.replace('test_bdf', 'test_bdfv')
    update_message(parent_parser, usage, args, examples)
    args = parent_parser.parse_args(args=argv)
    args2 = argparse_to_dict(args)

    # rename the optional arguments to their '--flag' form for main()
    optional_args = [
        'double', 'large', 'crash', 'quiet', 'profile',
        'xref', 'safe', 'check', 'punch', 'loads', 'stop', 'encoding',
        'dumplines', 'dictsort', 'nerrors', 'pickle', 'hdf5',
    ]
    for arg in optional_args:
        swap_key(args2, arg, '--' + arg)
    return args2
def main():
    """The main function for the command line ``test_bdfv`` script."""
    data = test_bdfv_argparse()
    for key, value in sorted(data.items()):
        print("%-12s = %r" % (key.strip('--'), value))

    import time
    time0 = time.time()

    # --double implies large field; --large alone is single precision
    is_double = False
    if data['--double']:
        size = 16
        is_double = True
    elif data['--large']:
        size = 16
    else:
        size = 8

    crash_cards = []
    if data['--crash']:
        crash_cards = data['--crash'].split(',')

    # quiet mode disables the debug logger entirely
    debug = True
    if data['--quiet']:
        debug = None

    # single source of truth for the run_bdf keyword arguments; the two
    # branches previously duplicated this list and had drifted apart
    # (bug fix: the profiling branch passed the literal list ['--xref']
    #  instead of the parsed data['--xref'] flag)
    kwargs = dict(
        debug=debug,
        xref=data['--xref'],
        check=not data['--check'],
        punch=data['--punch'],
        size=size,
        is_double=is_double,
        sum_load=data['--loads'],
        stop=data['--stop'],
        quiet=data['--quiet'],
        dumplines=data['--dumplines'],
        dictsort=data['--dictsort'],
        nerrors=data['--nerrors'],
        encoding=data['--encoding'],
        crash_cards=crash_cards,
        run_extract_bodies=False,
        pickle_obj=data['--pickle'],
        safe_xref=data['--safe'],
        print_stats=True,
        stop_on_failure=False,
    )
    if data['--profile']:
        import pstats
        import cProfile
        prof = cProfile.Profile()
        prof.runcall(run_bdf, '.', data['BDF_FILENAME'], **kwargs)
        prof.dump_stats('bdf.profile')
        stats = pstats.Stats("bdf.profile")
        stats.sort_stats('tottime')  # time in function
        #stats.sort_stats('cumtime')  # time in function & subfunctions
        stats.strip_dirs()
        stats.print_stats(40)
        #retval = prof.runcall(self.method_actual, *args, **kwargs)
        #print(prof.dump_stats(datafn))
    else:
        run_bdf('.', data['BDF_FILENAME'], **kwargs)
    print("total time:  %.2f sec" % (time.time() - time0))
# script entry point; excluded from coverage since it only runs when executed directly
if __name__ == '__main__':  # pragma: no cover
    main()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,635
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py
|
# coding: utf-8
#pylint disable=C0103
from itertools import count
import warnings
from typing import Tuple, List
import numpy as np
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.op2.op2_interface.write_utils import to_column_bytes, view_dtype, view_idtype_as_fdtype
from pyNastran.op2.tables.oes_stressStrain.real.oes_objects import (
StressObject, StrainObject, OES_Object)
from pyNastran.op2.result_objects.op2_objects import get_times_dtype
from pyNastran.f06.f06_formatting import write_floats_13e, _eigenvalue_header
from pyNastran.op2.errors import SixtyFourBitError
class RealPlateArray(OES_Object):
    """
    Vectorized storage for real plate/shell stress-strain results
    (CQUAD4, CTRIA3, CQUAD8, CTRIA6, CQUADR, CTRIAR, ...); two layers
    (top/bottom fiber) are stored per element node.
    """
    def __init__(self, data_code, is_sort1, isubcase, dt):
        OES_Object.__init__(self, data_code, isubcase, apply_data_code=False)
        #self.code = [self.format_code, self.sort_code, self.s_code]
        #self.ntimes = 0 # or frequency/mode
        #self.ntotal = 0
        self.ielement = 0       # running element counter used while filling
        self.nelements = 0      # result specific; sized properly in build()
        self.nnodes = None      # nodes per element; set in build()
        #if is_sort1:
            #pass
        #else:
            #raise NotImplementedError('SORT2')
    @property
    def is_real(self) -> bool:
        """this table holds real (not complex) results"""
        return True

    @property
    def is_complex(self) -> bool:
        """complex results are handled by a different class"""
        return False
@property
def nnodes_per_element(self) -> int:
if self.element_type in [33, 74, 83, 227, 228]:
nnodes_per_element = 1
elif self.element_type == 144:
nnodes_per_element = 5
elif self.element_type == 64: # CQUAD8
nnodes_per_element = 5
elif self.element_type == 82: # CQUADR
nnodes_per_element = 5
elif self.element_type == 70: # CTRIAR
nnodes_per_element = 4
elif self.element_type == 75: # CTRIA6
nnodes_per_element = 4
else:
raise NotImplementedError(f'name={self.element_name!r} type={self.element_type}')
return nnodes_per_element
    def _reset_indices(self) -> None:
        """resets the write counters before (re)filling the arrays"""
        self.itotal = 0
        self.ielement = 0
    def get_headers(self):
        """abstract; the stress/strain subclasses supply the column names"""
        raise NotImplementedError('%s needs to implement get_headers' % self.__class__.__name__)
def is_bilinear(self):
if self.element_type in [33, 74]: # CQUAD4, CTRIA3
return False
elif self.element_type in [144, 64, 82, 70, 75]: # CQUAD4
return True
else:
raise NotImplementedError(f'name={self.element_name} type={self.element_type}')
    def build(self):
        """sizes the vectorized attributes of the RealPlateArray"""
        #print("self.ielement = %s" % self.ielement)
        #print('ntimes=%s nelements=%s ntotal=%s' % (self.ntimes, self.nelements, self.ntotal))
        assert self.ntimes > 0, 'ntimes=%s' % self.ntimes
        assert self.nelements > 0, 'nelements=%s' % self.nelements
        assert self.ntotal > 0, 'ntotal=%s' % self.ntotal

        # each node writes 2 layers (top/bottom fiber)
        nnodes_per_element = self.nnodes_per_element
        #print(self.code_information())
        nlayers_per_element = 2 * nnodes_per_element
        #print('nnodes_per_element[%s, %s] = %s' % (
            #self.isubcase, self.element_type, nnodes_per_element))
        self.nnodes = nnodes_per_element
        # nelements was accumulated across all times while reading; normalize it
        #self.nelements //= nnodes_per_element
        self.nelements //= self.ntimes
        #self.ntotal //= factor
        self.itime = 0
        self.ielement = 0
        self.itotal = 0
        #self.ntimes = 0
        #self.nelements = 0
        #print("***name=%s type=%s nnodes_per_element=%s ntimes=%s nelements=%s ntotal=%s" % (
            #self.element_name, self.element_type, nnodes_per_element, self.ntimes,
            #self.nelements, self.ntotal))

        dtype, idtype, fdtype = get_times_dtype(self.nonlinear_factor, self.size, self.analysis_fmt)
        if self.is_sort1:
            ntimes = self.ntimes
            # NOTE(review): nlayers is taken from self.nelements here, which
            # (after the //= above) appears to already count layers — confirm
            nlayers = self.nelements
        else:
            # SORT2: time and element axes are swapped on disk
            # NUMBER OF CQUAD4 ELEMENTS = 956
            # NUMBER OF CTRIA3 ELEMENTS = 27
            #print(self.ntimes, self.nelements, self.ntotal, self._ntotals)
            nelements = self.ntimes  # good
            ntimes = self._ntotals[0] // nlayers_per_element
            nlayers = nelements * nlayers_per_element
            #nelements = self._ntotals[0]  # good
            #ntimes = nlayers // nelements
            assert nlayers % nelements == 0
            #print('***', self.element_name, nlayers)
            #print(f'ntimes={ntimes} nelements={nelements} nlayers={nlayers}; '
                  #f'nlayers_per_element={nlayers_per_element}')

        #print(self.code_information())
        if self.analysis_code == 1:
            # statics should only have a single "time"
            #ntimes = 1
            if ntimes != 1:
                # C:\MSC.Software\simcenter_nastran_2019.2\tpl_post1\acc002.op2
                warnings.warn(f'ntimes != 1; {self.element_name}-{self.element_type}\n'
                              f'ntimes={ntimes} _ntotals={self._ntotals} '
                              f'sort_method={self.sort_method} nlayers_per_element={nlayers_per_element} nlayers={nlayers}')
        assert nlayers >= 2, self.code_information()

        _times = np.zeros(ntimes, dtype=dtype)
        element_node = np.zeros((nlayers, 2), dtype=idtype)

        #[fiber_dist, oxx, oyy, txy, angle, majorP, minorP, ovm]
        data = np.zeros((ntimes, nlayers, 8), dtype=fdtype)
        if self.load_as_h5:
            # store the arrays in the HDF5 file instead of in memory
            #for key, value in sorted(self.data_code.items()):
                #print(key, value)
            group = self._get_result_group()
            self._times = group.create_dataset('_times', data=_times)
            self.element_node = group.create_dataset('element_node', data=element_node)
            self.data = group.create_dataset('data', data=data)
        else:
            self._times = _times
            self.element_node = element_node
            self.data = data
        #print(self.element_node.shape, self.data.shape)
    def build_dataframe(self):
        """creates a pandas dataframe indexed by (ElementID, NodeID, Location)"""
        import pandas as pd
        headers = self.get_headers()
        # 2 layers (top/bottom) per element-node row
        nelements = self.element_node.shape[0] // 2
        if self.is_fiber_distance:
            fiber_distance = ['Top', 'Bottom'] * nelements
        else:
            fiber_distance = ['Mean', 'Curvature'] * nelements
        fd = np.array(fiber_distance, dtype='unicode')
        # node id 0 marks the centroid; label it 'CEN' for readability
        node = pd.Series(data=self.element_node[:, 1])
        node.replace(to_replace=0, value='CEN', inplace=True)
        element_node = [
            self.element_node[:, 0],
            node,
            fd,
        ]
        if self.nonlinear_factor not in (None, np.nan):
            # transient: one column per mode/time/load step, e.g.
            # Mode                               1             2
            # ElementID NodeID Location Item
            # 8         0      Top      oxx      7.09e-12  -3.26e-06
            #                           ...
            #                  Bottom   ...
            column_names, column_values = self._build_dataframe_transient_header()
            names = ['ElementID', 'NodeID', 'Location', 'Item']
            data_frame = self._build_pandas_transient_element_node(
                column_values, column_names,
                headers, element_node, self.data, from_tuples=False, from_array=True,
                names=names,
            )
        else:
            # static: single time step -> one column per header
            # option B - nice!
            df1 = pd.DataFrame(element_node).T
            df1.columns = ['ElementID', 'NodeID', 'Location']
            df2 = pd.DataFrame(self.data[0])
            df2.columns = headers
            data_frame = df1.join(df2)
            data_frame = data_frame.reset_index().set_index(['ElementID', 'NodeID', 'Location'])
        self.data_frame = data_frame
    def __eq__(self, table):  # pragma: no cover
        """compares two tables element-by-element; raises ValueError with a diff on mismatch"""
        assert self.is_sort1 == table.is_sort1
        self._eq_header(table)
        if not np.array_equal(self.data, table.data):
            msg = 'table_name=%r class_name=%s\n' % (self.table_name, self.__class__.__name__)
            msg += '%s\n' % str(self.code_information())
            i = 0
            for itime in range(self.ntimes):
                for ie, element_nodei in enumerate(self.element_node):
                    (eid, nid) = element_nodei
                    t1 = self.data[itime, ie, :]
                    t2 = table.data[itime, ie, :]
                    (fiber_dist1, oxx1, oyy1, txy1, angle1, major_p1, minor_p1, ovm1) = t1
                    (fiber_dist2, oxx2, oyy2, txy2, angle2, major_p2, minor_p2, ovm2) = t2
                    # vm stress can be NaN for some reason...
                    # so the last term (ovm) is excluded from the comparison
                    if not np.array_equal(t1[:-1], t2[:-1]):
                        msg += '(%s, %s)    (%s, %s, %s, %s, %s, %s, %s, %s)  (%s, %s, %s, %s, %s, %s, %s, %s)\n' % (
                            eid, nid,
                            fiber_dist1, oxx1, oyy1, txy1, angle1, major_p1, minor_p1, ovm1,
                            fiber_dist2, oxx2, oyy2, txy2, angle2, major_p2, minor_p2, ovm2)
                        i += 1
                        # bail out after the first 10 differences
                        if i > 10:
                            print(msg)
                            raise ValueError(msg)
                #print(msg)
            if i > 0:
                raise ValueError(msg)
        return True
def add_new_eid_sort1(self, dt, eid, node_id,
fiber_dist1, oxx1, oyy1, txy1, angle1, major_principal1, minor_principal1, ovm1,
fiber_dist2, oxx2, oyy2, txy2, angle2, major_principal2, minor_principal2, ovm2):
assert isinstance(eid, integer_types), eid
assert isinstance(node_id, integer_types), node_id
self._times[self.itime] = dt
#assert self.itotal == 0, oxx
self.element_node[self.itotal, :] = [eid, node_id]
self.element_node[self.itotal+1, :] = [eid, node_id]
self.data[self.itime, self.itotal, :] = [fiber_dist1, oxx1, oyy1, txy1, angle1,
major_principal1, minor_principal1, ovm1]
self.data[self.itime, self.itotal+1, :] = [fiber_dist2, oxx2, oyy2, txy2, angle2,
major_principal2, minor_principal2, ovm2]
self.itotal += 2
self.ielement += 2
def add_sort1(self, dt, eid, node_id,
fiber_dist1, oxx1, oyy1, txy1, angle1, major_principal1, minor_principal1, ovm1,
fiber_dist2, oxx2, oyy2, txy2, angle2, major_principal2, minor_principal2, ovm2):
assert eid is not None, eid
assert isinstance(eid, integer_types) and eid > 0, 'dt=%s eid=%s' % (dt, eid)
assert isinstance(node_id, integer_types), node_id
self.element_node[self.itotal, :] = [eid, node_id]
self.element_node[self.itotal+1, :] = [eid, node_id]
self.data[self.itime, self.itotal, :] = [fiber_dist1, oxx1, oyy1, txy1, angle1,
major_principal1, minor_principal1, ovm1]
self.data[self.itime, self.itotal+1, :] = [fiber_dist2, oxx2, oyy2, txy2, angle2,
major_principal2, minor_principal2, ovm2]
self.itotal += 2
#self.ielement += 2
    def _get_sort2_itime_ilower_iupper_from_itotal(self, dt, eid: int, nid: int,
                                                   debug=False) -> Tuple[int, int, int]:
        """
        Maps the SORT2 write counters to (itime, ie_upper, ie_lower).

        In SORT2 the time and element axes are swapped relative to SORT1,
        so the running counters (itime/ielement/itotal) are reinterpreted:
        self.itime counts elements and self.ielement counts times.
        ie_upper/ie_lower are the top/bottom-fiber row indices into
        element_node / data.
        """
        ntimes = self.data.shape[0]
        # the monotonic element index (no duplicates)
        ielement = self.itime
        # itime = self.itime
        itime = self.ielement
        #ie_upper = self.ielement
        #ie_lower = self.ielement + 1
        #itotal = self.itotal
        nnodes = self.nnodes_per_element
        # which node of the element this row belongs to
        #itime = self.ielement // nnodes
        #ilayer = self.itotal % 2 == 0  # 0/1
        #ielement_inid = self.itotal // ntimes
        inid = self.itotal // (2 * ntimes)
        if self.element_type in [33, 74, 227, 228]:
            # CQUAD4-33, CTRIA3-74, CTRIAR-227, CQUADR-228: centroid only
            assert inid == 0, (self.element_name, self.element_type, inid)
        elif self.element_type in [64, 144]:  # CQUAD8, CQUAD4-144
            assert inid in (0, 1, 2, 3, 4), (self.element_name, self.element_type, inid)
        elif self.element_type == 75:  # CTRIA6 (comment fixed; was mislabeled CQUAD8)
            assert inid in (0, 1, 2, 3), (self.element_name, self.element_type, inid)
        else:
            raise NotImplementedError((self.element_name, self.element_type, inid))
        #inid = self.ielement % nnodes
        #itotal = self.itotal
        ielement = self.itime
        #if self.element_name == 'CQUAD8':
            #print(f'*SORT2 {self.element_name}: itime={itime} ielement={ielement} inid={inid} dt={dt} eid={eid} nid={nid}')
        #print(itime, inid, ielement)
        #ibase = 2 * ielement  # ctria3/cquad4-33
        if debug:
            print(f'ielement={ielement} nnodes={nnodes} inid={inid}')
        # 2 layers per node -> base row of this element/node pair
        ibase = 2 * (ielement * nnodes + inid)
        #ibase = ielement_inid
        ie_upper = ibase
        ie_lower = ibase + 1
        #if debug:
            #print(f'SORT2 {self.table_name} {self.element_name}: itime={itime} ie_upper={ie_upper} dt={dt} eid={eid} nid={nid}')
            #print(f'SORT2 {self.table_name} {self.element_name}: itime={itime} ie_lower={ie_lower} dt={dt} eid={eid} nid={nid}')
        return itime, ie_upper, ie_lower
    def add_new_eid_sort2(self, dt, eid, node_id,
                          fiber_dist1, oxx1, oyy1, txy1, angle1, major_principal1, minor_principal1, ovm1,
                          fiber_dist2, oxx2, oyy2, txy2, angle2, major_principal2, minor_principal2, ovm2):
        """starts a new element for a SORT2 table; stores dt and the element/node ids"""
        assert isinstance(eid, integer_types), eid
        assert isinstance(node_id, integer_types), node_id
        #itime, itotal = self._get_sort2_itime_ielement_from_itotal()
        itime, ie_upper, ie_lower = self._get_sort2_itime_ilower_iupper_from_itotal(dt, eid, node_id)
        try:
            #print(f'SORT2: itime={itime} -> dt={dt}; ie_upper={ie_upper} -> eid={eid} ({self.element_name})')
            self._times[itime] = dt
            #assert self.itotal == 0, oxx
            #if itime == 0:
            self.element_node[ie_upper, :] = [eid, node_id]  # 0 is center
            self.element_node[ie_lower, :] = [eid, node_id]  # 0 is center
        except Exception:
            # re-run with debug=True to print the index mapping before re-raising
            itime, ie_upper, ie_lower = self._get_sort2_itime_ilower_iupper_from_itotal(
                dt, eid, node_id, debug=True)
            print(f'SORT2: itime={itime} -> dt={dt}; ie_upper={ie_upper} -> eid={eid} ({self.element_name})')
            raise
        #print(self.element_node)
        # NOTE(review): unlike add_sort2, the data writes here are commented
        # out, so only the ids/time are stored — confirm this is intentional
        #self.data[self.itime, ie_upper, :] = [fiber_dist1, oxx1, oyy1, txy1, angle1,
                                              #major_principal1, minor_principal1, ovm1]
        #self.data[self.itime, ie_lower, :] = [fiber_dist2, oxx2, oyy2, txy2, angle2,
                                              #major_principal2, minor_principal2, ovm2]
        self.itotal += 2
        #self.ielement += 1
def add_sort2(self, dt, eid, node_id,
fiber_dist1, oxx1, oyy1, txy1, angle1, major_principal1, minor_principal1, ovm1,
fiber_dist2, oxx2, oyy2, txy2, angle2, major_principal2, minor_principal2, ovm2):
assert eid is not None, eid
assert isinstance(eid, integer_types) and eid > 0, 'dt=%s eid=%s' % (dt, eid)
assert isinstance(node_id, integer_types), node_id
itime, ie_upper, ie_lower = self._get_sort2_itime_ilower_iupper_from_itotal(dt, eid, node_id)
#print(f'SORT2b: itime={itime} -> dt={dt}; ie_upper={ie_upper} -> eid={eid} nid={node_id}')
#print(self.element_node.shape)
#if itime == 0:
self.element_node[ie_upper, :] = [eid, node_id]
self.element_node[ie_lower, :] = [eid, node_id]
#print(self.element_node.tolist())
self.data[itime, ie_upper, :] = [fiber_dist1, oxx1, oyy1, txy1, angle1,
major_principal1, minor_principal1, ovm1]
self.data[itime, ie_lower, :] = [fiber_dist2, oxx2, oyy2, txy2, angle2,
major_principal2, minor_principal2, ovm2]
self.itotal += 2
#self.ielement += 2
    def get_stats(self, short: bool=False) -> List[str]:
        """returns a human-readable summary of the table's size and codes"""
        if not self.is_built:
            return [
                '<%s>\n' % self.__class__.__name__,
                f' ntimes: {self.ntimes:d}\n',
                f' ntotal: {self.ntotal:d}\n',
            ]

        nelements = self.nelements
        ntimes = self.ntimes
        nnodes = self.nnodes
        ntotal = self.ntotal
        nlayers = 2
        # recomputed from ntotal; the stored self.nelements is not trusted here
        nelements = self.ntotal // self.nnodes // 2

        msg = []
        if self.nonlinear_factor not in (None, np.nan):  # transient
            msgi = ' type=%s ntimes=%i nelements=%i nnodes_per_element=%i nlayers=%i ntotal=%i\n' % (
                self.__class__.__name__, ntimes, nelements, nnodes, nlayers, ntotal)
            ntimes_word = 'ntimes'
        else:
            msgi = ' type=%s nelements=%i nnodes_per_element=%i nlayers=%i ntotal=%i\n' % (
                self.__class__.__name__, nelements, nnodes, nlayers, ntotal)
            ntimes_word = '1'
        msg.append(msgi)
        headers = self.get_headers()
        n = len(headers)
        msg.append(' data: [%s, ntotal, %i] where %i=[%s]\n' % (ntimes_word, n, n,
                                                                str(', '.join(headers))))
        msg.append(f' element_node.shape = {self.element_node.shape}\n')
        msg.append(f' data.shape={self.data.shape}\n')
        msg.append(f' element type: {self.element_name}-{self.element_type}\n')
        msg.append(f' s_code: {self.s_code}\n')
        msg += self.get_data_code()
        return msg
def get_element_index(self, eids):
# elements are always sorted; nodes are not
itot = np.searchsorted(eids, self.element_node[:, 0]) #[0]
return itot
def eid_to_element_node_index(self, eids):
ind = np.ravel([np.searchsorted(self.element_node[:, 0] == eid) for eid in eids])
#ind = searchsorted(eids, self.element)
#ind = ind.reshape(ind.size)
#ind.sort()
return ind
    def write_f06(self, f06_file, header=None, page_stamp='PAGE %s',
                  page_num: int=1, is_mag_phase: bool=False, is_sort1: bool=True):
        """writes the table in f06 (fixed-width text) format; returns the last page number"""
        if header is None:
            header = []
        msg, nnodes, cen = _get_plate_msg(self)

        # write the f06
        ntimes = self.data.shape[0]

        eids = self.element_node[:, 0]
        nids = self.element_node[:, 1]

        #cen_word = 'CEN/%i' % nnodes
        cen_word = cen
        for itime in range(ntimes):
            dt = self._times[itime]
            header = _eigenvalue_header(self, header, itime, ntimes, dt)
            f06_file.write(''.join(header + msg))

            #print("self.data.shape=%s itime=%s ieids=%s" % (str(self.data.shape), itime, str(ieids)))

            #[fiber_dist, oxx, oyy, txy, angle, majorP, minorP, ovm]
            fiber_dist = self.data[itime, :, 0]
            oxx = self.data[itime, :, 1]
            oyy = self.data[itime, :, 2]
            txy = self.data[itime, :, 3]
            angle = self.data[itime, :, 4]
            major_principal = self.data[itime, :, 5]
            minor_principal = self.data[itime, :, 6]
            ovm = self.data[itime, :, 7]

            is_linear = self.element_type in {33, 74, 227, 228, 83}
            is_bilinear = self.element_type in {64, 70, 75, 82, 144}
            # rows alternate top/bottom fiber; i % 2 picks the layer
            for (i, eid, nid, fdi, oxxi, oyyi, txyi, anglei, major, minor, ovmi) in zip(
                    count(), eids, nids, fiber_dist, oxx, oyy, txy, angle, major_principal, minor_principal, ovm):
                [fdi, oxxi, oyyi, txyi, major, minor, ovmi] = write_floats_13e(
                    [fdi, oxxi, oyyi, txyi, major, minor, ovmi])
                ilayer = i % 2
                # tria3
                if is_linear:  # CQUAD4, CTRIA3, CTRIAR linear, CQUADR linear
                    if ilayer == 0:
                        f06_file.write('0 %6i %-13s %-13s %-13s %-13s %8.4f %-13s %-13s %s\n' % (
                            eid, fdi, oxxi, oyyi, txyi, anglei, major, minor, ovmi))
                    else:
                        f06_file.write(' %6s %-13s %-13s %-13s %-13s %8.4f %-13s %-13s %s\n' % (
                            '', fdi, oxxi, oyyi, txyi, anglei, major, minor, ovmi))
                elif is_bilinear:  # CQUAD8, CTRIAR, CTRIA6, CQUADR, CQUAD4
                    # bilinear
                    if nid == 0 and ilayer == 0:  # CEN
                        f06_file.write('0 %8i %8s %-13s %-13s %-13s %-13s %8.4f %-13s %-13s %s\n' % (
                            eid, cen_word, fdi, oxxi, oyyi, txyi, anglei, major, minor, ovmi))
                    elif ilayer == 0:
                        f06_file.write(' %8s %8i %-13s %-13s %-13s %-13s %8.4f %-13s %-13s %s\n' % (
                            '', nid, fdi, oxxi, oyyi, txyi, anglei, major, minor, ovmi))
                    elif ilayer == 1:
                        f06_file.write(' %8s %8s %-13s %-13s %-13s %-13s %8.4f %-13s %-13s %s\n\n' % (
                            '', '', fdi, oxxi, oyyi, txyi, anglei, major, minor, ovmi))
                else:  # pragma: no cover
                    msg = 'element_name=%s self.element_type=%s' % (
                        self.element_name, self.element_type)
                    raise NotImplementedError(msg)
            f06_file.write(page_stamp % page_num)
            page_num += 1
        return page_num - 1
def get_nnodes_bilinear(self):
"""gets the number of nodes and whether or not the element has bilinear results"""
is_bilinear = False
if self.element_type == 74:
nnodes = 3
elif self.element_type == 33:
nnodes = 4
elif self.element_type == 144:
nnodes = 4
is_bilinear = True
elif self.element_type == 82: # CQUADR
nnodes = 4
is_bilinear = True
elif self.element_type == 64: # CQUAD8
nnodes = 4
is_bilinear = True
elif self.element_type == 75: # CTRIA6
nnodes = 3
is_bilinear = True
elif self.element_type == 70: # CTRIAR
nnodes = 3
is_bilinear = True
elif self.element_type == 227: # CTRIAR-linear
nnodes = 3
is_bilinear = False
elif self.element_type == 228: # CQUADR-linear
nnodes = 4
is_bilinear = False
else:
raise NotImplementedError(f'name={self.element_name} type={self.element_type}')
return nnodes, is_bilinear
    def write_op2(self, op2_file, op2_ascii, itable, new_result,
                  date, is_mag_phase=False, endian='>'):
        """writes an OP2"""
        import inspect
        from struct import Struct, pack
        frame = inspect.currentframe()
        call_frame = inspect.getouterframes(frame, 2)
        op2_ascii.write(f'{self.__class__.__name__}.write_op2: {call_frame[1][3]}\n')

        if itable == -1:
            self._write_table_header(op2_file, op2_ascii, date)
            itable = -3

        nnodes, is_bilinear = self.get_nnodes_bilinear()
        if is_bilinear:
            # +1 for the centroid row; 17 words per node + 2 header words
            nnodes_all = nnodes + 1
            ntotal = 2 + 17 * nnodes_all
        else:
            nnodes_all = nnodes
        #print("nnodes_all =", nnodes_all)
        #cen_word_ascii = f'CEN/{nnodes:d}'
        cen_word_bytes = b'CEN/'

        idtype = self.element_node.dtype
        fdtype = self.data.dtype
        if self.size == 4:
            pass
        else:
            # 64-bit arrays are downcast to 32-bit for writing
            print(f'downcasting {self.class_name}...')
            #cen_word_bytes = b'CEN/ '
            idtype = np.int32(1)
            fdtype = np.float32(1.0)

        #msg.append(f' element_node.shape = {self.element_node.shape}\n')
        #msg.append(f' data.shape={self.data.shape}\n')
        eids = self.element_node[:, 0]
        nids = self.element_node[:, 1]
        max_id = self.element_node.max()
        # eid*10+device_code must fit in a 32-bit int
        if max_id > 99999999:
            raise SixtyFourBitError(f'64-bit OP2 writing is not supported; max id={max_id}')
        eids_device = eids * 10 + self.device_code

        nelements = len(np.unique(eids))
        nlayers = len(eids)
        #print('nelements =', nelements)
        #print('nlayers =', nlayers)
        nnodes_per_element = nlayers // nelements // 2

        # 21 = 1 node, 3 principal, 6 components, 9 vectors, 2 p/ovm
        #ntotal = ((nnodes * 21) + 1) + (nelements * 4)
        ntotali = self.num_wide
        ntotal = ntotali * nelements

        assert nnodes > 1, nnodes
        op2_ascii.write(f' ntimes = {self.ntimes}\n')

        #[fiber_dist, oxx, oyy, txy, angle, majorP, minorP, ovm]
        op2_ascii.write(' #elementi = [eid_device, fd1, sx1, sy1, txy1, angle1, major1, minor1, vm1,\n')
        op2_ascii.write(' # fd2, sx2, sy2, txy2, angle2, major2, minor2, vm2,]\n')  # 1+16
        op2_ascii.write(' #elementi = [eid_device, node1, fd1, sx1, sy1, txy1, angle1, major1, minor1, vm1,\n')
        op2_ascii.write(' # fd2, sx2, sy2, txy2, angle2, major2, minor2, vm2,]\n')  # 1 + 17*5
        op2_ascii.write(' #elementi = [ node2, fd1, sx1, sy1, txy1, angle1, major1, minor1, vm1,\n')
        op2_ascii.write(' # fd2, sx2, sy2, txy2, angle2, major2, minor2, vm2,]\n')  # 17
        op2_ascii.write(' #elementi = [ node3, fd1, sx1, sy1, txy1, angle1, major1, minor1, vm1,\n')
        op2_ascii.write(' # fd2, sx2, sy2, txy2, angle2, major2, minor2, vm2,]\n')  # 17
        op2_ascii.write(' #elementi = [ node4, fd1, sx1, sy1, txy1, angle1, major1, minor1, vm1,\n')
        op2_ascii.write(' # fd2, sx2, sy2, txy2, angle2, major2, minor2, vm2,]\n')  # 17
        op2_ascii.write(' #elementi = [ node5, fd1, sx1, sy1, txy1, angle1, major1, minor1, vm1,\n')
        op2_ascii.write(' # fd2, sx2, sy2, txy2, angle2, major2, minor2, vm2,]\n')  # 17

        if not self.is_sort1:
            raise NotImplementedError('SORT2')
        #struct_isi8f = Struct('i 4s i 8f')
        #struct_i8f = Struct(endian + b'i8f')
        #struct_8f = Struct(endian + b'8f')

        nelements_nnodes = len(nids) // 2
        is_centroid = self.element_type in [33, 74, 227, 228]
        is_nodes = self.element_type in [64, 70, 75, 82, 144]
        if is_centroid:
            # every other row (top fiber) carries the element id
            eids_device2 = to_column_bytes([eids_device[::2]], idtype).view(fdtype)
            assert len(eids_device2) == nelements
        elif is_nodes:
            # nodal output: each record is eid, 'CEN/', then per-node data
            cen_word_array_temp = np.full((nelements, 1), cen_word_bytes)
            cen_word_array = cen_word_array_temp.view(fdtype)
            eids_device2 = view_idtype_as_fdtype(eids_device[::2*nnodes_per_element].reshape(nelements, 1),
                                                 fdtype)
            nids2 = view_idtype_as_fdtype(nids[::2].reshape(nelements_nnodes, 1),
                                          fdtype)

        #nheader = 15
        struct_i = Struct('i')
        struct_13i = Struct('13i')
        op2_ascii.write(f'nelements={nelements:d}\n')
        for itime in range(self.ntimes):
            self._write_table_3(op2_file, op2_ascii, new_result, itable, itime)

            # record 4
            #print('stress itable = %s' % itable)
            itable -= 1
            header = [4, itable, 4,
                      4, 1, 4,
                      4, 0, 4,
                      4, ntotal, 4,
                      4 * ntotal]
            op2_file.write(struct_13i.pack(*header))
            op2_ascii.write('r4 [4, 0, 4]\n')
            op2_ascii.write(f'r4 [4, {itable:d}, 4]\n')
            op2_ascii.write(f'r4 [4, {4 * ntotal:d}, 4]\n')

            if is_centroid:
                # [eid_device, fdi, oxxi, oyyi, txyi, anglei, major, minor, ovmi]
                # [ fdi, oxxi, oyyi, txyi, anglei, major, minor, ovmi]
                datai = view_dtype(self.data[itime, :, :].reshape(nelements, 16), fdtype)
                data_out = np.hstack([eids_device2, datai])
            elif is_nodes:
                # CQUAD8, CTRIAR, CTRIA6, CQUADR, CQUAD4
                # bilinear
                datai = view_dtype(
                    self.data[itime, :, :].reshape(nelements*nnodes_per_element, 16),
                    fdtype)
                nids_data = np.hstack([nids2, datai]).reshape(nelements, nnodes_per_element*17)
                data_out = np.hstack([eids_device2, cen_word_array, nids_data])
            else:  # pragma: no cover
                msg = f'element_name={self.element_name} element_type={self.element_type}'
                raise NotImplementedError(msg)
            assert data_out.size == ntotal, f'data_out.shape={data_out.shape} size={data_out.size}; ntotal={ntotal}'
            op2_file.write(data_out)

            itable -= 1
            header = [4 * ntotal,]
            op2_file.write(struct_i.pack(*header))
            op2_ascii.write('footer = %s\n' % header)
            new_result = False
        return itable
class RealPlateStressArray(RealPlateArray, StressObject):
    """Real plate/shell stress table (OES)."""
    def __init__(self, data_code, is_sort1, isubcase, dt):
        RealPlateArray.__init__(self, data_code, is_sort1, isubcase, dt)
        StressObject.__init__(self, data_code, isubcase)

    def get_headers(self) -> List[str]:
        """column names for the 8 stress terms"""
        if self.is_fiber_distance:
            fiber_dist = 'fiber_distance'
        else:
            fiber_dist = 'fiber_curvature'
        if self.is_von_mises:
            ovm = 'von_mises'
        else:
            ovm = 'max_shear'
        return [fiber_dist, 'oxx', 'oyy', 'txy', 'angle', 'omax', 'omin', ovm]
class RealPlateStrainArray(RealPlateArray, StrainObject):
    """Real plate/shell strain table (OES)."""
    def __init__(self, data_code, is_sort1, isubcase, dt):
        RealPlateArray.__init__(self, data_code, is_sort1, isubcase, dt)
        StrainObject.__init__(self, data_code, isubcase)

    def get_headers(self) -> List[str]:
        """column names for the 8 strain terms"""
        if self.is_fiber_distance:
            fiber_dist = 'fiber_distance'
        else:
            fiber_dist = 'fiber_curvature'
        if self.is_von_mises:
            ovm = 'von_mises'
        else:
            ovm = 'max_shear'
        return [fiber_dist, 'exx', 'eyy', 'exy', 'angle', 'emax', 'emin', ovm]
def _get_plate_msg(self):
von_mises = 'VON MISES' if self.is_von_mises else 'MAX SHEAR'
if self.is_stress:
if self.is_fiber_distance:
quad_msg_temp = [' ELEMENT FIBER STRESSES IN ELEMENT COORD SYSTEM PRINCIPAL STRESSES (ZERO SHEAR) \n',
' ID GRID-ID DISTANCE NORMAL-X NORMAL-Y SHEAR-XY ANGLE MAJOR MINOR %s \n' % von_mises]
tri_msg_temp = [' ELEMENT FIBER STRESSES IN ELEMENT COORD SYSTEM PRINCIPAL STRESSES (ZERO SHEAR) \n',
' ID. DISTANCE NORMAL-X NORMAL-Y SHEAR-XY ANGLE MAJOR MINOR %s\n' % von_mises]
else:
quad_msg_temp = [' ELEMENT FIBER STRESSES IN ELEMENT COORD SYSTEM PRINCIPAL STRESSES (ZERO SHEAR) \n',
' ID GRID-ID CURVATURE NORMAL-X NORMAL-Y SHEAR-XY ANGLE MAJOR MINOR %s \n' % von_mises]
tri_msg_temp = [' ELEMENT FIBER STRESSES IN ELEMENT COORD SYSTEM PRINCIPAL STRESSES (ZERO SHEAR) \n',
' ID. CURVATURE NORMAL-X NORMAL-Y SHEAR-XY ANGLE MAJOR MINOR %s\n' % von_mises]
cquad4_msg = [' S T R E S S E S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D 4 )\n'] + tri_msg_temp
cquad8_msg = [' S T R E S S E S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D 8 )\n'] + tri_msg_temp
cquadr_msg = [' S T R E S S E S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D R )\n'] + tri_msg_temp
#cquadr_bilinear_msg = [' S T R E S S E S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D R ) OPTION = BILIN \n \n'] + quad_msg_temp
cquad4_bilinear_msg = [' S T R E S S E S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D 4 ) OPTION = BILIN \n \n'] + quad_msg_temp
ctria3_msg = [' S T R E S S E S I N T R I A N G U L A R E L E M E N T S ( T R I A 3 )\n'] + tri_msg_temp
ctria6_msg = [' S T R E S S E S I N T R I A N G U L A R E L E M E N T S ( T R I A 6 )\n'] + tri_msg_temp
ctriar_msg = [' S T R E S S E S I N T R I A N G U L A R E L E M E N T S ( T R I A R )\n'] + tri_msg_temp
else:
if self.is_fiber_distance:
quad_msg_temp = [' ELEMENT STRAIN STRAINS IN ELEMENT COORD SYSTEM PRINCIPAL STRAINS (ZERO SHEAR) \n',
' ID GRID-ID DISTANCE NORMAL-X NORMAL-Y SHEAR-XY ANGLE MAJOR MINOR %s \n' % von_mises]
tri_msg_temp = [' ELEMENT FIBER STRAINS IN ELEMENT COORD SYSTEM PRINCIPAL STRAINS (ZERO SHEAR) \n',
' ID. DISTANCE NORMAL-X NORMAL-Y SHEAR-XY ANGLE MAJOR MINOR %s\n' % von_mises]
else:
quad_msg_temp = [' ELEMENT STRAIN STRAINS IN ELEMENT COORD SYSTEM PRINCIPAL STRAINS (ZERO SHEAR) \n',
' ID GRID-ID CURVATURE NORMAL-X NORMAL-Y SHEAR-XY ANGLE MAJOR MINOR %s \n' % von_mises]
tri_msg_temp = [' ELEMENT STRAIN STRAINS IN ELEMENT COORD SYSTEM PRINCIPAL STRAINS (ZERO SHEAR) \n',
' ID. CURVATURE NORMAL-X NORMAL-Y SHEAR-XY ANGLE MAJOR MINOR %s\n' % von_mises]
cquad4_msg = [' S T R A I N S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D 4 )\n'] + tri_msg_temp
cquad8_msg = [' S T R A I N S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D 8 )\n'] + tri_msg_temp
cquadr_msg = [' S T R A I N S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D R )\n'] + tri_msg_temp
#cquadr_bilinear_msg = [' S T R A I N S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D R ) OPTION = BILIN \n \n'] + quad_msg_temp
cquad4_bilinear_msg = [' S T R A I N S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D 4 ) OPTION = BILIN \n \n'] + quad_msg_temp
cquadr_msg = [' S T R A I N S I N Q U A D R I L A T E R A L E L E M E N T S ( Q U A D R )\n'] + tri_msg_temp
ctria3_msg = [' S T R A I N S I N T R I A N G U L A R E L E M E N T S ( T R I A 3 )\n'] + tri_msg_temp
ctria6_msg = [' S T R A I N S I N T R I A N G U L A R E L E M E N T S ( T R I A 6 )\n'] + tri_msg_temp
ctriar_msg = [' S T R A I N S I N T R I A N G U L A R E L E M E N T S ( T R I A R )\n'] + tri_msg_temp
if self.element_type in [74, 83]:
msg = ctria3_msg
nnodes = 3
cen = 'CEN/3'
elif self.element_type == 33:
msg = cquad4_msg
nnodes = 4
cen = 'CEN/4'
#elif self.element_type == 228:
#msg = cquadr_msg
#nnodes = 4
#cen = None # 'CEN/4'
elif self.element_type == 144:
msg = cquad4_bilinear_msg
nnodes = 4
cen = 'CEN/4'
elif self.element_type in [82, 228]: # CQUADR bilinear, CQUADR linear
msg = cquadr_msg
nnodes = 4
cen = 'CEN/4'
elif self.element_type == 64: # CQUAD8
msg = cquad8_msg
nnodes = 4
cen = 'CEN/8'
elif self.element_type == 75: # CTRIA6
msg = ctria6_msg
nnodes = 3
cen = 'CEN/6'
elif self.element_type in [70, 227]:
# 70: CTRIAR bilinear
# 227: CTRIAR linear
msg = ctriar_msg
nnodes = 3
cen = 'CEN/3'
else: # pragma: no cover
raise NotImplementedError(f'name={self.element_name} type={self.element_type}')
return msg, nnodes, cen
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,636
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/utils/__init__.py
|
"""
defines:
- deprecated(old_name, new_name, deprecated_version, levels=None)
- print_bad_path(path)
- object_attributes(obj, mode='public', keys_to_skip=None)
- object_methods(obj, mode='public', keys_to_skip=None)
"""
# -*- coding: utf-8 -*-
from types import MethodType, FunctionType
import os
import io
import sys
import getpass
import inspect
import warnings
from pathlib import PurePath
from abc import abstractmethod
from typing import List, Optional, Union, Any
import pyNastran
def ipython_info() -> Optional[str]:
    """Return the active IPython/Jupyter shell if one is running, else None."""
    try:
        shell = get_ipython()  # injected into builtins by IPython at runtime
    except NameError:
        shell = None
    return shell
def is_file_obj(filename: str) -> bool:
    """Return True when *filename* behaves like an open file object."""
    if isinstance(filename, (io.IOBase, io.StringIO)):
        return True
    return hasattr(filename, 'read') and hasattr(filename, 'write')
def b(string: str) -> bytes:
    """Encode *string* as latin-1 bytes (replacement for six.b)."""
    return bytes(string, 'latin-1')
#def merge_dicts(dict_list, strict: bool=True):
#"""merges two or more dictionaries"""
#assert isinstance(dict_list, list), type(dict_list)
#dict_out = {}
#for adict in dict_list:
#assert isinstance(adict, dict), adict
#for key, value in adict.items():
#if key not in dict_out:
#dict_out[key] = value
#elif strict:
#raise RuntimeError('key=%r exists in multiple dictionaries' % key)
#else:
#print('key=%r is dropped?' % key)
#return dict_out
def remove_files(filenames):
    """Removes a series of files; quietly continues if a file can't be removed.

    Parameters
    ----------
    filenames : iterable of str
        the paths to delete (missing paths are silently skipped)
    """
    for filename in filenames:
        try:
            os.remove(filename)
        except OSError:
            # best-effort cleanup: missing file / permission error -> keep going
            pass
def is_binary_file(filename: Union[str, PurePath]) -> bool:
    """
    Return True if the given filename is a binary file.

    A file is treated as binary when it contains a null byte
    (.. seealso:: http://bytes.com/topic/python/answers/21222-determine-file-type-binary-text).

    Parameters
    ----------
    filename : str / PurePath
        the filename to test

    Returns
    -------
    binary_flag : bool
        True if the file contains a null byte, False otherwise

    :raises: IOError if the file cannot be opened.

    .. warning:: this may not work for unicode.
    """
    assert isinstance(filename, (str, PurePath)), f'{filename!r} is not a valid filename'
    check_path(filename)
    with io.open(filename, mode='rb') as fil:
        while True:
            chunk = fil.read(1024)
            if not chunk:
                break
            if b'\0' in chunk:  # found null byte
                return True
    return False
def check_path(filename: str, name: str='file') -> None:
    """Raise FileNotFoundError (with a path diagnostic) if *filename* is missing."""
    try:
        found = os.path.exists(filename)
    except TypeError:
        # e.g. filename=None / a non-path object
        raise TypeError('cannot find %s=%r\n' % (name, filename))
    if not found:
        msg = 'cannot find %s=%r\n%s' % (name, filename, print_bad_path(filename))
        raise FileNotFoundError(msg)
def print_bad_path(path: str) -> str:
    """
    Build a report showing which ancestors of *path* exist (are accessible).
    Useful for debugging when the path to a given file is wrong.

    Parameters
    ----------
    path : str
        path to check

    Returns
    -------
    msg : str
        one 'passed: <dir>' / 'failed: <dir>' line per path component
    """
    status = {True: 'passed', False: 'failed'}
    if len(path) > 255:
        # Windows long-path form; the \\?\ prefix is stripped for display
        current = os.path.abspath(_filename(path))
        chain = [current]
        parent = os.path.dirname(current)
        while current != parent:
            current, parent = parent, os.path.dirname(parent)
            chain.append(current)
        return '\n'.join(['%s: %s' % (status[os.path.exists(i)], i[4:]) for i in chain])
    current = os.path.abspath(path)
    chain = [current]
    parent = os.path.dirname(current)
    while current != parent:
        current, parent = parent, os.path.dirname(parent)
        chain.append(current)
    return '\n'.join(['%s: %s' % (status[os.path.exists(i)], i) for i in chain])
def _filename(filename: str) -> str:
    """
    Prepend the long-path magic prefix when *filename* exceeds 255 characters.

    .. warning:: This might be Windows specific.
    """
    return '\\\\?\\' + filename if len(filename) > 255 else filename
def __object_attr(obj, mode, keys_to_skip, attr_type, filter_properties: bool=False):
    """Collect sorted attribute names of *obj* accepted by *attr_type*, honoring *mode*."""
    skip = [] if keys_to_skip is None else keys_to_skip
    predicates = {
        'public': lambda k: not k.startswith('_') and k not in skip,
        'private': lambda k: (k.startswith('_') and not k.startswith('__')
                              and k not in skip),
        'both': lambda k: not k.startswith('__') and k not in skip,
        'all': lambda k: k not in skip,
    }
    if mode not in predicates:
        raise ValueError('Wrong mode! Accepted modes: public, private, both, all.')
    accept = predicates[mode]

    obj_type = type(obj)
    names = []
    for key in dir(obj):
        if key in skip or not accept(key):
            continue
        try:
            value = getattr(obj, key)
            if not attr_type(value):
                continue
            if filter_properties and isinstance(getattr(obj_type, key, None), property):
                # skip @property attributes when requested
                continue
            names.append(key)
        except Exception:
            # getattr may raise (e.g. deprecated properties); skip those names
            pass
    names.sort()
    return names
#return sorted([k for k in dir(obj) if (check(k) and
# attr_type(getattr(obj, k)))])
def object_methods(obj: Any, mode: str='public',
                   keys_to_skip: Optional[List[str]]=None) -> List[str]:
    """
    List the names of the methods of *obj* as strings (public by default).

    Parameters
    ----------
    obj : instance
        the object for checking
    mode : str
        defines what kind of methods will be listed
        * "public" - names that do not begin with underscore
        * "private" - names that begin with single underscore
        * "both" - private and public
        * "all" - all methods that are defined for the object
    keys_to_skip : List[str]; default=None -> []
        names to not consider to avoid deprecation warnings

    Returns
    -------
    method : List[str]
        sorted list of the names of methods of a given type
        or None if the mode is wrong
    """
    def _is_method(value):
        return isinstance(value, MethodType)
    return __object_attr(obj, mode, keys_to_skip, _is_method)
def object_stats(obj: Any, mode: str='public',
                 keys_to_skip: Optional[List[str]]=None,
                 filter_properties: bool=False) -> str:
    """Return an easy-to-read, one-attribute-per-line summary of *obj*."""
    attrs = object_attributes(
        obj, mode=mode, keys_to_skip=keys_to_skip,
        filter_properties=filter_properties)
    lines = ['%s:\n' % obj.__class__.__name__]
    for name in sorted(attrs):
        lines.append('  %-6s : %r\n' % (name, getattr(obj, name)))
    return ''.join(lines)
def object_attributes(obj: Any, mode: str='public',
                      keys_to_skip: Optional[List[str]]=None,
                      filter_properties: bool=False) -> List[str]:
    """
    List the names of attributes of *obj* as strings (public by default).

    Parameters
    ----------
    obj : instance
        the object for checking
    mode : str
        defines what kind of attributes will be listed
        * 'public' - names that do not begin with underscore
        * 'private' - names that begin with single underscore
        * 'both' - private and public
        * 'all' - all attributes that are defined for the object
    keys_to_skip : List[str]; default=None -> []
        names to not consider to avoid deprecation warnings
    filter_properties: bool: default=False
        filters the @property objects

    Returns
    -------
    attribute_names : List[str]
        sorted list of the names of attributes of a given type or None
        if the mode is wrong
    """
    def _is_data(value):
        # anything that is not a bound method / plain function is "data"
        return not isinstance(value, (MethodType, FunctionType))
    return __object_attr(
        obj, mode, keys_to_skip, _is_data,
        filter_properties=filter_properties,
    )
#def remove_files(*filenames):
#"""delete a list of files"""
#failed_list = []
#for filename in filenames:
#try:
#os.remove(filename)
#except OSError: # OSError is the general version of WindowsError
#failed_list.append(filename)
#return failed_list
def int_version(name: str, version: str) -> List[int]:
    """Split a version string like '1.20.0rc1' into a list of ints [1, 20, 0]."""
    # drop '-post'/'+dev' style suffixes first
    base = version.split('-')[0].split('+')[0]
    # strip a release-candidate suffix unless the package *name* itself
    # contains 'rc' (e.g. 'matplotlib3.1rc1'), which would make this ambiguous
    if 'rc' not in name:
        base = base.split('rc')[0]
    try:
        return [int(piece) for piece in base.split('.')]
    except ValueError:
        raise SyntaxError('cannot determine version for %s %s' % (name, base))
def deprecated(old_name: str, new_name: str, deprecated_version: str,
               levels: Optional[List[int]]=None) -> None:
    """
    Throws a deprecation message and crashes if past a specific version.

    Parameters
    ----------
    old_name : str
        the old function name
    new_name : str
        the new function name
    deprecated_version : float
        the version the method was first deprecated in
    levels : List[int]
        the deprecation levels to show
        [1, 2, 3] shows 3 levels up from this function (good for classes)
        None : ???
        NOTE(review): the default is None, but the assert below requires a
        list, so callers must always pass levels explicitly -- confirm intent.

    TODO: turn this into a decorator?
    """
    assert isinstance(deprecated_version, str), type(deprecated_version)
    assert isinstance(levels, list), type(levels)
    assert old_name != new_name, "'%s' and '%s' are the same..." % (old_name, new_name)

    # compare only major.minor of the running version against the
    # deprecation version
    version = pyNastran.__version__.split('_')[0]
    dep_ver_tuple = tuple([int(i) for i in deprecated_version.split('.')])
    ver_tuple = tuple([int(i) for i in version.split('.')[:2]])

    #new_line = ''
    msg = "'%s' was deprecated in v%s (current=%s)" % (
        old_name, deprecated_version, version)
    if new_name:
        msg += "; replace it with '%s'\n" % new_name

    # append one caller-frame line per requested level so the warning shows
    # where the deprecated call came from
    for level in levels:
        # jump to get out of the inspection code
        frame = sys._getframe(3 + level)
        line_no = frame.f_lineno
        code = frame.f_code
        try:
            #filename = os.path.basename(frame.f_globals['__file__'])
            filename = os.path.basename(inspect.getfile(code))
        except Exception:
            print(code)
            raise

        source_lines, line_no0 = inspect.getsourcelines(code)
        delta_nlines = line_no - line_no0
        try:
            line = source_lines[delta_nlines]
        except IndexError:
            # frame points past the retrievable source; stop adding context
            break
        msg += '  %-25s:%-4s %s\n' % (filename, str(line_no) + ';', line.strip())

    user_name = getpass.getuser()
    if ver_tuple > dep_ver_tuple:  # or 'id' in msg:
        # fail hard once the current version is newer than the deprecation one
        raise NotImplementedError(msg)
    elif user_name not in ['sdoyle', 'travis']:
        # dev/CI accounts are spared the warning noise
        warnings.warn(msg, DeprecationWarning)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,637
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/bdf_interface/attributes.py
|
"""defines the BDF attributes"""
from __future__ import annotations
from collections import defaultdict
from typing import List, Dict, Optional, Any, Union, TYPE_CHECKING
from numpy import array # type: ignore
from pyNastran.utils import object_attributes, object_methods, deprecated
#from pyNastran.bdf.case_control_deck import CaseControlDeck
from pyNastran.bdf.cards.coordinate_systems import CORD2R
#from pyNastran.bdf.cards.constraints import ConstraintObject
from pyNastran.bdf.cards.aero.zona import ZONA
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.cards.dmig import DMIG, DMI, DMIJ, DMIK, DMIJI
# recognized Nastran dialect names (see the set_as_* methods below)
BDF_FORMATS = {'nx', 'msc', 'optistruct', 'zona', 'nasa95', 'mystran'}
class BDFAttributes:
"""defines attributes of the BDF"""
def __init__(self):
"""creates the attributes for the BDF"""
self.__init_attributes()
self._is_cards_dict = False
self.is_nx = False
self.is_msc = False
self.is_mystran = False
self.is_nasa95 = False
self.is_zona = False
self.save_file_structure = False
self.is_superelements = False
self.set_as_msc()
self.units = [] # type: List[str]
def set_as_msc(self):
self._nastran_format = 'msc'
self.is_nx = False
self.is_msc = True
self.is_optistruct = False
self.is_mystran = False
self.is_nasa95 = False
self.is_zona = False
def set_as_nx(self):
self._nastran_format = 'nx'
self.is_nx = True
self.is_msc = False
self.is_optistruct = False
self.is_mystran = False
self.is_nasa95 = False
self.is_zona = False
def set_as_optistruct(self):
self._nastran_format = 'optistruct'
self.is_nx = False
self.is_msc = False
self.is_optistruct = True
self.is_mystran = False
self.is_nasa95 = False
self.is_zona = False
def set_as_zona(self):
self._nastran_format = 'zona'
self.is_nx = False
self.is_msc = False
self.is_optistruct = False
self.is_mystran = False
self.is_nasa95 = False
self.is_zona = True
def set_as_mystran(self):
self._nastran_format = 'mystran'
self.is_nx = False
self.is_msc = False
self.is_optistruct = False
self.is_mystran = True
self.is_nasa95 = False
self.is_zona = False
self._update_for_mystran()
def set_as_nasa95(self):
self._nastran_format = 'nasa95'
self.is_nx = False
self.is_msc = False
self.is_optistruct = False
self.is_mystran = False
self.is_nasa95 = True
self.is_zona = False
self._update_for_nasa95()
def __properties__(self):
"""the list of @property attributes"""
return ['nastran_format', 'is_long_ids', 'sol', 'subcases',
'nnodes', 'node_ids', 'point_ids', 'npoints',
'nelements', 'element_ids', 'nproperties', 'property_ids',
'nmaterials', 'material_ids', 'ncoords', 'coord_ids',
'ncaeros', 'caero_ids', 'wtmass', 'is_bdf_vectorized', 'nid_map']
def object_attributes(self, mode: str='public',
keys_to_skip: Optional[List[str]]=None,
filter_properties: bool=False) -> List[str]:
"""
List the names of attributes of a class as strings. Returns public
attributes as default.
Parameters
----------
mode : str
defines what kind of attributes will be listed
* 'public' - names that do not begin with underscore
* 'private' - names that begin with single underscore
* 'both' - private and public
* 'all' - all attributes that are defined for the object
keys_to_skip : List[str]; default=None -> []
names to not consider to avoid deprecation warnings
filter_properties: bool: default=False
filters the @property objects
Returns
-------
attribute_names : List[str]
sorted list of the names of attributes of a given type or None
if the mode is wrong
"""
if keys_to_skip is None:
keys_to_skip = []
my_keys_to_skip = [
#'case_control_deck',
'log',
'node_ids', 'coord_ids', 'element_ids', 'property_ids',
'material_ids', 'caero_ids', 'is_long_ids',
'nnodes', 'ncoords', 'nelements', 'nproperties',
'nmaterials', 'ncaeros', 'npoints',
'point_ids', 'subcases',
'_card_parser', '_card_parser_b', '_card_parser_prepare',
'object_methods', 'object_attributes',
]
return object_attributes(self, mode=mode, keys_to_skip=keys_to_skip+my_keys_to_skip,
filter_properties=filter_properties)
def object_methods(self, mode: str='public', keys_to_skip: Optional[List[str]]=None) -> List[str]:
"""
List the names of methods of a class as strings. Returns public methods
as default.
Parameters
----------
obj : instance
the object for checking
mode : str
defines what kind of methods will be listed
* "public" - names that do not begin with underscore
* "private" - names that begin with single underscore
* "both" - private and public
* "all" - all methods that are defined for the object
keys_to_skip : List[str]; default=None -> []
names to not consider to avoid deprecation warnings
Returns
-------
method : List[str]
sorted list of the names of methods of a given type
or None if the mode is wrong
"""
if keys_to_skip is None:
keys_to_skip = []
my_keys_to_skip = [] # type: List[str]
my_keys_to_skip = [
#'case_control_deck',
'log', #'mpcObject', 'spcObject',
'node_ids', 'coord_ids', 'element_ids', 'property_ids',
'material_ids', 'caero_ids', 'is_long_ids',
'nnodes', 'ncoords', 'nelements', 'nproperties',
'nmaterials', 'ncaeros',
'point_ids', 'subcases',
'_card_parser', '_card_parser_b',
'object_methods', 'object_attributes',
]
return object_methods(self, mode=mode, keys_to_skip=keys_to_skip+my_keys_to_skip)
def deprecated(self, old_name: str, new_name: str, deprecated_version: str) -> None:
"""deprecates methods"""
return deprecated(old_name, new_name, deprecated_version, levels=[0, 1, 2])
def clear_attributes(self) -> None:
"""removes the attributes from the model"""
self.__init_attributes()
self.nodes = {}
self.loads = {} # type: Dict[int, List[Any]]
self.load_combinations = {} # type: Dict[int, List[Any]]
def reset_errors(self) -> None:
"""removes the errors from the model"""
self._ixref_errors = 0
self._stored_xref_errors = []
def __init_attributes(self) -> None:
"""
Creates storage objects for the BDF object.
This would be in the init but doing it this way allows for better
inheritance
References:
1. http://www.mscsoftware.com/support/library/conf/wuc87/p02387.pdf
"""
self.reset_errors()
self.bdf_filename = None
self.punch = None
self._encoding = None
self._is_long_ids = False # ids > 8 characters
#: ignore any ECHOON flags
self.force_echo_off = True
#: list of Nastran SYSTEM commands
self.system_command_lines = [] # type: List[str]
#: list of execive control deck lines
self.executive_control_lines = [] # type: List[str]
#: list of case control deck lines
self.case_control_lines = [] # type: List[str]
# dictionary of BDFs
self.superelement_models = {}
self.initial_superelement_models = [] # the keys before superelement mirroring
self._auto_reject = False
self._solmap_to_value = {
'NONLIN': 101, # 66 -> 101 per Reference 1
'SESTATIC': 101,
'SESTATICS': 101,
'SEMODES': 103,
'BUCKLING': 105,
'SEBUCKL': 105,
'NLSTATIC': 106,
'SEDCEIG': 107,
'SEDFREQ': 108,
'SEDTRAN': 109,
'SEMCEIG': 110,
'SEMFREQ': 111,
'SEMTRAN': 112,
'CYCSTATX': 114,
'CYCMODE': 115,
'CYCBUCKL': 116,
'CYCFREQ': 118,
'NLTRAN': 129,
'AESTAT': 144,
'FLUTTR': 145,
'SEAERO': 146,
'NLSCSH': 153,
'NLTCSH': 159,
'DBTRANS': 190,
'DESOPT': 200,
# guessing
#'CTRAN' : 115,
'CFREQ' : 118,
# solution 200 names
'STATICS': 101,
'MODES': 103,
'BUCK': 105,
'DFREQ': 108,
'MFREQ': 111,
'MTRAN': 112,
'DCEIG': 107,
'MCEIG': 110,
#'HEAT' : None,
#'STRUCTURE': None,
#'DIVERGE' : None,
'FLUTTER': 145,
'SAERO': 146,
}
self.rsolmap_to_str = {
66: 'NONLIN',
101: 'SESTSTATIC', # linear static
103: 'SEMODES', # modal
105: 'BUCKLING', # buckling
106: 'NLSTATIC', # non-linear static
107: 'SEDCEIG', # direct complex frequency response
108: 'SEDFREQ', # direct frequency response
109: 'SEDTRAN', # direct transient response
110: 'SEMCEIG', # modal complex eigenvalue
111: 'SEMFREQ', # modal frequency response
112: 'SEMTRAN', # modal transient response
114: 'CYCSTATX',
115: 'CYCMODE',
116: 'CYCBUCKL',
118: 'CYCFREQ',
129: 'NLTRAN', # nonlinear transient
144: 'AESTAT', # static aeroelastic
145: 'FLUTTR', # flutter/aeroservoelastic
146: 'SEAERO', # dynamic aeroelastic
153: 'NLSCSH', # nonlinear static thermal
159: 'NLTCSH', # nonlinear transient thermal
#187 - Dynamic Design Analysis Method
190: 'DBTRANS',
200: 'DESOPT', # optimization
}
# ------------------------ bad duplicates ----------------------------
self._iparse_errors = 0
self._nparse_errors = 0
self._stop_on_parsing_error = True
self._stop_on_duplicate_error = True
self._stored_parse_errors = [] # type: List[str]
self._duplicate_nodes = [] # type: List[str]
self._duplicate_elements = [] # type: List[str]
self._duplicate_properties = [] # type: List[str]
self._duplicate_materials = [] # type: List[str]
self._duplicate_masses = [] # type: List[str]
self._duplicate_thermal_materials = [] # type: List[str]
self._duplicate_coords = [] # type: List[str]
self.values_to_skip = {} # type: Dict[str, List[int]]
# ------------------------ structural defaults -----------------------
#: the analysis type
self._sol = None
#: used in solution 600, method
self.sol_method = None
#: the line with SOL on it, marks ???
self.sol_iline = None # type : Optional[int]
self.case_control_deck = None # type: Optional[Any]
#: store the PARAM cards
self.params = {} # type: Dict[str, PARAM]
self.mdlprm = None # type: MDLPRM
# ------------------------------- nodes -------------------------------
# main structural block
#: stores POINT cards
self.points = {} # type: Dict[int, POINT]
#self.grids = {}
self.spoints = {} # type: Dict[int, SPOINT]
self.epoints = {} # type: Dict[int, EPOINT]
#: stores GRIDSET card
self.grdset = None # type: Optional[GRDSET]
#: stores SEQGP cards
self.seqgp = None # type: Optional[SEQGP]
## stores RINGAX
self.ringaxs = {} # type: Dict[int, RINGAX]
## stores GRIDB
self.gridb = {} # type: Dict[int, GRIDB]
#: stores elements (CQUAD4, CTRIA3, CHEXA8, CTETRA4, CROD, CONROD,
#: etc.)
self.elements = {} # type: Dict[int, Any]
#: stores CBARAO, CBEAMAO
self.ao_element_flags = {} # type: Dict[int, Any]
#: stores BAROR
self.baror = None # type: Optional[BAROR]
#: stores BEAMOR
self.beamor = None # type: Optional[BEAMOR]
#: stores SNORM
self.normals = {} # type: Dict[int, SNORM]
#: stores rigid elements (RBE2, RBE3, RJOINT, etc.)
self.rigid_elements = {} # type: Dict[int, Any]
#: stores PLOTELs
self.plotels = {} # type: Optional[PLOTEL]
#: stores CONM1, CONM2, CMASS1,CMASS2, CMASS3, CMASS4, CMASS5
self.masses = {} # type: Dict[int, Any]
#: stores PMASS
self.properties_mass = {} # type: Dict[int, Any]
#: stores NSM, NSM1, NSML, NSML1
self.nsms = {} # type: Dict[int, List[Any]]
#: stores NSMADD
self.nsmadds = {} # type: Dict[int, List[Any]]
#: stores LOTS of propeties (PBAR, PBEAM, PSHELL, PCOMP, etc.)
self.properties = {} # type: Dict[int, Any]
#: stores MAT1, MAT2, MAT3, MAT8, MAT10, MAT11
self.materials = {} # type: Dict[int, Any]
#: defines the MAT4, MAT5
self.thermal_materials = {} # type: Dict[int, Any]
#: defines the MATHE, MATHP
self.hyperelastic_materials = {} # type: Dict[int, Any]
#: stores MATSx
self.MATS1 = {} # type: Dict[int, Any]
self.MATS3 = {} # type: Dict[int, Any]
self.MATS8 = {} # type: Dict[int, Any]
#: stores MATTx
self.MATT1 = {} # type: Dict[int, Any]
self.MATT2 = {} # type: Dict[int, Any]
self.MATT3 = {} # type: Dict[int, Any]
self.MATT4 = {} # type: Dict[int, Any]
self.MATT5 = {} # type: Dict[int, Any]
self.MATT8 = {} # type: Dict[int, Any]
self.MATT9 = {} # type: Dict[int, Any]
self.nxstrats = {} # type: Dict[int, Any]
#: stores the CREEP card
self.creep_materials = {} # type: Dict[int, Any]
self.tics = {} # type: Optional[Any]
# stores DLOAD entries.
self.dloads = {} # type: Dict[int, Any]
# stores ACSRCE, RLOAD1, RLOAD2, TLOAD1, TLOAD2, and ACSRCE,
# and QVECT entries.
self.dload_entries = {} # type: Dict[int, Any]
#self.gusts = {} # Case Control GUST = 100
#self.random = {} # Case Control RANDOM = 100
#: stores coordinate systems
origin = array([0., 0., 0.])
zaxis = array([0., 0., 1.])
xzplane = array([1., 0., 0.])
coord = CORD2R(cid=0, rid=0, origin=origin, zaxis=zaxis, xzplane=xzplane)
self.coords = {0 : coord} # type: Dict[int, Any]
# --------------------------- constraints ----------------------------
#: stores SUPORT1s
#self.constraints = {} # suport1, anything else???
self.suport = [] # type: List[Any]
self.suport1 = {} # type: Dict[int, Any]
self.se_suport = [] # type: List[Any]
#: stores SPC, SPC1, SPCAX, GMSPC
self.spcs = {} # type: Dict[int, List[Any]]
#: stores SPCADD
self.spcadds = {} # type: Dict[int, List[Any]]
self.spcoffs = {} # type: Dict[int, List[Any]]
self.mpcs = {} # type: Dict[int, List[Any]]
self.mpcadds = {} # type: Dict[int, List[Any]]
# --------------------------- dynamic ----------------------------
#: stores DAREA
self.dareas = {} # type: Dict[int, Any]
self.dphases = {} # type: Dict[int, Any]
self.pbusht = {} # type: Dict[int, Any]
self.pdampt = {} # type: Dict[int, Any]
self.pelast = {} # type: Dict[int, Any]
#: frequencies
self.frequencies = {} # type: Dict[int, List[Any]]
# ----------------------------------------------------------------
#: direct matrix input - DMIG
self.dmi = {} # type: Dict[str, Any]
self.dmig = {} # type: Dict[str, Any]
self.dmij = {} # type: Dict[str, Any]
self.dmiji = {} # type: Dict[str, Any]
self.dmik = {} # type: Dict[str, Any]
self.dmiax = {} # type: Dict[str, Any]
self.dti = {} # type: Dict[str, Any]
self._dmig_temp = defaultdict(list) # type: Dict[str, List[str]]
# ----------------------------------------------------------------
#: SETy
self.sets = {} # type: Dict[int, Any]
self.asets = [] # type: List[Any]
self.omits = [] # type: List[Any]
self.bsets = [] # type: List[Any]
self.csets = [] # type: List[Any]
self.qsets = [] # type: List[Any]
self.usets = {} # type: Dict[str, Any]
#: SExSETy
self.se_bsets = [] # type: List[Any]
self.se_csets = [] # type: List[Any]
self.se_qsets = [] # type: List[Any]
self.se_usets = {} # type: Dict[str, Any]
self.se_sets = {} # type: Dict[str, Any]
# ----------------------------------------------------------------
#: parametric
self.pset = {}
self.pval = {}
self.gmcurv = {}
self.gmsurf = {}
self.feedge = {}
self.feface = {}
# ----------------------------------------------------------------
#: tables
# TABLES1, ...
self.tables = {} # type: Dict[int, TABLES1]
# TABLEDx
self.tables_d = {} # type: Dict[int, Union[TABLED1, TABLED2, TABLED3, TABLED4]]
# TABLEMx
self.tables_m = {} # type: Dict[int, Union[TABLEM1, TABLEM2, TABLEM3, TABLEM4]]
#: random_tables
self.random_tables = {} # type: Dict[int, Any]
#: TABDMP1
self.tables_sdamping = {} # type: Dict[int, TABDMP1]
# ----------------------------------------------------------------
#: EIGB, EIGR, EIGRL methods
self.methods = {} # type: Dict[int, Union[EIGR, EIGRL, EIGB]]
# EIGC, EIGP methods
self.cMethods = {} # type: Dict[int, Union[EIGC, EIGP]]
# ---------------------------- optimization --------------------------
# optimization
self.dconadds = {} # type: Dict[int, DCONADD]
self.dconstrs = {} # type: Dict[int, DCONSTR]
self.desvars = {} # type: Dict[int, DESVAR]
self.topvar = {} # type: Dict[int, TOPVAR]
self.ddvals = {} # type: Dict[int, DDVAL]
self.dlinks = {} # type: Dict[int, DLINK]
self.dresps = {} # type: Dict[int, Union[DRESP1, DRESP2, DRESP3]]
self.dtable = None # type: Optional[DTABLE]
self.dequations = {} # type: Dict[int, DEQATN]
#: stores DVPREL1, DVPREL2...might change to DVxRel
self.dvprels = {} # type: Dict[int, Union[DVPREL1, DVPREL2]]
self.dvmrels = {} # type: Dict[int, Union[DVMREL1, DVMREL2]]
self.dvcrels = {} # type: Dict[int, Union[DVCREL1, DVCREL2]]
self.dvgrids = {} # type: Dict[int, DVGRID]
self.doptprm = None # type: Optional[DOPTPRM]
self.dscreen = {} # type: Dict[int, DSCREEN]
# nx optimization
self.group = {} # type: Dict[int, GROUP]
self.dmncon = {} # type: Dict[int, DMNCON]
self.dvtrels = {} # type: Dict[int, Union[DVTREL1, DVTREL2]]
# ------------------------- nonlinear defaults -----------------------
#: stores NLPCI
self.nlpcis = {} # type: Dict[int, NLPCI]
#: stores NLPARM
self.nlparms = {} # type: Dict[int, NLPARM]
#: stores TSTEPs, TSTEP1s
self.tsteps = {} # type: Dict[int, Union[TSTEP, TSTEP1]]
#: stores TSTEPNL
self.tstepnls = {} # type: Dict[int, TSTEPNL]
#: stores TF
self.transfer_functions = {} # type: Dict[int, TF]
#: stores DELAY
self.delays = {} # type: Dict[int, DELAY]
#: stores ROTORD, ROTORG
self.rotors = {} # type: Dict[int, Union[ROTORD, ROTORG]]
# --------------------------- aero defaults --------------------------
# aero cards
#: stores CAEROx
self.caeros = {} # type: Dict[int, Union[CAERO1, CAERO2, CAERO3, CAERO4, CAERO5]]
#: stores PAEROx
self.paeros = {} # type: Dict[int, Union[PAERO1, PAERO2, PAERO3, PAERO4, PAERO5]]
# stores MONPNT1
self.monitor_points = [] # type: List[Union[MONPNT1, MONPNT2, MONPNT3]]
#: stores AECOMP
self.aecomps = {} # type: Dict[int, AECOMP]
#: stores AEFACT
self.aefacts = {} # type: Dict[int, AEFACT]
#: stores AELINK
self.aelinks = {} # type: Dict[int, List[AELINK]]
#: stores AELIST
self.aelists = {} # type: Dict[int, AELIST]
#: stores AEPARAM
self.aeparams = {} # type: Dict[int, AEPARAM]
#: stores AESURF
self.aesurf = {} # type: Dict[int, AESURF]
#: stores AESURFS
self.aesurfs = {} # type: Dict[int, AESURFS]
#: stores AESTAT
self.aestats = {} # type: Dict[int, AESTAT]
#: stores CSSCHD
self.csschds = {} # type: Dict[int, CSSCHD]
#: store SPLINE1,SPLINE2,SPLINE4,SPLINE5
self.splines = {} # type: Dict[int, Union[SPLINE1, SPLINE2, SPLINE3, SPLINE4, SPLINE5]]
self.zona = ZONA(self)
# axisymmetric
self.axic = None # type: Optional[AXIC]
self.axif = None # type: Optional[AXIF]
self.ringfl = {} # type: Dict[int, RINGFL]
self._is_axis_symmetric = False
# cyclic
self.cyax = None # type: Optional[CYAX]
self.cyjoin = {} # type: Dict[int, CYJOIN]
self.modtrak = None # type: Optional[MODTRAK]
# acoustic
self.acmodl = None
# ------ SOL 144 ------
#: stores AEROS
self.aeros = None # type: Optional[AEROS]
#: stores TRIM, TRIM2
self.trims = {} # type: Dict[int, Union[TRIM, TRIM2]]
#: stores DIVERG
self.divergs = {} # type: Dict[int, DIVERG]
# ------ SOL 145 ------
#: stores AERO
self.aero = None # type: Optional[AERO]
#: stores FLFACT
self.flfacts = {} # type: Dict[int, FLFACT]
#: stores FLUTTER
self.flutters = {} # type: Dict[int, FLUTTER]
#: mkaeros
self.mkaeros = [] # type: List[Union[MKAERO1,MKAERO2]]
# ------ SOL 146 ------
#: stores GUST cards
self.gusts = {} # type: Dict[int, GUST]
# ------------------------- thermal defaults -------------------------
# BCs
#: stores thermal boundary conditions - CONV,RADBC
self.bcs = {} # type: Dict[int, Union[CONV, RADBC]]
#: stores PHBDY
self.phbdys = {} # type: Dict[int, PHBDY]
#: stores convection properties - PCONV, PCONVM ???
self.convection_properties = {} # type: Dict[int, Union[PCONV, PCONVM]]
#: stores TEMPD
self.tempds = {} # type: Dict[int, TEMPD]
#: stores VIEW
self.views = {} # type: Dict[int, VIEW]
#: stores VIEW3D
self.view3ds = {} # type: Dict[int, VIEW3D]
self.radset = None
self.radcavs = {} # type: Dict[int, RADCAV]
self.radmtx = {} # type: Dict[int, RADMTX]
# -------------------------contact cards-------------------------------
self.bcbodys = {} # type: Dict[int, BCBODY]
self.bcparas = {} # type: Dict[int, BCPARA]
self.bcrparas = {} # type: Dict[int, BCRPARA]
self.bctparas = {} # type: Dict[int, BCTPARA]
self.bctadds = {} # type: Dict[int, BCTADD]
self.bctsets = {} # type: Dict[int, BCTSET]
self.bsurf = {} # type: Dict[int, BSURF]
self.bsurfs = {} # type: Dict[int, BSURFS]
self.bconp = {} # type: Dict[int, BCONP]
self.blseg = {} # type: Dict[int, BLSEG]
self.bfric = {} # type: Dict[int, BFRIC]
self.bgadds = {} # type: Dict[int, BGADD]
self.bgsets = {} # type: Dict[int, BGSET]
self.bctparms = {} # type: Dict[int, BCTPARAM]
#--------------------------superelements------------------------------
self.setree = {} # type: Dict[int, SETREE]
self.senqset = {} # type: Dict[int, Union[SENQSET, SENQSET1]]
self.sebulk = {} # type: Dict[int, SEBULK]
self.sebndry = {} # type: Dict[int, SEBNDRY]
self.release = {} # type: Dict[int, RELEASE]
self.seloc = {} # type: Dict[int, SELOC]
self.sempln = {} # type: Dict[int, SEMPLN]
self.seconct = {} # type: Dict[int, SECONCT]
self.selabel = {} # type: Dict[int, SELABEL]
self.seexcld = {} # type: Dict[int, SEEXCLD]
self.seelt = {} # type: Dict[int, SEELT]
self.seload = {} # type: Dict[int, SELOAD]
self.csuper = {} # type: Dict[int, CSUPER]
self.csupext = {} # type: Dict[int, CSUPEXT]
# ---------------------------------------------------------------------
self._type_to_id_map = defaultdict(list) # type: Dict[int, List[Any]]
self._slot_to_type_map = {
'params' : ['PARAM'],
'mdlprm': ['MDLPRM'],
'nodes' : ['GRID', 'SPOINT', 'EPOINT'], # 'RINGAX',
'points' : ['POINT'],
'ringaxs' : ['RINGAX', 'POINTAX'],
'ringfl' : ['RINGFL'],
'axic' : ['AXIC'],
'axif' : ['AXIF'],
'acmodl' : ['ACMODL'],
'grdset' : ['GRDSET'],
'gridb' : ['GRIDB'],
'seqgp' : ['SEQGP'],
'ao_element_flags' : ['CBARAO'],
#'POINTAX', 'RINGAX',
# CMASS4 lies in the QRG
'masses' : ['CONM1', 'CONM2', 'CMASS1', 'CMASS2', 'CMASS3', 'CMASS4'],
'elements' : [
'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4',
# 'CELAS5',
'CBUSH', 'CBUSH1D', 'CBUSH2D',
'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4', 'CDAMP5',
'CFAST', 'GENEL',
'CBAR', 'CROD', 'CTUBE', 'CBEAM', 'CBEAM3', 'CONROD', 'CBEND',
'CTRIA3', 'CTRIA6', 'CTRIAR',
'CQUAD4', 'CQUAD8', 'CQUADR', 'CQUAD',
'CPLSTN3', 'CPLSTN6', 'CPLSTN4', 'CPLSTN8',
'CPLSTS3', 'CPLSTS6', 'CPLSTS4', 'CPLSTS8',
'CTRAX3', 'CTRAX6', 'CTRIAX', 'CTRIAX6',
'CQUADX', 'CQUADX4', 'CQUADX8',
'CCONEAX',
'CTETRA', 'CPYRAM', 'CPENTA', 'CHEXA', 'CIHEX1', 'CIHEX2', 'CHEXA1', 'CHEXA2',
'CSHEAR', 'CVISC', 'CRAC2D', 'CRAC3D',
'CGAP',
# thermal
'CHBDYE', 'CHBDYG', 'CHBDYP',
# acoustic
'CHACAB', 'CAABSF', 'CHACBR',
],
'normals' : ['SNORM'],
'nsms' : ['NSM', 'NSM1', 'NSML', 'NSML1'],
'nsmadds' : ['NSMADD'],
'rigid_elements' : ['RBAR', 'RBAR1', 'RBE1', 'RBE2', 'RBE3', 'RROD', 'RSPLINE', 'RSSCON'],
'plotels' : ['PLOTEL'],
'properties_mass' : ['PMASS'],
#'properties_acoustic' : ['PACABS'],
'properties' : [
# acoustic
'PACABS', 'PAABSF', 'PACBAR',
# 0d
'PELAS', 'PGAP', 'PFAST',
'PBUSH', 'PBUSH1D',
'PDAMP', 'PDAMP5',
# 1d
'PROD', 'PBAR', 'PBARL', 'PBEAM', 'PTUBE', 'PBEND', 'PBCOMP', 'PBRSECT', 'PBMSECT',
'PBEAML', # not fully supported
'PBEAM3',
# 2d
'PLPLANE', 'PPLANE',
'PSHELL', 'PCOMP', 'PCOMPG', 'PSHEAR',
'PSOLID', 'PLSOLID', 'PVISC', 'PRAC2D', 'PRAC3D',
'PIHEX', 'PCOMPS', 'PCOMPLS',
'PCONEAX',
],
'pdampt' : ['PDAMPT'],
'pelast' : ['PELAST'],
'pbusht' : ['PBUSHT'],
# materials
'materials' : ['MAT1', 'MAT2', 'MAT3', 'MAT8', 'MAT9', 'MAT10', 'MAT11',
'MAT3D', 'MATG'],
'hyperelastic_materials' : ['MATHE', 'MATHP'],
'creep_materials' : ['CREEP'],
'MATT1' : ['MATT1'],
'MATT2' : ['MATT2'],
'MATT3' : ['MATT3'],
'MATT4' : ['MATT4'], # thermal
'MATT5' : ['MATT5'], # thermal
'MATT8' : ['MATT8'],
'MATT9' : ['MATT9'],
'MATS1' : ['MATS1'],
'MATS3' : ['MATS3'],
'MATS8' : ['MATS8'],
'nxstrats' : ['NXSTRAT'],
# 'MATHE'
#'EQUIV', # testing only, should never be activated...
# thermal materials
'thermal_materials' : ['MAT4', 'MAT5'],
# spc/mpc constraints - TODO: is this correct?
'spcadds' : ['SPCADD'],
'spcs' : ['SPC', 'SPC1', 'SPCAX', 'GMSPC'],
'spcoffs' : ['SPCOFF', 'SPCOFF1'],
'mpcadds' : ['MPCADD'],
'mpcs' : ['MPC'],
'suport' : ['SUPORT'],
'suport1' : ['SUPORT1'],
'se_suport' : ['SESUP'],
'setree' : ['SETREE'],
'senqset' : ['SENQSET'],
'sebulk' : ['SEBULK'],
'sebndry' : ['SEBNDRY'],
'release' : ['RELEASE'],
'seloc' : ['SELOC'],
'sempln' : ['SEMPLN'],
'seconct' : ['SECONCT'],
'selabel' : ['SELABEL'],
'seexcld' : ['SEEXCLD'],
'seelt' : ['SEELT'],
'seload' : ['SELOAD'],
'csuper' : ['CSUPER'],
'csupext' : ['CSUPEXT'],
# loads
'load_combinations' : ['LOAD', 'LSEQ', 'CLOAD'],
'loads' : [
'FORCE', 'FORCE1', 'FORCE2',
'MOMENT', 'MOMENT1', 'MOMENT2',
'GRAV', 'ACCEL', 'ACCEL1',
'PLOAD', 'PLOAD1', 'PLOAD2', 'PLOAD4',
'RFORCE', 'RFORCE1', 'SLOAD',
'SPCD', 'LOADCYN', 'LOADCYH', 'DEFORM',
# msgmesh
#'GMLOAD',
# thermal
'TEMP', 'TEMPB3', 'TEMPRB',
'QBDY1', 'QBDY2', 'QBDY3', 'QHBDY', 'QVOL',
# axisymmetric
'PLOADX1', 'FORCEAX', 'PRESAX', 'TEMPAX',
],
'cyjoin' : ['CYJOIN'],
'cyax' : ['CYAX'],
'modtrak' : ['MODTRAK'],
'dloads' : ['DLOAD'],
# stores RLOAD1, RLOAD2, TLOAD1, TLOAD2, and ACSRCE entries.
'dload_entries' : ['ACSRCE', 'TLOAD1', 'TLOAD2', 'RLOAD1', 'RLOAD2',
'QVECT', 'RANDPS', 'RANDT1'],
# aero cards
'aero' : ['AERO'],
'aeros' : ['AEROS'],
'gusts' : ['GUST', 'GUST2'],
'flutters' : ['FLUTTER'],
'flfacts' : ['FLFACT'],
'mkaeros' : ['MKAERO1', 'MKAERO2'],
'aecomps' : ['AECOMP', 'AECOMPL'],
'aefacts' : ['AEFACT'],
'aelinks' : ['AELINK'],
'aelists' : ['AELIST'],
'aeparams' : ['AEPARM'],
'aesurf' : ['AESURF'],
'aesurfs' : ['AESURFS'],
'aestats' : ['AESTAT'],
'caeros' : ['CAERO1', 'CAERO2', 'CAERO3', 'CAERO4', 'CAERO5', 'CAERO7', 'BODY7'],
'paeros' : ['PAERO1', 'PAERO2', 'PAERO3', 'PAERO4', 'PAERO5', 'SEGMESH'],
'monitor_points' : ['MONPNT1', 'MONPNT2', 'MONPNT3', 'MONDSP1'],
'splines' : ['SPLINE1', 'SPLINE2', 'SPLINE3', 'SPLINE4', 'SPLINE5', 'SPLINE6', 'SPLINE7'],
'panlsts' : ['PANLST1', 'PANLST2', 'PANLST3'],
'csschds' : ['CSSCHD',],
#'SPLINE3', 'SPLINE6', 'SPLINE7',
'trims' : ['TRIM', 'TRIM2'],
'divergs' : ['DIVERG'],
# coords
'coords' : ['CORD1R', 'CORD1C', 'CORD1S',
'CORD2R', 'CORD2C', 'CORD2S',
'GMCORD', 'ACOORD', 'CORD3G'],
# temperature cards
'tempds' : ['TEMPD'],
'phbdys' : ['PHBDY'],
'convection_properties' : ['PCONV', 'PCONVM'],
# stores thermal boundary conditions
'bcs' : ['CONV', 'CONVM', 'RADBC', 'RADM', 'TEMPBC'],
# dynamic cards
'dareas' : ['DAREA'],
'tics' : ['TIC'],
'dphases' : ['DPHASE'],
'nlparms' : ['NLPARM'],
'nlpcis' : ['NLPCI'],
'tsteps' : ['TSTEP'],
'tstepnls' : ['TSTEPNL', 'TSTEP1'],
'transfer_functions' : ['TF'],
'delays' : ['DELAY'],
'rotors' : ['ROTORG', 'ROTORD'],
'frequencies' : ['FREQ', 'FREQ1', 'FREQ2', 'FREQ3', 'FREQ4', 'FREQ5'],
# direct matrix input cards
'dmig' : ['DMIG'],
'dmiax' : ['DMIAX'],
'dmij' : ['DMIJ'],
'dmiji' : ['DMIJI'],
'dmik' : ['DMIK'],
'dmi' : ['DMI'],
'dti' : ['DTI'],
# optimzation
'dequations' : ['DEQATN'],
'dtable' : ['DTABLE'],
'dconstrs' : ['DCONSTR', 'DCONADD'],
'desvars' : ['DESVAR'],
'topvar' : ['TOPVAR'],
'ddvals' : ['DDVAL'],
'dlinks' : ['DLINK'],
'dresps' : ['DRESP1', 'DRESP2', 'DRESP3'],
'dvprels' : ['DVPREL1', 'DVPREL2'],
'dvmrels' : ['DVMREL1', 'DVMREL2'],
'dvcrels' : ['DVCREL1', 'DVCREL2'],
'dvgrids' : ['DVGRID'],
'doptprm' : ['DOPTPRM'],
'dscreen' : ['DSCREEN'],
# optimization - nx
'dmncon' : ['DMNCON'],
'dvtrels' : ['DVTREL1'],
'group' : ['GROUP'],
# sets
'asets' : ['ASET', 'ASET1'],
'omits' : ['OMIT', 'OMIT1'],
'bsets' : ['BSET', 'BSET1'],
'qsets' : ['QSET', 'QSET1'],
'csets' : ['CSET', 'CSET1'],
'usets' : ['USET', 'USET1'],
'sets' : ['SET1', 'SET3'],
# super-element sets
'se_bsets' : ['SEBSET', 'SEBSET1'],
'se_csets' : ['SECSET', 'SECSET1'],
'se_qsets' : ['SEQSET', 'SEQSET1'],
'se_usets' : ['SEUSET', 'SEQSET1'],
'se_sets' : ['SESET'],
'radset' : ['RADSET'],
'radcavs' : ['RADCAV', 'RADLST'],
'radmtx' : ['RADMTX'],
# SEBSEP
# parametric
'pset' : ['PSET'],
'pval' : ['PVAL'],
'gmcurv' : ['GMCURV'],
'gmsurf' : ['GMSURF'],
'feedge' : ['FEEDGE'],
'feface' : ['FEFACE'],
# tables
'tables' : [
'TABLEH1', 'TABLEHT',
'TABLES1', 'TABLEST',
],
'tables_d' : ['TABLED1', 'TABLED2', 'TABLED3', 'TABLED4', 'TABLED5'],
'tables_m' : ['TABLEM1', 'TABLEM2', 'TABLEM3', 'TABLEM4'],
'tables_sdamping' : ['TABDMP1'],
'random_tables' : ['TABRND1', 'TABRNDG'],
# initial conditions - sid (set ID)
##'TIC', (in bdf_tables.py)
# methods
'methods' : ['EIGB', 'EIGR', 'EIGRL'],
# cMethods
'cMethods' : ['EIGC', 'EIGP'],
# contact
'bcbodys' : ['BCBODY'],
'bcparas' : ['BCPARA'],
'bctparas' : ['BCTPARA'],
'bcrparas' : ['BCRPARA'],
'bctparms' : ['BCTPARM'],
'bctadds' : ['BCTADD'],
'bctsets' : ['BCTSET'],
'bgadds' : ['BGADD'],
'bgsets' : ['BGSET'],
'bsurf' : ['BSURF'],
'bsurfs' : ['BSURFS'],
'bconp' : ['BCONP'],
'blseg' : ['BLSEG'],
'bfric' : ['BFRIC'],
'views' : ['VIEW'],
'view3ds' : ['VIEW3D'],
## other
#'INCLUDE', # '='
#'ENDDATA',
} # type: Dict[str, List[str]]
self._type_to_slot_map = self.get_rslot_map()
@property
def type_slot_str(self) -> str:
    """helper method for printing supported cards

    Builds an ASCII table with one row per card group (a key of
    ``self._slot_to_type_map``) listing the card types of that group that
    are present in ``self.cards_to_read``; groups with no readable cards
    are skipped.
    """
    # first column must be wide enough for the longest group name
    nchars = len('Card Group')
    #nchars_cards = 0
    for card_group in self._slot_to_type_map:
        nchars = max(nchars, len(card_group))
    # fixed width of the "Cards" column
    nline = 58
    # row format and the "+---+---+" separator format
    fmt = '| %%-%ss | %%-%ss |\n' % (nchars, nline)
    fmt_plus = '+%%-%ss+%%-%ss+\n' % (nchars + 2, nline + 2)
    dash1 = '-' * (nchars + 2)
    dash2 = '-' * (nline + 2)
    dash_plus = fmt_plus % (dash1, dash2)
    html_msg = [
        dash_plus,
        fmt % ('Card Group', 'Cards'),
    ]
    for card_group, card_types in sorted(self._slot_to_type_map.items()):
        # only show the card types that the reader actually supports
        valid_cards = [card_type for card_type in card_types
                       if card_type in self.cards_to_read]
        valid_cards.sort()
        if len(valid_cards) == 0:
            continue
        #i = 0
        # word-wrap the card list into sublines no wider than nline chars
        sublines = []
        subline = ''
        while valid_cards:
            card_type = valid_cards.pop(0)
            # the +2 is for the comma and space
            len_card_type = len(card_type) + 2
            nline_new = len(subline) + len_card_type
            if nline_new > nline:
                sublines.append(subline.rstrip(' '))
                subline = ''
            subline += '%s, ' % card_type
        if subline:
            sublines.append(subline.rstrip(', '))
        html_msg.append(dash_plus)
        for isub, subline in enumerate(sublines):
            if isub > 0:  # adds intermediate dash lines
                html_msg.append(dash_plus)
            html_msg.append(fmt % (card_group, subline))
            # the group name is only printed on the first wrapped row
            card_group = ''
        html_msg.append(dash_plus)
    #for card_group, card_types in sorted(self._slot_to_type_map.items()):
        #html_msg.append('| %s | %s |' % (card_group, ', '.join(card_types)))
    #html_msg.append(
        #fmt_plus % ('-'*(nchars + 2), '-'*(nline + 2))
    #)
    msg = ''.join(html_msg)
    return msg
@property
def nastran_format(self) -> str:
    """Active Nastran flavor (e.g. 'msc', 'nx')."""
    return self._nastran_format
@nastran_format.setter
def nastran_format(self, nastran_format: str) -> None:
    """Validate and store the Nastran flavor (case/whitespace insensitive)."""
    assert isinstance(nastran_format, str), nastran_format
    normalized = nastran_format.strip().lower()
    if normalized not in BDF_FORMATS:
        raise RuntimeError(nastran_format)
    self._nastran_format = normalized
@property
def is_long_ids(self) -> bool:
    """True when the model uses large-field (long) ids."""
    flag = self._is_long_ids
    return flag
def _set_punch(self) -> None:
"""updates the punch flag"""
if self.punch is None:
# writing a mesh without using read_bdf
if self.system_command_lines or self.executive_control_lines or self.case_control_deck:
self.punch = False
else:
self.punch = True
@property
def sol(self) -> int:
    """Solution sequence number (e.g. 101, 103)."""
    return self._sol
@sol.setter
def sol(self, sol: int) -> int:
    """Set the solution sequence; seeds the executive deck if it is empty."""
    self._sol = sol
    if not self.executive_control_lines:
        self.executive_control_lines = ['SOL %s' % sol, 'CEND']
        self.sol_iline = 0
    return self._sol
@property
def subcases(self) -> Dict[int, Optional[Any]]:
    """Subcases of the case control deck ({} when there is no deck)."""
    deck = self.case_control_deck
    return {} if deck is None else deck.subcases
#@property
#def grids(self):
#"""might be renaming self.nodes to self.grids"""
#return self.nodes
#@property.setter
#def grids(self, grids):
#"""might be renaming self.nodes to self.grids"""
#self.nodes = grids
@property
def nnodes(self) -> int:
    """Number of GRID cards in the model."""
    nodes = self.nodes
    return len(nodes)
@property
def node_ids(self):
    """View of the GRID ids."""
    nodes = self.nodes
    return nodes.keys()
@property
def point_ids(self):
    """gets the GRID, SPOINT, EPOINT ids as a set

    ``set(list(d.keys()))`` was redundant; a dict iterates its keys
    directly, so ``set(d)`` is equivalent and avoids two throwaway copies.
    """
    return set(self.node_ids) | set(self.spoints) | set(self.epoints)
@property
def npoints(self) -> int:
    """Number of GRID, SPOINT, EPOINT ids."""
    point_ids = self.point_ids
    return len(point_ids)
#--------------------
# Elements CARDS
@property
def nelements(self) -> int:
    """Number of elements in the model."""
    elements = self.elements
    return len(elements)
@property
def element_ids(self):
    """View of the element ids."""
    elements = self.elements
    return elements.keys()
#--------------------
# Property CARDS
@property
def nproperties(self) -> int:
    """Number of property cards in the model."""
    properties = self.properties
    return len(properties)
@property
def property_ids(self):
    """View of the property ids."""
    properties = self.properties
    return properties.keys()
#--------------------
# Material CARDS
@property
def nmaterials(self) -> int:
    """Number of material cards in the model."""
    materials = self.materials
    return len(materials)
@property
def material_ids(self):
    """View of the material ids."""
    materials = self.materials
    return materials.keys()
#--------------------
# Coords CARDS
@property
def ncoords(self) -> int:
    """Number of coordinate systems in the model."""
    coords = self.coords
    return len(coords)
@property
def coord_ids(self):
    """View of the coordinate system ids."""
    coords = self.coords
    return coords.keys()
#--------------------
@property
def ncaeros(self) -> int:
    """Number of CAEROx panels in the model."""
    caeros = self.caeros
    return len(caeros)
@property
def caero_ids(self):
    """View of the CAEROx ids."""
    caeros = self.caeros
    return caeros.keys()
@property
def wtmass(self):
    """
    Gets the PARAM,WTMASS value, which defines the weight to mass
    conversion factor
    kg -> kg : 1.0
    lb -> slug : 1/32.2
    lb -> slinch : 1/(32.2*12)=1/386.4
    """
    try:
        return self.params['WTMASS'].values[0]
    except KeyError:
        # no PARAM,WTMASS card -> no conversion
        return 1.0
def set_param(self, key: str, values: Union[int, float, str, List[float]], comment: str='') -> None:
    """Set a PARAM card (case-insensitive key), creating it when missing."""
    if isinstance(values, (int, float, str)):
        # normalize a scalar to a one-element list
        values = [values]
    ukey = key.upper()
    if ukey in self.params:
        self.params[ukey].update_values(*values)
    else:
        self.add_param(ukey, values, comment=comment)
def get_param(self, key: str, default: Union[int, float, str, List[float]]
              ) -> Union[int, float, str, List[float]]:
    """Return the value of PARAM *key* (case-insensitive) or *default*."""
    try:
        return self.params[key.upper()].value
    except KeyError:
        return default
#--------------------
# deprecations
# Backwards-compatible aliases: the plural names (``dmis`` etc.) simply
# forward to the singular storage dicts (``self.dmi`` etc.).
@property
def dmis(self) -> Dict[str, DMI]:
    """deprecated alias for ``self.dmi``"""
    return self.dmi
@property
def dmigs(self) -> Dict[str, DMIG]:
    """deprecated alias for ``self.dmig``"""
    return self.dmig
@property
def dmiks(self) -> Dict[str, DMIK]:
    """deprecated alias for ``self.dmik``"""
    return self.dmik
@property
def dmijs(self) -> Dict[str, DMIJ]:
    """deprecated alias for ``self.dmij``"""
    return self.dmij
@property
def dmijis(self) -> Dict[str, DMIJI]:
    """deprecated alias for ``self.dmiji``"""
    return self.dmiji
@dmis.setter
def dmis(self, dmi):
    """deprecated alias setter for ``self.dmi``"""
    self.dmi = dmi
@dmigs.setter
def dmigs(self, dmig):
    """deprecated alias setter for ``self.dmig``"""
    self.dmig = dmig
@dmiks.setter
def dmiks(self, dmik):
    """deprecated alias setter for ``self.dmik``"""
    self.dmik = dmik
@dmijs.setter
def dmijs(self, dmij):
    """deprecated alias setter for ``self.dmij``"""
    self.dmij = dmij
@dmijis.setter
def dmijis(self, dmiji):
    """deprecated alias setter for ``self.dmiji``"""
    self.dmiji = dmiji
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,638
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/op2/op2_geom.py
|
"""
Defines:
- read_op2_geom(op2_filename=None, combine=True, subcases=None,
exclude_results=None, include_results=None,
validate=True, xref=True,
build_dataframe=False, skip_undefined_matrices=True,
mode='msc', log=None, debug=True, debug_file=None, encoding=None)
- OP2Geom(make_geom=True, debug=False, log=None, debug_file=None, mode='msc')
- OP2
"""
from __future__ import annotations
from pickle import dump
from pathlib import PurePath
from typing import List, Optional, Union, Any, TYPE_CHECKING
import numpy as np
from pyNastran.op2.tables.geom.geom_common import GeomCommon
from pyNastran.op2.tables.geom.geom1 import GEOM1
from pyNastran.op2.tables.geom.geom2 import GEOM2
from pyNastran.op2.tables.geom.geom3 import GEOM3
from pyNastran.op2.tables.geom.geom4 import GEOM4
from pyNastran.op2.tables.geom.ept import EPT
from pyNastran.op2.tables.geom.mpt import MPT
from pyNastran.op2.tables.geom.edt import EDT
from pyNastran.op2.tables.geom.edom import EDOM
from pyNastran.op2.tables.geom.contact import CONTACT
from pyNastran.op2.tables.geom.dit import DIT
from pyNastran.op2.tables.geom.dynamics import DYNAMICS
from pyNastran.op2.tables.geom.axic import AXIC
from pyNastran.bdf.bdf import BDF
from pyNastran.bdf.errors import DuplicateIDsError
from pyNastran.op2.op2 import OP2, FatalError, SortCodeError, DeviceCodeError, FortranMarkerError
if TYPE_CHECKING: # pragma: no cover
from cpylog import SimpleLogger
def read_op2_geom(op2_filename: Optional[Union[str, PurePath]]=None,
                  combine: bool=True,
                  subcases: Optional[List[int]]=None,
                  exclude_results: Optional[List[str]]=None,
                  include_results: Optional[List[str]]=None,
                  validate: bool=True, xref: bool=True,
                  build_dataframe: bool=False, skip_undefined_matrices: bool=True,
                  mode: str='msc', log: Optional[SimpleLogger]=None, debug: bool=True,
                  debug_file: Optional[str]=None,
                  encoding: Optional[str]=None):
    """
    Creates the OP2 object without calling the OP2 class.
    Parameters
    ----------
    op2_filename : str (default=None -> popup)
        the op2_filename
    combine : bool; default=True
        True : objects are isubcase based
        False : objects are (isubcase, subtitle) based;
                will be used for superelements regardless of the option
    subcases : List[int, ...] / int; default=None->all subcases
        list of [subcase1_ID,subcase2_ID]
    exclude_results / include_results : List[str] / str; default=None
        a list of result types to exclude/include
        one of these must be None
    validate : bool
        runs various checks on the BDF (default=True)
    xref : bool
        should the bdf be cross referenced (default=True)
    build_dataframe : bool; default=False
        builds a pandas DataFrame for op2 objects
    skip_undefined_matrices : bool; default=True
        True : prevents matrix reading crashes
    mode : str; default='msc'
        the flavor of Nastran to assume; {msc, nx}
    log : Log()
        a logging object to write debug messages to
        (.. seealso:: import logging)
    debug : bool; default=True
        enables the debug log and sets the debug in the logger
    debug_file : str; default=None (No debug)
        sets the filename that will be written to
    encoding : str
        the unicode encoding (default=None; system default)
    Returns
    -------
    model : OP2()
        an OP2 object
    .. todo:: creates the OP2 object without all the read methods
    .. note :: this method will change in order to return an object that
               does not have so many methods
    """
    model = OP2Geom(log=log, debug=debug, debug_file=debug_file, mode=mode)
    model.set_subcases(subcases)
    model.include_exclude_results(exclude_results=exclude_results,
                                  include_results=include_results)
    # read, then optionally sanity-check and cross-reference the geometry
    model.read_op2(op2_filename=op2_filename, build_dataframe=build_dataframe,
                   skip_undefined_matrices=skip_undefined_matrices, combine=combine,
                   encoding=encoding)
    if validate:
        model.validate()
    if xref:
        model.cross_reference()
    return model
class OP2GeomCommon(OP2, GeomCommon):
"""interface for the OP2Geom class for to loading subclasses"""
def __init__(self, make_geom: bool=True,
             debug: bool=False, log: Any=None,
             debug_file: Optional[str]=None,
             mode: Optional[str]=None):
    """
    Initializes the OP2 object
    Parameters
    ----------
    make_geom : bool; default=True
        reads the BDF tables
        NOTE(review): this argument is currently ignored --
        ``self.make_geom`` is unconditionally set to True below;
        confirm whether False should be honored
    debug : bool; default=False
        enables the debug log and sets the debug in the logger
    log: log()
        a logging object to write debug messages to
        (.. seealso:: import logging)
    debug_file : default=None -> no debug
        sets the filename that will be written to
    mode : str; default=None -> 'msc'
        {msc, nx}
    """
    #self.big_properties = {}
    self.big_materials = {}
    # per-table geometry readers; each knows how to parse one OP2 geom table
    self.reader_geom2 = GEOM2(self)
    self.reader_geom1 = GEOM1(self)
    self.reader_geom3 = GEOM3(self)
    self.reader_geom4 = GEOM4(self)
    self.reader_ept = EPT(self)
    self.reader_mpt = MPT(self)
    self.reader_edt = EDT(self)
    self.reader_edom = EDOM(self)
    self.reader_contact = CONTACT(self)
    self.reader_dit = DIT(self)
    self.reader_dynamic = DYNAMICS(self)
    self.reader_axic = AXIC(self)
    # base-class init must run after the readers exist
    OP2.__init__(self, debug=debug, log=log, debug_file=debug_file, mode=mode)
    self.make_geom = True
    # example models that exercise the VIEWTB table:
    # F:\work\pyNastran\examples\Dropbox\move_tpl\beamp10.op2
    # F:\work\pyNastran\examples\Dropbox\move_tpl\ifsr22r.op2
    # F:\work\pyNastran\examples\Dropbox\move_tpl\ifssh22.op2
    # F:\work\pyNastran\examples\Dropbox\move_tpl\ifsr22r.op2
    # F:\work\pyNastran\examples\Dropbox\move_tpl\ifsv02pp.op2
    # dispatch table for VIEWTB records: (key) -> [name, handler]
    self._viewtb_map = {
        (10300, 103, 16) : ['QUADP', self._read_fake],
        (10400, 104, 15) : ['TRIAP', self._read_fake],
        (10500, 105, 14) : ['BEAMP', self._read_fake],
        (14100, 141, 18) : ['HEXAP', self._read_view_hexa],
        (14200, 142, 16) : ['PENTAP', self._read_fake],
        (14300, 143, 14) : ['TETRAP', self._read_fake],
        #(10500, 105, 14) : ['???', self._read_fake],
        #(10500, 105, 14) : ['???', self._read_fake],
    }
def _read_view_hexa(self, data, n):
"""
Word Name Type Description
1 EID I Element identification number
2 CID I Coordinate system identification number -- from CID field
3 NX I View mesh subdivision -- from VIEW field
4 NY I View mesh subdivision -- from VIEW field
5 NZ I View mesh subdivision -- from VIEW field
6 MTH CHAR4 Method -- 'DIRE' means direct
7 MINEID I Mininum VUHEXA identification number for this element
8 MAXEID I Maximum VUHEXA identification number for this element
9 MINGID I Minimum grid identification number for this element
10 MAXGID I Maximum grid identification number for this element
11 G(8) I Corner grid identification numbers
"""
# C:\NASA\m4\formats\git\examples\move_tpl\ifsv34b.op2
ints = np.frombuffer(data[n:], self.idtype) # .tolist()
nelements = len(ints) // 18
assert len(ints) % 18 == 0
#print('nelements =', nelements)
ints2 = ints.reshape(nelements, 18)
for intsi in ints2:
eid, cid, nx, ny, nz, junk_imth, mineid, maxeid, mingid, maxgid, *nids = intsi
mth = data[n+20:n+24].decode('latin1')
#print(eid, cid, [nx, ny, nz], mth, [mineid, maxeid, mingid, maxgid], nids)
assert mth in ['DIRE', 'EXTR'], mth
n += 72
return n
def save(self, obj_filename: str='model.obj', unxref: bool=True) -> None:
    """Pickles the model to ``obj_filename``."""
    with open(obj_filename, 'wb') as fobj:
        dump(self, fobj)
def _get_table_mapper(self):
    """
    Extends the base OP2 table mapper with the geometry, property,
    material, dynamics, and optimization tables.

    Returns
    -------
    table_mapper : dict[bytes, list[function]]
        maps the table name to its [read_table3, read_table4] pair
    """
    table_mapper = OP2._get_table_mapper(self)

    # (reader function, table names it handles); the repeated one-line
    # assignments were collapsed into this data-driven form
    table_groups = [
        (self.reader_contact.read_contact_4, [b'CONTACT', b'CONTACTS']),
        (self._read_viewtb_4, [b'VIEWTB']),
        (self.reader_edt.read_edt_4, [b'EDT', b'EDTS']),

        # geometry (*S=superelements)
        (self.reader_geom1.read_geom1_4,
         [b'GEOM1', b'GEOM1S', b'GEOM1N', b'GEOM1OLD', b'GEOM1ATV']),
        (self.reader_geom2.read_geom2_4,
         [b'GEOM2', b'GEOM2S', b'GEOM2N', b'GEOM2OLD', b'GEOM2ATV']),
        (self.reader_geom3.read_geom3_4,
         [b'GEOM3', b'GEOM3S', b'GEOM3N', b'GEOM3OLD']),
        (self.reader_geom4.read_geom4_4,
         [b'GEOM4', b'GEOM4S', b'GEOM4N', b'GEOM4OLD']),

        (self.reader_edom.read_edom4_4, [b'EDOM']),  # optimization
        (self.reader_ept.read_ept_4, [b'EPT', b'EPTS', b'EPTOLD', b'EPTATV']),
        (self.reader_mpt.read_mpt_4, [b'MPT', b'MPTS']),
        (self.reader_dynamic.read_dynamics_4, [b'DYNAMIC', b'DYNAMICS']),
        (self.reader_axic.read_axic_4, [b'AXIC']),

        # table objects (e.g. TABLED1)
        (self.reader_dit.read_dit_4, [b'DIT', b'DITS']),
    ]
    for func, table_names in table_groups:
        for table_name in table_names:
            # a fresh list per key, matching the original per-key lists
            table_mapper[table_name] = [func, func]
    return table_mapper
def _read_viewtb_4(self, data: bytes, ndata: int):
    """
    View information table

    Contains the relationship between each p-element and its view-elements
    and view-grids.
    """
    # delegate to the generic GEOM-table record dispatcher using the
    # (num, num, num) marker -> reader map built in __init__
    return self._read_geom_4(self._viewtb_map, data, ndata)
class OP2Geom(BDF, OP2GeomCommon):
    """creates an interface for the OP2 and BDF classes"""
    _properties = [
        'is_bdf_vectorized', 'nid_map', 'wtmass',
        'is_real', 'is_complex', 'is_random',
        '_sort_method', 'is_sort1', 'is_sort2',
        'matrix_tables', 'table_name_str', 'is_geometry',
        #'dmigs', 'dmijs', 'dmiks', 'dmijis', 'dtis', 'dmis',
    ]

    def __init__(self, make_geom: bool=True,
                 debug: bool=False, log: Any=None,
                 debug_file: Optional[str]=None, mode: str='msc'):
        """
        Initializes the OP2 object

        Parameters
        ----------
        make_geom : bool; default=False
            reads the BDF tables
        debug : bool; default=False
            enables the debug log and sets the debug in the logger
        log: log()
            a logging object to write debug messages to
            (.. seealso:: import logging)
        debug_file : default=None -> no debug
            sets the filename that will be written to
        mode : str; default='msc'
            {msc, nx}

        """
        BDF.__init__(self, debug=debug, log=log)
        OP2GeomCommon.__init__(self, make_geom=make_geom,
                               debug=debug, log=log, debug_file=debug_file,
                               mode=mode)

    @property
    def is_geometry(self) -> bool:
        """this reader carries BDF geometry (unlike the plain OP2)"""
        return True

    def read_op2(self, op2_filename: Optional[Union[str, PurePath]]=None, combine: bool=True,
                 build_dataframe: Optional[bool]=False,
                 skip_undefined_matrices: bool=False,
                 encoding: Optional[str]=None):
        """see ``OP2.read_op2``"""
        OP2.read_op2(self, op2_filename=op2_filename, combine=combine,
                     build_dataframe=build_dataframe,
                     skip_undefined_matrices=skip_undefined_matrices,
                     encoding=encoding)
        # no GEOM1 nodes were read; try to rebuild them from the
        # GPDT/EQEXIN results tables
        if len(self.nodes) == 0:
            self.gpdt_to_nodes()

    def gpdt_to_nodes(self):
        """converts the GPDT & EQEXIN tables to node ids"""
        eqexin = self.op2_results.eqexin
        gpdt = self.op2_results.gpdt
        msg = ''
        if eqexin is None:
            msg += 'eqexin is None; '
        if gpdt is None:
            msg += 'gpdt is None'
        # bug fix: a missing GPDT used to return *before* the error was
        # logged, so the user never learned why no nodes were created
        if msg:
            self.log.error('Cannot convert EQEXIN/GPDT to nodes because %s' % msg.rstrip('; '))
            return
        nid_cp_cd_ps = gpdt.nid_cp_cd_ps
        xyz = gpdt.xyz
        nids = eqexin.nid
        # EQEXIN supplies the external node ids; zip pairs them
        # positionally with the GPDT rows
        for nid, nid_cp_cd_psi, xyzi in zip(nids, nid_cp_cd_ps, xyz):
            _nid, cp, cd, ps = nid_cp_cd_psi
            self.add_grid(nid, xyzi, cp=cp, cd=cd, ps=ps, seid=0, comment='')

    def __getstate__(self):
        """clears out a few variables in order to pickle the object"""
        # pickling the combined BDF/OP2 state was never finished; fail
        # loudly rather than emitting a broken pickle
        raise NotImplementedError()

    def export_hdf5_file(self, hdf5_file, exporter=None):
        """
        Converts the OP2 objects into hdf5 object

        Parameters
        ----------
        hdf5_file : H5File()
            an h5py object
        exporter : HDF5Exporter; default=None
            unused

        TODO: doesn't support:
          - BucklingEigenvalues

        """
        # write the OP2 results first, then the BDF geometry cards
        OP2GeomCommon.export_hdf5_file(self, hdf5_file)
        BDF.export_hdf5_file(self, hdf5_file)
def bdf_to_op2_geom(model: BDF, validate: bool=True) -> OP2Geom:
    """converts a BDF() -> OP2Geom()"""
    if isinstance(model, OP2Geom):
        # already the right type; nothing to convert
        return model
    assert model is not None

    debug = True if model.debug is None else model.debug
    geom_model = OP2Geom(make_geom=True, debug=debug, log=model.log,
                         debug_file=None,
                         mode='msc')

    # copy every real (non-property) attribute of the BDF onto the new model
    skip = model._properties + ['_properties', 'npoints', 'is_geometry']
    for attr in model.object_attributes(mode='both', keys_to_skip=skip):
        attr_value = getattr(model, attr)
        try:
            setattr(geom_model, attr, attr_value)
        except AttributeError:
            geom_model.log.error('cant set %r to %r' % (attr, attr_value))
            raise
    return geom_model
def attach_op2_results_to_bdf(bdf_model: BDF, op2_model: Optional[OP2]=None,
                              validate: bool=True) -> OP2Geom:
    """We're up-converting a BDF and an OP2 result into an OP2Geom object."""
    op2_geom_model = bdf_to_op2_geom(bdf_model, validate=validate)
    if op2_model is None:
        return op2_geom_model

    # copy whichever result/metadata attributes exist on the OP2
    for name in ('op2_filename', 'matrices', 'eigenvalues', 'eigenvalues_fluid',
                 'displacements', 'load_vectors', 'eigenvectors'):
        if hasattr(op2_model, name):
            setattr(op2_geom_model, name, getattr(op2_model, name))

    if validate:
        assert len(op2_geom_model.nodes) > 0, op2_geom_model.get_bdf_stats()
    return op2_geom_model
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,639
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/converters/nastran/gui/nastran_io.py
|
# pylint: disable=E1101,C1801,C0103
"""Defines the GUI IO file for Nastran."""
from __future__ import annotations
import os
import sys
import traceback
from itertools import chain
from io import StringIO
from collections import defaultdict, OrderedDict
from typing import List, Dict, Tuple, Any, TYPE_CHECKING
#VTK_TRIANGLE = 5
#VTK_QUADRATIC_TRIANGLE = 22
#VTK_QUAD = 9
#VTK_QUADRATIC_QUAD = 23
#VTK_TETRA = 10
#VTK_QUADRATIC_TETRA = 24
#VTK_WEDGE = 13
#VTK_QUADRATIC_WEDGE = 26
#VTK_HEXAHEDRON = 12
#VTK_QUADRATIC_HEXAHEDRON = 25
import numpy as np
from numpy.linalg import norm # type: ignore
#: makes vtk work on certain builds of vtk
#: we have to call this before vtk; you can't just try-except it
#: unused_import
from pyNastran.gui.qt_version import qt_version
if qt_version == 'pyqt5':
import PyQt5
elif qt_version == 'pyside2':
import PySide2
elif qt_version == 'pyqt6':
import PyQt6
else:
raise NotImplementedError(qt_version)
from qtpy import QtCore
from qtpy.QtWidgets import QDockWidget
import vtk
from vtk import (vtkTriangle, vtkQuad, vtkTetra, vtkWedge, vtkHexahedron,
vtkQuadraticTriangle, vtkQuadraticQuad, vtkQuadraticTetra,
vtkQuadraticWedge, vtkQuadraticHexahedron,
vtkPyramid, vtkQuadraticPyramid)
#from pyNastran import is_release
from pyNastran import __version__
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.femutils.nan import (
isfinite, isfinite_and_greater_than, isfinite_and_nonzero,
isgreater_int)
from pyNastran.femutils.utils import duplicates, is_monotonic, underflow_norm
from pyNastran.bdf.bdf import (BDF,
CAERO1, CAERO2, CAERO3, CAERO4, CAERO5,
CQUAD4, CQUAD8, CQUAD, CQUADR, CSHEAR,
CTRIA3, CTRIA6, CTRIAR,
CPLSTN3, CPLSTN4, CPLSTN6, CPLSTN8,
CPLSTS3, CPLSTS4, CPLSTS6, CPLSTS8,
CTRSHL,
CTRAX3, CTRIAX6, CTRIAX, #CTRAX6,
CQUADX4, CQUADX8, CQUADX,
CONM2,
# nastran95
CQUAD1)
from pyNastran.bdf.cards.aero.aero import get_caero_subpanel_grid, build_caero_paneling
from pyNastran.bdf.cards.aero.zona import CAERO7, BODY7
from pyNastran.bdf.cards.elements.solid import (
CTETRA4, CTETRA10, CPENTA6, CPENTA15,
CHEXA8, CHEXA20, CIHEX1, CIHEX2, CHEXA1, CHEXA2,
CPYRAM5, CPYRAM13,
)
from pyNastran.bdf.mesh_utils.delete_bad_elements import (
tri_quality, quad_quality, get_min_max_theta)
from pyNastran.bdf.mesh_utils.export_mcids import export_mcids_all
from pyNastran.bdf.mesh_utils.forces_moments import get_load_arrays, get_pressure_array
from pyNastran.bdf.mesh_utils.mpc_dependency import get_mpc_node_ids
from pyNastran.bdf.mesh_utils.bdf_renumber import superelement_renumber
from pyNastran.op2.op2 import OP2
#from pyNastran.f06.f06_formatting import get_key0
from pyNastran.op2.op2_geom import OP2Geom
from pyNastran.op2.result_objects.stress_object import StressObject
from pyNastran.gui.utils.vtk.base_utils import numpy_to_vtk, numpy_to_vtkIdTypeArray
from pyNastran.gui.utils.vtk.vtk_utils import (
get_numpy_idtype_for_vtk, numpy_to_vtk_points, create_vtk_cells_of_constant_element_type)
from pyNastran.gui.qt_files.colors import (
RED_FLOAT, BLUE_FLOAT, GREEN_FLOAT, LIGHT_GREEN_FLOAT, PINK_FLOAT, PURPLE_FLOAT,
YELLOW_FLOAT, ORANGE_FLOAT)
from pyNastran.gui.errors import NoGeometry, NoSuperelements
from pyNastran.gui.gui_objects.gui_result import GuiResult, NormalResult
from pyNastran.gui.gui_objects.displacements import ForceTableResults, ElementalTableResults
from .wildcards import IS_H5PY, GEOM_METHODS_BDF
from .beams3d import get_bar_nids, get_beam_sections_map, create_3d_beams
from .geometry_helper import NastranGeometryHelper, get_material_arrays, get_suport_node_ids
from .results_helper import NastranGuiResults, fill_responses, _get_times
from .bdf_vectorized import add_vectorized_elements
from .utils import (
build_offset_normals_dims, build_map_centroidal_result,
get_nastran_gui_layer_word, check_for_missing_control_surface_boxes,
get_elements_nelements_unvectorized, get_shell_material_coord,
make_nid_map, store_warning)
from .menus.setup_model_sidebar import ModelSidebar
if TYPE_CHECKING: # pragma: no cover
from cpylog import SimpleLogger
from pyNastran.gui.gui_objects.settings import Settings
# solid element side id -> corner node ordering (1-based);
# NOTE(review): presumably the free-face definitions -- confirm at the call site
SIDE_MAP = {
    'CHEXA' : {
        1 : [4, 3, 2, 1],
        2 : [1, 2, 6, 5],
        3 : [2, 3, 7, 6],
        4 : [3, 4, 8, 7],
        5 : [4, 1, 5, 8],
        6 : [5, 6, 7, 8],
    },
}
# NOTE(review): presumably the element types that carry no material angle
# (theta) -- verify against the result-mapping code that consumes this list
NO_THETA = [
    'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4',
    'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4', 'CDAMP5',
    'CBAR', 'CBEAM', 'CBEAM3', 'CBEND',
    'CBUSH', 'CBUSH1D', 'CBUSH2D', 'CVISC',
    'CONROD', 'CROD', 'CTUBE', 'PLOTEL',
    'CHBDYP', 'GENEL',
]
# the OP2 result attribute names the GUI knows how to display
DESIRED_RESULTS = [
    # nodal
    # ---------
    'displacements', 'velocities', 'accelerations', 'temperatures',
    'constraint_forces', 'spc_forces', 'mpc_forces', 'eigenvectors',
    'contact_forces', 'glue_forces',

    #'gridPointForces',
    #'stress',

    # untested
    'load_vectors',
    'applied_loads',
    'force_vectors',

    # ---------
    # centroidal
    'stress',
    'chexa_stress', 'cpenta_stress', 'ctetra_stress',

    # TODO(review): 'ctria3_stress' is listed twice here (and the composite/
    # strain lines repeat the same pattern); the duplicates may have been
    # intended as the ctria6 variants -- confirm before changing
    'ctria3_stress', 'ctria3_stress',
    # bug fix: a missing comma produced the single (never-matching) string
    # 'cquad8_stresscquad4_stress'
    'cquad8_stress', 'cquad4_stress',

    'ctria3_composite_stress', 'ctria3_composite_stress',
    # bug fix: missing comma (same implicit-concatenation issue as above)
    'cquad8_composite_stress', 'cquad4_composite_stress',

    'cbar_stress', 'cbeam_stress',
    'crod_stress', 'conrod_stress', 'ctube_stress',
    'celas1_stress', 'celas2_stress', 'celas3_stress', 'celas4_stress',
    #=================================================
    'strain',
    # bug fix: 'ctetra_strein' typo -> 'ctetra_strain'
    'chexa_strain', 'cpenta_strain', 'ctetra_strain',

    'ctria3_strain', 'ctria3_strain',
    'cquad8_strain', 'cquad4_strain',

    'ctria3_composite_strain', 'ctria3_composite_strain',
    'cquad8_composite_strain', 'cquad4_composite_strain',

    'cbar_strain', 'cbeam_strain',
    'crod_strain', 'conrod_strain', 'ctube_strain',
    'celas1_strain', 'celas2_strain', 'celas3_strain', 'celas4_strain',
]
# True when running under a test driver (e.g. pytest)
IS_TESTING = 'test' in sys.argv[0]
class NastranIO_(NastranGuiResults, NastranGeometryHelper):
"""helper class that doesn't have any pyqt requirements"""
def __init__(self):
    """initializes the per-load state of the Nastran geometry helper"""
    super().__init__()
    # NOTE(review): presumably node id -> bar/beam pin-release data,
    # filled during geometry loading -- confirm against the loader
    self.nid_release_map = {}
    # build the SPC/MPC/SUPORT secondary actors by default
    self.make_spc_mpc_supports = True
    #self.export_vtk = False
    self.create_secondary_actors = True
def get_nastran_wildcard_geometry_results_functions(self):
    """gets the Nastran wildcard loader used in the file load menu"""
    geom_methods_pch = 'Nastran Geometry - Punch (*.bdf; *.dat; *.nas; *.ecd; *.pch)'
    combined_methods_op2 = 'Nastran Geometry + Results - OP2 (*.op2)'

    # build the ';;'-joined results-format filter string
    fmts = ['Nastran OP2 (*.op2)']
    if IS_H5PY:
        fmts.append('pyNastran H5 (*.h5)')
    fmts.append('Patran nod (*.nod)')
    results_fmt = ';;'.join(fmts)
    #results_fmt = 'Nastran OP2 (*.op2)'

    data_geom = ('nastran',
                 GEOM_METHODS_BDF, self.load_nastran_geometry,
                 results_fmt, self.load_nastran_results)
    data_geom_pch = ('nastran',
                     geom_methods_pch, self.load_nastran_geometry,
                     results_fmt, self.load_nastran_results)
    unused_data_geom_results = ('nastran',
                                combined_methods_op2, self.load_nastran_geometry_and_results,
                                results_fmt, self.load_nastran_results)
    return [data_geom, data_geom_pch]
    #return [data_geom, data_geom_pch, data_geom_results]
def load_nastran_geometry_and_results(self, op2_filename, name='main', plot=True):
    """loads geometry and results, so you don't have to double define the same BDF/OP2"""
    # bug fix: the name parameter was ignored (name='main' was hardcoded)
    self.load_nastran_geometry(op2_filename, name=name, plot=False)
    self.load_nastran_results(self.model)  # name='main', plot=True
def on_create_coord(self):
    """no-op placeholder (presumably a GUI menu callback hook)"""
    pass
def _get_geometry_properties_by_name(self, names):
    """
    Get a subset of the self.geometry_properties dict specified by
    names.  Any names not in the dict will be ignored.

    Parameters
    ----------
    names : list [str, ...]
        List of names.

    Returns
    -------
    geometry_properties : dict {str : AltGeometry or CoordProperties}
        Dictionary from name to property object.
    """
    all_props = self.gui.geometry_properties
    # silently skip unknown names, matching the old try/except-KeyError
    return {name : all_props[name] for name in names if name in all_props}
def on_update_geometry_properties_window(self, geometry_properties):
    """updates the 'Edit Geometry Properties' window"""
    # thin delegate to the main gui object
    self.gui.on_update_geometry_properties_window(geometry_properties)
def toggle_caero_sub_panels(self):
    """
    Toggle the visibility of the CAERO sub panels
    """
    if not self.has_caero:
        return
    props = self.gui._get_geometry_properties_by_name(
        ['caero', 'caero_subpanels'])

    show_subpanels = not self.show_caero_sub_panels
    self.show_caero_sub_panels = show_subpanels
    if self.show_caero_actor:
        # exactly one of the two actors is visible at a time
        props['caero'].is_visible = not show_subpanels
        props['caero_subpanels'].is_visible = show_subpanels
    self.gui.on_update_geometry_properties_override_dialog(props)
def toggle_conms(self):
    """
    Toggle the visibility of the CONMS
    """
    name = 'conm2'
    if name in self.gui.geometry_actors:
        # flip the stored visibility flag and push the change to the dialog
        props = {name : self.gui.geometry_properties[name]}
        props[name].is_visible = not props[name].is_visible
        self.gui.on_update_geometry_properties_override_dialog(props)
def _create_coord(self, dim_max, cid, coord, coord_type):
    """
    Create a coordinate system

    Parameters
    ----------
    dim_max : float
        the max model dimension; 10% of the max will be used for the
        coord length
    cid : int
        the coordinate system id
    coord : Coord()
        the Nastran coord object
    coord_type : str
        a string of 'xyz', 'Rtz', 'Rtp' (xyz, cylindrical, spherical)
        that changes the axis names
    """
    ## TODO: support FEMAP syntax which is????
    self.gui.create_coordinate_system(
        cid, dim_max, label=str(cid), origin=coord.origin,
        matrix_3x3=coord.beta().T, coord_type=coord_type)
def _create_nastran_coords(self, model, dim_max):
    """
    Creates the Nastran coordinate systems.

    Parameters
    ----------
    model : BDF()
        the BDF object
    dim_max : float
        the max model dimension; 10% of the max will be used for the
        coord length
    """
    # CORDxR/C/S -> axis-name style
    coord_type_map = {
        'R' : 'xyz',
        'C' : 'Rtz',
        'S' : 'Rtp',
    }
    self.gui.create_global_axes(dim_max)
    if not self.gui.settings.nastran_create_coords:
        return
    for cid, coord in sorted(model.coords.items()):
        # the global frame (0) and -1 are skipped
        if cid in [0, -1]:
            continue
        self.gui._create_coord(dim_max, cid, coord, coord_type_map[coord.Type])
def _remove_old_nastran_geometry(self, bdf_filename):
    """
    cleans up the nastran model

    Returns
    -------
    skip_reading : bool
        True when there is no filename to load (nothing to clean)
    """
    #return self._remove_old_geometry(bdf_filename)
    # skip_reading = self.removeOldGeometry(bdf_filename)
    if bdf_filename is None or bdf_filename == '':
        #self.grid = vtk.vtkUnstructuredGrid()
        #self.scalar_bar_actor.VisibilityOff()
        return True

    self.gui.turn_text_off()
    self.gui.grid.Reset()

    #self.gui.eid_map = {}
    #self.gui.nid_map = {}
    self.gui.result_cases = {}
    self.gui.ncases = 0

    # bug fix: ``del name`` only unbound the loop variable, leaving the
    # attributes in place (the old "TODO: is this doing anything?");
    # delattr actually removes the stale per-load state
    for name in ('case_keys', 'icase', 'isubcase_name_map'):
        if hasattr(self, name):
            delattr(self, name)
    return False
def get_xyz_in_coord(self, model: BDF, cid: int=0,
                     fdtype: str='float32', check_mirror: bool=True):
    """
    Creates the grid points efficiently

    Used by ``load_nastran_geometry_unvectorized``

    Parameters
    ----------
    model : BDF
        the geometry model
    cid : int; default=0
        the coordinate system to return the xyz locations in
    check_mirror : bool; default=True
        allow a single retry after preparing the superelement model;
        the recursive call passes False to prevent infinite recursion

    Returns
    -------
    xyz_cid0 : (nnodes, 3) float ndarray
        the node locations in coord cid
    nid_cp_cd : (nnodes, 3) int ndarray
        per node: [nid, cp, cd] (per the name -- cd is read from
        column 2 elsewhere in this class)
    """
    xyz_cid0, nid_cp_cd, icd_transform = build_superelement_model(model, cid=cid, fdtype=fdtype)
    if len(xyz_cid0) == 1:
        # no superelements; only the main model (super_id=0)
        super_id = 0
        nid_mapi = self.gui.nid_map
        make_nid_map(nid_mapi, nid_cp_cd[super_id][:, 0])
        self._add_nastran_spoints_to_grid(model.spoints, nid_mapi)
        self.icd_transform = icd_transform[super_id]
        return xyz_cid0[super_id], nid_cp_cd[super_id]

    # superelements: stack every superelement's arrays into single arrays
    self.icd_transform = icd_transform
    xyz_cid0_full = []
    nid_cp_cd_full = []
    for super_id, xyz_cid0i in sorted(xyz_cid0.items()):
        xyz_cid0_full.append(xyz_cid0[super_id])
        nid_cp_cd_full.append(nid_cp_cd[super_id])
    xyz_cid0_out = np.vstack(xyz_cid0_full)
    nid_cp_cd_out = np.vstack(nid_cp_cd_full)
    all_nids = nid_cp_cd_out[:, 0]
    unids = np.unique(all_nids)
    log = self.log
    if not len(all_nids) == len(unids):
        # duplicate node ids across superelements
        if model.sebulk and check_mirror:
            # SEBULK/mirroring may fix the numbering; rebuild and retry once
            _prepare_superelement_model(model, log)
            return self.get_xyz_in_coord(model, cid=0, fdtype=fdtype, check_mirror=False)
        msg = ('superelement nodes are not unique; use superelement_renumber\n'
               'renumbering; duplicate nids=\n%s' % duplicates(all_nids))
        raise NotImplementedError(msg)
    if not is_monotonic(all_nids):
        #msg = ('superelement nodes are not monotonic; use superelement_renumber\n'
        #'renumbering; nids=\n%s' % all_nids)
        #self.log.warning(msg)
        # sort the stacked arrays by node id so the nid map is monotonic
        isort = np.argsort(all_nids)
        xyz_cid0_out = xyz_cid0_out[isort, :]
        nid_cp_cd_out = nid_cp_cd_out[isort, :]
    make_nid_map(self.gui.nid_map, nid_cp_cd_out[:, 0])
    return xyz_cid0_out, nid_cp_cd_out
def get_xyz_in_coord_vectorized(self, model, cid=0, fdtype='float32'):
    """
    Creates the grid points efficiently

    Used by ``load_nastran_geometry_vectorized``

    Returns
    -------
    xyz_cid0 : (nnodes, 3) float ndarray or None
        node locations in coord cid; None when the gui has no nodes
    nid_cp_cd : (nnodes, 3) int ndarray or None
        per node: [nid, cp, cd]; None when the gui has no nodes
    """
    xyz_cid0 = None
    nid_cp_cd = None
    if self.gui.nnodes > 0:
        #xyz_cid0 = {}
        #nid_cp_cd = {}
        out = model.get_displacement_index_xyz_cp_cd(
            fdtype=fdtype, idtype='int32')
        icd_transform, icp_transform, xyz_cp, nid_cp_cd = out
        self.icd_transform = icd_transform
        #print("transform_xyzcp_to_xyz_cid")
        #model.nodes.cp = nid_cp_cd[:, 1]
        # transform the per-CP locations into coord cid (typically the
        # global frame)
        xyz_cid0 = model.transform_xyzcp_to_xyz_cid(
            xyz_cp, nid_cp_cd[:, 0], icp_transform, cid=cid,
            in_place=False)
        # cache on the vectorized node object for later lookups
        model.nodes.xyz_cid0 = xyz_cid0
        model.nodes.nids = nid_cp_cd[:, 0]

        # node id -> row index into the xyz/result arrays
        nid_map = self.gui.nid_map
        for i, nid in enumerate(nid_cp_cd[:, 0]):
            nid_map[nid] = i
        self._add_nastran_spoints_to_grid(model.spoints, nid_map)
    return xyz_cid0, nid_cp_cd
def _get_model_unvectorized(self, bdf_filename, xref_loads=True):
    """Loads the BDF/OP2 geometry

    Parameters
    ----------
    bdf_filename : str / BDF
        a bdf/dat/nas/pch/op2/h5/obj filename or an in-memory BDF model
    xref_loads : bool; default=True
        cross-reference the loads

    Returns
    -------
    model : BDF / OP2Geom
        the loaded and safe-cross-referenced model
    xref_nodes : bool
        were the nodes cross-referenced (always True here)
    """
    ext = '.bdf'
    if isinstance(bdf_filename, str):
        ext = os.path.splitext(bdf_filename)[1].lower()
    elif isinstance(bdf_filename, BDF):
        # an in-memory model; assumed to already be cross-referenced
        model = bdf_filename
        xref_nodes = True
        return model, xref_nodes
    punch = None
    if ext == '.pch':
        punch = True

    log = self.gui.log
    self.model_type = 'nastran'
    if ext == '.op2':
        # an op2 carries its own geometry tables
        model = OP2Geom(make_geom=True, debug=False, log=log,
                        debug_file=None)
        model.clear_results()
        model.IS_TESTING = False
        model.read_op2(op2_filename=bdf_filename)
    elif ext == '.h5' and IS_H5PY:
        model = BDF(log=log, debug=True)
        model.load_hdf5_filename(bdf_filename)
        model.validate()
    elif ext == '.obj':
        # a pickled BDF written by BDF.save()
        model = BDF(log=log, debug=True)
        model.load(obj_filename=bdf_filename)
    else:  # read the bdf/punch
        model = BDF(log=log, debug=True)
        #model.set_error_storage(nparse_errors=0,
        #                        stop_on_parsing_error=True,
        #                        nxref_errors=0,
        #                        stop_on_xref_error=True)
        model.read_bdf(bdf_filename,
                       punch=punch, xref=False,
                       validate=True)
        #print('done with read_bdf')

    #xref_loads = False
    #xref_aero = len(model.caeros) > 0
    xref_nodes = True
    #model.cross_reference()
    # safe xref tolerates missing/partial cards, which are common in
    # the decks users load into the gui
    model.safe_cross_reference(
        xref=True,
        xref_nodes=xref_nodes,
        xref_elements=True,
        xref_nodes_with_elements=False,
        xref_properties=True,
        xref_masses=True,
        xref_materials=False,
        xref_loads=xref_loads,
        xref_constraints=False,
        xref_optimization=False,
        xref_aero=True,
        xref_sets=False,
        create_superelement_geometry=True,
    )
    return model, xref_nodes
def load_nastran_geometry(self, bdf_filename, name='main', plot=True, **kwargs):
    """
    The entry point for Nastran geometry loading.

    Parameters
    ----------
    bdf_filename : varies
        str: the Nastran filename to load
        model : the BDF object
    name : str
        the name of the "main" actor for the GUI
    plot : bool; default=True
        should the model be generated or should we wait until
        after the results are loaded
    kwargs:
    -------
    is_geometry_results : bool; default=True
        code is being called from load_nastran_geometry_and_results
        not used...
    """
    # reset the per-actor element/node maps for this load
    self.gui.eid_maps[name] = {}
    self.gui.nid_maps[name] = {}
    self.icd_transform = {}
    #self.transforms = {}
    #print('bdf_filename=%r' % bdf_filename)
    #key = self.case_keys[self.icase]
    #case = self.result_cases[key]

    skip_reading = self._remove_old_nastran_geometry(bdf_filename)
    # if 0:
    #     line_width = 3
    #     opacity = 1
    #     alt_grids = [
    #         ['caero', yellow, line_width, opacity],
    #         ['caero_subpanels', yellow, line_width, opacity],
    #     ]
    #     skip_reading = self._remove_old_geometry2(bdf_filename, alt_grids=alt_grids)
    if skip_reading:
        return

    #load_geom = True
    if isinstance(bdf_filename, str) and bdf_filename.lower().endswith(('.bdf', '.dat', '.pch',)):  # '.op2'
        # if we're running test_pynastrangui or we have the --test flag on the command line
        # this has (technically) nothing to do with if we're running the tests or not
        if IS_TESTING or self.gui.is_testing_flag:
            # exercise the vectorized loader under test; it raises
            # NoSuperelements for decks it can't handle yet
            try:
                self.load_nastran_geometry_vectorized(bdf_filename, plot=plot)
            except NoSuperelements:
                self.log.error('\n' + traceback.format_exc())
                self.load_nastran_geometry_unvectorized(bdf_filename, plot=plot)
        else:
            self.load_nastran_geometry_unvectorized(bdf_filename, plot=plot)
            #self.load_nastran_geometry_vectorized(bdf_filename, plot=plot)
    else:
        # BDF objects and op2/h5/obj files use the unvectorized path
        self.load_nastran_geometry_unvectorized(bdf_filename, plot=plot)
    self.gui.format = 'nastran'
def load_nastran_geometry_vectorized(self, bdf_filename, plot=True):
"""
The entry point for Nastran geometry loading.
Parameters
----------
bdf_filename : str
the Nastran filename to load
plot : bool; default=True
should the model be generated or should we wait until
after the results are loaded
"""
model_name = 'main'
#self.isubcase_name_map[None] = ['a', 'b']
reset_labels = True
if plot:
self.gui.scalar_bar_actor.VisibilityOff()
self.gui.scalar_bar_actor.Modified()
model = self._get_model_vectorized(bdf_filename)
nnodes = len(model.grid)
nspoints = len(model.spoints)
nepoints = len(model.epoints)
ncaero_cards = len(model.caeros)
ngridb = len(model.gridb)
#if model.spoints:
#spoints = sorted([spoint.nid for spoint in model.spoints.values()])
#if model.epoints:
#epoints = sorted([epoint.nid for epoint in model.epoints.values()])
ngui_nodes = nnodes + nspoints + nepoints + ngridb
if ngui_nodes + ncaero_cards == 0:
msg = 'nnodes + nspoints + nepoints = 0\n'
msg += 'card_count = %r' % str(model.card_count)
raise NoGeometry(msg)
nelements2 = len(model.elements2)
#nelements = len(model.elements) + nelements2
nelements = nelements2
nmasses = len(model.masses)
nplotels = len(model.plotels)
nrigid = len(model.rigid_elements)
#nmpc = len(model.mpcs) # really should only be allowed if we have it in a subcase
if len(model.superelement_models):
raise NoSuperelements('superelements are not supported in vectorized BDF')
if nelements + nmasses + ncaero_cards + nplotels + nrigid == 0:
msg = 'nelements + nmasses + ncaero_cards + nplotels + nrigid = 0\n'
msg += 'card_count = %r' % str(model.card_count)
raise NoGeometry(msg)
self.gui.nnodes = ngui_nodes
self.gui.nelements = nelements # approximate...
self.gui.log_info("nnodes=%i nelements=%i" % (self.nnodes, self.nelements))
msg = model.get_bdf_stats(return_type='string')
self.gui.log_debug(msg)
msg = model.get_bdf_stats(return_type='list')
# this call will break the GUI if there are a lot of lines and
# by a lot I mean 37641. It's fine for a single call.
#for msgi in msg:
#model.log.debug(msgi)
nconm2 = 0
#if 'CONM2' in model.card_count:
#nconm2 += model.card_count['CONM2']
#if 'CMASS1' in model.card_count:
#nconm2 += model.card_count['CMASS1']
#if 'CMASS2' in model.card_count:
#nconm2 += model.card_count['CMASS2']
if nconm2 > 0:
self.gui.create_alternate_vtk_grid(
'conm2', color=ORANGE_FLOAT, line_width=5, opacity=1., point_size=4,
representation='point', follower_function=None)
# Allocate grids
self.gui.grid.Allocate(self.nelements, 1000)
#self._create_caero_actors(ncaeros, ncaeros_sub, ncaeros_cs, has_control_surface)
#if nconm2 > 0:
#self.gui.alt_grids['conm2'].Allocate(nconm2, 1000)
if self.save_data:
self.model = model
#-----------------------------------------------------------------------
# nodes/coords
#print('get_xyz_in_coord')
dim_max = 1.0
xyz_cid0, nid_cp_cd = self.get_xyz_in_coord_vectorized(model, cid=0, fdtype='float32')
if xyz_cid0 is not None:
dim_max = self._points_to_vtkpoints_coords(model, xyz_cid0)
#-----------------------------------------------------------------------
#------------------------------------------------------------
# TEMP
j = 0
results = self._map_elements_vectorized(self.nid_map, model, j, dim_max,
nid_cp_cd, plot=True, xref_loads=True)
has_control_surface = False
geometry_names = []
#------------------------------------------------------------
cases = OrderedDict()
form = ['Geometry', None, []]
form0 = form[2]
subcase_id = 0
colormap = self.gui.settings.colormap
if self.gui.nnodes > 0:
icase = 0
all_nids = nid_cp_cd[:, 0]
self.gui.node_ids = all_nids
nid_res = GuiResult(subcase_id, 'NodeID', 'NodeID', 'node', all_nids,
mask_value=0,
nlabels=None,
labelsize=None,
ncolors=None,
colormap=colormap,
data_format=None,
uname='GuiResult')
cases[icase] = (nid_res, (0, 'Node ID'))
form0.append(('Node ID', icase, []))
icase += 1
nid_res = GuiResult(subcase_id, 'iNode', 'iNode', 'node',
np.arange(len(all_nids), dtype='int32'),
mask_value=0,
nlabels=None,
labelsize=None,
ncolors=None,
colormap=colormap,
data_format=None,
uname='GuiResult')
cases[icase] = (nid_res, (0, 'Node ID'))
form0.append(('iNode', icase, []))
icase += 1
# this intentionally makes a deepcopy
cds = np.array(nid_cp_cd[:, 2])
if cds.max() > 0:
cd_res = GuiResult(0, header='NodeCd', title='NodeCd',
location='node', scalar=cds, colormap=colormap)
cases[icase] = (cd_res, (0, 'NodeCd'))
form0.append(('NodeCd', icase, []))
icase += 1
if self.gui.nelements > 0:
eids_array = results['eid']
eid_res = GuiResult(subcase_id, 'ElementID', 'ElementID', 'centroid', eids_array,
mask_value=0,
nlabels=None,
labelsize=None,
ncolors=None,
colormap=colormap,
data_format=None,
uname='GuiResult')
cases[icase] = (eid_res, (0, 'ElementID'))
form0.append(('ElementID', icase, []))
icase += 1
eids_array = results['eid']
eid_res = GuiResult(subcase_id, 'iElement', 'iElement', 'centroid',
np.arange(len(eids_array), dtype='int32'),
mask_value=-1,
nlabels=None,
labelsize=None,
ncolors=None,
colormap=colormap,
data_format=None,
uname='GuiResult')
cases[icase] = (eid_res, (0, 'iElement'))
form0.append(('iElement', icase, []))
icase += 1
#is_element_dim = True
dim_array = results['dim']
if len(np.unique(dim_array)) > 1:
dim_res = GuiResult(subcase_id, 'ElementDim', 'ElementDim', 'centroid', dim_array,
mask_value=-1,
nlabels=None,
labelsize=None,
ncolors=None,
colormap=colormap,
data_format=None,
uname='GuiResult')
cases[icase] = (dim_res, (0, 'ElementDim'))
form0.append(('ElementDim', icase, []))
icase += 1
nnodes_array = results['nnodes']
if nnodes_array.max() > -1:
nnodes_res = GuiResult(subcase_id, 'NNodes/Elem', 'NNodes/Elem',
'centroid', nnodes_array,
mask_value=-1,
nlabels=None,
labelsize=None,
ncolors=None,
colormap=colormap,
data_format=None,
uname='GuiResult')
cases[icase] = (nnodes_res, (0, 'NNodes/Elem'))
form0.append(('NNodes/Elem', icase, []))
icase += 1
pids_array = results['pid']
pid_res = GuiResult(0, header='PropertyID', title='PropertyID',
location='centroid', scalar=pids_array, mask_value=0)
cases[icase] = (pid_res, (0, 'PropertyID'))
form0.append(('PropertyID', icase, []))
icase += 1
#upids = np.unique(pids_array)
unused_mid_eids_skip = []
pcomp_nplies = 0
nplies = 1
is_pshell = False
is_pcomp = False
if 'PSHELL' in model.card_count:
nplies = 4
is_pshell = True
for pid in model.get_card_ids_by_card_types(['PCOMP', 'PCOMPG'], combine=True):
prop = model.properties[pid]
pcomp_nplies = max(pcomp_nplies, prop.nplies)
is_pcomp = True
is_pshell_pcomp = (is_pshell, is_pcomp)
nplies = max(nplies, pcomp_nplies + 1)
mids = np.zeros((nelements, nplies), dtype='int32')
thickness = np.full((nelements, nplies), np.nan, dtype='float32')
#rho = np.full((nelements, nplies), np.nan, dtype='float32')
nplies = np.zeros(nelements, dtype='int32')
# materials
upids = np.unique(pids_array)
ipids = np.zeros(len(pids_array), dtype='int32')
iupid = 0
for upid in upids: # upid_old
if upid == 0:
# elements w/o properties
continue
ipid = np.where(pids_array == upid)[0]
ipids[ipid] = iupid
if len(ipid):
try:
prop = model.properties[upid]
except KeyError:
raise KeyError('pid=%r properties=%s' % (upid, str(model.properties)))
if prop.type == 'PSHELL':
nplies[ipid] = 4
thickness[ipid, 0] = prop.Thickness()
elif prop.type in ['PCOMP', 'PCOMPG']:
nplies[ipid] = prop.nplies
for iply in range(prop.nplies):
mids[ipid, iply+1] = prop.Mid(iply)
thickness[ipid, iply+1] = prop.Thickness(iply)
else:
self.log.error(f'skipping setting mids (vectorized) for {prop.type}')
iupid += 1
if len(model.conrod):
#mids[ieid, 0] = 42
pass
pid_res = GuiResult(0, header='iProperty', title='iProperty',
location='centroid', scalar=ipids, colormap=colormap)
cases[icase] = (pid_res, (0, 'iProperty'))
form0.append(('iProperty', icase, []))
icase += 1
#if nplies.max() > 0:
#nplies_res = GuiResult(0, header='Number of Plies', title='nPlies',
#location='centroid', scalar=nplies, mask_value=0)
#cases[icase] = (nplies_res, (0, 'Number of Plies'))
#form0.append(('Number of Plies', icase, []))
#icase += 1
pshell = {
'mids' : mids,
'thickness' : nplies,
}
pcomp = {
'mids' : mids,
'thickness' : nplies,
'nplies' : nplies,
}
icase = _build_materials(model, pshell, pcomp, is_pshell_pcomp,
cases, form0, icase)
#------------------------------------------------------------
# add alternate actors
self.gui._add_alt_actors(self.gui.alt_grids)
# set default representation
self._set_caero_representation(has_control_surface)
for grid_name in geometry_names:
if grid_name in self.gui.geometry_actors:
self.gui.geometry_actors[grid_name].Modified()
#self.gui.grid_mapper.SetResolveCoincidentTopologyToPolygonOffset()
if 0:
if plot:
self.gui._finish_results_io2(model_name, [form], cases, reset_labels=reset_labels)
else:
self.gui._set_results([form], cases)
    def _map_elements_vectorized(self, unused_nid_map, model, unused_j, unused_dim_max,
                                 unused_nid_cp_cd, plot=True, xref_loads=True):
        """
        Much, much faster way to add elements that directly builds the
        VTK objects rather than using for loops.

        Parameters
        ----------
        unused_nid_map : ???
            unused; kept for signature compatibility with ``map_elements``
        model : BDF()
            the BDF model object
        unused_j : int
            unused; kept for signature compatibility
        unused_dim_max : float
            unused; kept for signature compatibility
        unused_nid_cp_cd : ???
            unused; kept for signature compatibility
        plot : bool; default=True
            unused in this method
        xref_loads : bool; default=True
            unused in this method

        Returns
        -------
        results : dict or None
            per-element arrays (e.g. 'eid', 'dim', 'nnodes', 'pid') built by
            ``add_vectorized_elements``; None when the model has no elements

        TDOO: Not quite done on:
               - ???
        """
        self.gui.isubcase_name_map = {1: ['Nastran', '']}
        grid = self.gui.grid

        nelements = self.nelements
        if nelements == 0:
            # nothing to map
            return None

        # vtkIdTypeArray needs a specific integer width (int32/int64)
        idtype = get_numpy_idtype_for_vtk()
        log = self.log

        cell_types_array, cell_offsets_array, nids_list, eids_array, results = add_vectorized_elements(
            model, nelements, idtype, log)

        if cell_types_array.min() == 0:
            # a cell type of 0 means some card was counted as an element but
            # never translated to a VTK cell; build a helpful error message

            # all the non-elemental cards should be listed
            # it's not hugely important, but it cleans up dev error messages
            skip_cards = [
                'CONM2',
                #'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4', 'PLOTEL',
                'PARAM',
                #'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4', 'CVISC',
                'TABLEM1', 'TABLEM2', 'TABLEM3', 'TABLEM4',
                'TABLED1', 'TABLED2', 'TABLED3', 'TABLED4', 'TABLEST',
                'MAT1', 'MAT2', 'MAT4', 'MAT5', 'MAT8', 'MAT9', 'MAT10',
                'MATT1', 'MATT2', 'MATT8',
                'MATS1', 'MATHP',

                'PLOAD', 'PLOAD1', 'PLOAD2', 'FORCE', 'PLOAD4', 'LOAD',
                'MAT1', 'PSHEAR', 'PSHELL', 'PTUBE', 'PDAMP',
                'PELAST', 'PBEND', 'PBEAM', 'PCOMP', 'PCOMPG', 'PBAR', 'PSOLID',
                'PLPLANE', 'PLSOLID',
                'PROD', 'PELAS', 'PVISC', 'PBUSH1D', 'PBUSH2D',
                #'EPOINT',
                #'CQUADR', 'CTRIAR', 'SPOINT',
                #'CQUAD8', 'CTRIA6',
                'ENDDATA',
                'CORD2R', 'CORD2C', 'CORD2S', 'CORD1R', 'CORD1C', 'CORD1S',
                'GRID', 'SPOINT', 'EPOINT', 'TF',

                'RFORCE', 'RFORCE1', 'RFORCE2', 'FORCE', 'FORCE1', 'FORCE2',
                'MOMENT', 'MOMENT1', 'MOMENT2', 'PLOAD', 'PLOAD1', 'PLOAD2', 'PLOAD4',
                'LOAD', 'TLOAD1', 'TLOAD2', 'DLOAD', 'LSEQ', 'DAREA',
                'RLOAD1', 'RLOAD2',

                'SUPORT', 'SUPORT1', 'MPC', 'MPCADD', 'RBE1', 'RBE2', 'RBE3', 'RBAR', 'RCROSS',
                'SPCADD', 'SPC', 'SPC1', 'SPCD', 'SPCAX', 'DMIG', 'DMI', 'DMIJ', 'DMIJI', 'DMIK',

                'AELIST', 'AELINK', 'AESURF', 'AESURFS', 'AERO', 'AEROS', 'TRIM',
                'FLUTTER', 'DIVERG',
                'CAERO1', 'CAERO2', 'CAERO3', 'CAERO4', 'CAERO5',
                'PAERO1', 'PAERO2', 'PAERO3', 'PAERO4', 'PAERO5',
                'SPLINE1', 'SPLINE2', 'SPLINE3', 'SPLINE4', 'SPLINE5', 'SPLINE6', 'SPLINE7',
                'CLOAD', 'TABLES1', 'NLPARM', 'GRDSET',
            ]
            # any counted card that is not in skip_cards might be an
            # unhandled element type
            potential_elements_found = [key for key in model.card_count if key not in skip_cards]
            etypes = [
                'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4',
                'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4', 'CDAMP5', 'CVISC',
                'CBUSH', 'CBUSH1D', 'CBUSH2D',
                'CONROD', 'CROD', 'CTUBE', 'PLOTEL',
                'CBAR', 'CBEAM', 'CBEND',
                'CSHEAR',
                'CTRIA3', 'CQUAD4', 'CTRIA6', 'CQUAD8', 'CTRIAR', 'CQUADR',
                'CTETRA', 'CPENTA', 'CHEXA', 'CPYRAM',
                'CHBDYG', 'CHBDYE', 'CHBDYP',
                # nastran 95
                'CQUAD1',
            ]
            for key in potential_elements_found:
                if key not in etypes:
                    log.warning('is %s an element?' % key)

            msg = (
                'Cell Type is not defined (cell_type=0).\n'
                ' cell_types_array = %s\n'
                ' potential_elements_found=[%s]\n'
                ' nelements=%s\n\n'
                '%s\n\n' % (
                    cell_types_array,
                    ', '.join(potential_elements_found),
                    len(cell_types_array),
                    '', #str(model.elements2),
                )
            )
            print(str(model.elements2))
            #msg += model.get_bdf_stats()
            raise RuntimeError(msg)

        deep = 1
        if len(nids_list) == 1:
            # single element type -> the node ids are already one block
            nids_array = nids_list[0].ravel()
        else:
            #raise NotImplementedError(len(nids_list))
            # multiple element types -> flatten each per-type block and stack
            nids_array = np.hstack([nid_list.flatten() for nid_list in nids_list])
            #nids_array = np.array(nids_list, dtype=dtype)

        #-----------------------------------------------------------------
        # saving some data members
        self.gui.element_ids = eids_array

        #-----------------------------------------------------------------
        # build the grid
        #self.log.info('nids_array = %s' % nids_array)
        #self.log.info('cell_offsets_array = %s' % cell_offsets_array)
        #self.log.info('cell_types_array = %s' % cell_types_array)

        # Create the array of cells; deep=1 copies so the numpy buffer may die
        #print('nids_array =', nids_array)
        cells_id_type = numpy_to_vtkIdTypeArray(nids_array, deep=1)
        vtk_cells = vtk.vtkCellArray()
        vtk_cells.SetCells(nelements, cells_id_type)

        # Cell types
        vtk_cell_types = numpy_to_vtk(
            cell_types_array, deep=deep,
            array_type=vtk.vtkUnsignedCharArray().GetDataType())

        vtk_cell_offsets = numpy_to_vtk(cell_offsets_array, deep=deep,
                                        array_type=vtk.VTK_ID_TYPE)

        # `grid` was already fetched above; this re-fetch is redundant but harmless
        grid = self.gui.grid
        #grid = vtk.vtkUnstructuredGrid()
        grid.SetCells(vtk_cell_types, vtk_cell_offsets, vtk_cells)
        return results
def _get_model_vectorized(self, bdf_filename):
"""Loads the BDF/OP2 geometry"""
ext = os.path.splitext(bdf_filename)[1].lower()
punch = False
if ext == '.pch':
punch = True
self.model_type = 'nastran'
log = self.log
if ext == '.op2':
from pyNastran.dev.bdf_vectorized2.op2_geom_vectorized import (
OP2Geom as OP2Geom_)
model = OP2Geom_(make_geom=True, debug=False, log=log,
debug_file=None)
model.clear_results()
model.read_op2(op2_filename=bdf_filename)
else: # read the bdf/punch
from pyNastran.dev.bdf_vectorized2.bdf_vectorized import BDF as BDF_
model = BDF_(log=log, debug=True)
# static_elements.bdf
#skip_cards = [
#'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4', 'PLOTEL', 'PARAM',
#'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4', 'CVISC',
#'TABLEM1', 'TABLEM2', 'TABLEM3', 'TABLEM4',
#'TABLED1', 'TABLED2', 'TABLED3', 'TABLED4',
#'PLOAD', 'PLOAD1', 'PLOAD2', 'FORCE', 'PLOAD4', 'LOAD',
#'SPCADD', 'MAT1', 'PSHEAR', 'PSHELL', 'PTUBE', 'PDAMP',
#'SPC1', 'CONM2', 'PELAST', 'PBEND', 'PBEAM', 'PCOMP', 'PCOMPG', 'PBAR', 'PSOLID',
#'PBUSH1D',
#'EPOINT',
#'CQUADR', 'CTRIAR', 'SPOINT', 'PROD', 'PELAS', 'PVISC',
#'CQUAD8', 'CTRIA6',
#]
#model.disable_cards(skip_cards)
model.read_bdf(bdf_filename,
punch=punch, xref=False,
validate=True)
#print(list(key for key in model.card_count.keys() if key not in skip_cards))
#xref_loads = False
#xref_aero = len(model.caeros) > 0
#model.cross_reference(
#xref=True,
#xref_nodes=True,
#xref_elements=False,
#xref_nodes_with_elements=False,
#xref_properties=True,
#xref_masses=True,
#xref_materials=False,
#xref_loads=xref_loads,
#xref_constraints=False,
#xref_optimization=False,
#xref_aero=False,
#xref_sets=False,
#)
return model
def _points_to_vtkpoints_coords(self, model, xyz_cid0):
"""
helper method for:
- load_nastran_geometry_unvectorized
- load_nastran_geometry_vectorized
"""
points = numpy_to_vtk_points(xyz_cid0)
self.gui.grid.SetPoints(points)
self.xyz_cid0 = xyz_cid0
maxi = xyz_cid0.max(axis=0)
mini = xyz_cid0.min(axis=0)
assert len(maxi) == 3, len(maxi)
xmax, ymax, zmax = maxi
xmin, ymin, zmin = mini
dim_max = max(xmax-xmin, ymax-ymin, zmax-zmin)
#print('_create_nastran_coords')
self._create_nastran_coords(model, dim_max)
#print('done _create_nastran_coords')
self.gui.log_info("xmin=%s xmax=%s dx=%s" % (xmin, xmax, xmax-xmin))
self.gui.log_info("ymin=%s ymax=%s dy=%s" % (ymin, ymax, ymax-ymin))
self.gui.log_info("zmin=%s zmax=%s dz=%s" % (zmin, zmax, zmax-zmin))
return dim_max
    def load_nastran_geometry_unvectorized(self, bdf_filename, plot=True):
        """
        The entry point for Nastran geometry loading.

        Parameters
        ----------
        bdf_filename : str
            the Nastran filename to load
        plot : bool; default=True
            should the model be generated or should we wait until
            after the results are loaded
        """
        model_name = 'main'
        reset_labels = True
        if plot:
            # hide the scalar bar while the geometry is (re)built
            self.gui.scalar_bar_actor.VisibilityOff()
            self.gui.scalar_bar_actor.Modified()

        xref_loads = True # should be True
        model, xref_nodes = self._get_model_unvectorized(bdf_filename, xref_loads=xref_loads)

        # count all node-like cards (GRID/SPOINT/EPOINT/GRIDB) and CAEROs,
        # including those in superelements
        nnodes = len(model.nodes)
        nspoints = len(model.spoints)
        nepoints = len(model.epoints)
        ngridb = len(model.gridb)
        ncaero_cards = len(model.caeros)
        for superelement in model.superelement_models.values():
            nnodes += len(superelement.nodes)
            nspoints += len(superelement.spoints)
            nepoints += len(superelement.epoints)
            ngridb += len(superelement.gridb)
            ncaero_cards += len(superelement.caeros)

        ngui_nodes = nnodes + nspoints + nepoints + ngridb
        if ngui_nodes + ncaero_cards == 0:
            msg = 'nnodes + nspoints + nepoints = 0\n'
            msg += 'card_count = %r' % str(model.card_count)
            raise NoGeometry(msg)

        # count the element-like cards (again including superelements)
        nelements = len(model.elements)
        nmasses = len(model.masses)
        nplotels = len(model.plotels)
        nrigid = len(model.rigid_elements)
        for superelement in model.superelement_models.values():
            nelements += len(superelement.elements)
            nmasses += len(superelement.masses)
            nplotels += len(superelement.plotels)
            nrigid += len(superelement.rigid_elements)
        #nmpc = len(model.mpcs)  # really should only be allowed if we have it in a subcase
        if nelements + nmasses + ncaero_cards + nplotels + nrigid == 0:
            msg = 'nelements + nmasses + ncaero_cards + nplotels + nrigid = 0\n'
            msg += 'card_count = %r' % str(model.card_count)
            raise NoGeometry(msg)

        self.nnodes = ngui_nodes
        self.nelements = nelements  # approximate...

        # build the CAERO paneling data & (optionally) secondary actors
        out = self.make_caeros(model, create_secondary_actors=self.create_secondary_actors)
        (has_caero, caero_points, ncaeros, ncaeros_sub, ncaeros_cs,
         ncaeros_points, ncaero_sub_points,
         has_control_surface, box_id_to_caero_element_map, cs_box_ids) = out
        self.has_caero = has_caero

        #-----------------------------------------------------------------------
        self.gui.log_info("nnodes=%d nelements=%d" % (self.nnodes, self.nelements))
        msg = model.get_bdf_stats(return_type='string')
        self.gui.log_debug(msg)
        msg = model.get_bdf_stats(return_type='list')

        # this call will break the GUI if there are a lot of lines and
        # by a lot I mean 37641.  It's fine for a single call.
        #for msgi in msg:
            #model.log.debug(msgi)

        #-----------------------------------------------------------------------
        # nodes/coords
        #print('get_xyz_in_coord')
        dim_max = 1.0
        xyz_cid0 = None
        nid_cp_cd = None
        if self.gui.nnodes:
            # transform all the nodes into the global (cid=0) frame
            xyz_cid0, nid_cp_cd = self.get_xyz_in_coord(model, cid=0, fdtype='float32')
            dim_max = self._points_to_vtkpoints_coords(model, xyz_cid0)
            self.node_ids = nid_cp_cd[:, 0]
        #-----------------------------------------------------------------------
        nconm2 = _create_masses(self.gui, model, self.gui.node_ids,
                                create_secondary_actors=self.create_secondary_actors)

        # Allocate grids
        self.gui.grid.Allocate(self.nelements, 1000)
        self._create_caero_actors(ncaeros, ncaeros_sub, ncaeros_cs, has_control_surface)
        if nconm2 > 0:
            self.gui.alt_grids['conm2'].Allocate(nconm2, 1000)

        if self.save_data:
            self.model = model

        #-----------------------------------------------------------------------
        # build the main element grid and the base result cases
        j = 0
        nid_map = self.gui.nid_map
        idtype = nid_cp_cd.dtype
        nid_to_pid_map, icase, cases, form = self.map_elements(
            xyz_cid0, nid_cp_cd, nid_map, model, j, dim_max,
            plot=plot, xref_loads=xref_loads)

        # fill the aero actors allocated above
        self._create_aero(model, box_id_to_caero_element_map, cs_box_ids,
                          caero_points, ncaeros_points, ncaero_sub_points,
                          has_control_surface)

        if nconm2 > 0 and xref_nodes:
            self._set_conm_grid(nconm2, model)

        geometry_names = []
        if self.create_secondary_actors and self.make_spc_mpc_supports and xref_nodes:
            geometry_names = self.set_spc_mpc_suport_grid(model, nid_to_pid_map,
                                                          idtype)

        if self.create_secondary_actors and xref_nodes and self.gui.settings.nastran_is_bar_axes:
            icase = self._fill_bar_yz(dim_max, model, icase, cases, form)
        assert icase is not None

        #------------------------------------------------------------
        # add the per-subcase results (applied loads/pressures)
        #print('dependent_nodes =', self.dependents_nodes)
        icase = self._set_subcases_unvectorized(model, form, cases, icase, xref_nodes, xref_loads)

        name = 'main_copy'
        self.gui.duplicate_alternate_vtk_grid(
            name, 'main', color=(0., 0., 0.), line_width=5,
            opacity=0.1, is_visible=False)

        #------------------------------------------------------------
        # add alternate actors
        self.gui._add_alt_actors(self.gui.alt_grids)

        # set default representation
        self._set_caero_representation(has_control_surface)

        for grid_name in geometry_names:
            if grid_name in self.gui.geometry_actors:
                self.gui.geometry_actors[grid_name].Modified()

        #self.grid_mapper.SetResolveCoincidentTopologyToPolygonOffset()
        stop_on_failure = IS_TESTING
        build_map_centroidal_result(model, nid_map, stop_on_failure=stop_on_failure)

        # the model sidebar is a dev-only feature
        if self.create_secondary_actors and not IS_TESTING and 'dev' in __version__:
            self.sidebar_nastran = ModelSidebar(self.gui, nastran_io=self)
            self.sidebar_nastran.set_model(model)

            self.res_dock_nastran = QDockWidget("Nastran Model", self)
            self.res_dock_nastran.setObjectName("nastran_model")
            self.res_dock_nastran.setWidget(self.sidebar_nastran)
            self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.res_dock_nastran)
            #self.res_dock.setWidget(self.res_widget)
        if plot:
            self.gui._finish_results_io2(model_name, [form], cases, reset_labels=reset_labels)
        else:
            self.gui._set_results([form], cases)
def update_caeros(self, obj):
"""the update call for the ModifyMenu"""
model = self.model # type: BDF
xref_errors = {}
model._uncross_reference_aero()
model._cross_reference_aero(check_caero_element_ids=False)
obj.uncross_reference()
obj.safe_cross_reference(model, xref_errors)
out = self.make_caeros(model)
(has_caero, caero_points, ncaeros, ncaeros_sub, ncaeros_cs,
ncaeros_points, ncaero_sub_points,
has_control_surface, box_id_to_caero_element_map, cs_box_ids) = out
self.has_caero = has_caero
self._create_aero(model, box_id_to_caero_element_map, cs_box_ids,
caero_points, ncaeros_points, ncaero_sub_points,
has_control_surface)
self.Render()
    def _create_aero(self, model: BDF,
                     box_id_to_caero_element_map: Dict[int, Any],
                     cs_box_ids,
                     caero_points,
                     ncaeros_points: int,
                     ncaero_sub_points: int,
                     has_control_surface: bool):
        """
        Fills the previously allocated aero actors: splines, the 'caero' and
        'caero_subpanels' grids, and one grid per control surface.  Each
        successive control-surface grid is nudged further in z to avoid
        z-fighting with the panels it lies on.
        """
        # fill grids
        zfighting_offset0 = 0.001
        zfighting_offset = zfighting_offset0
        self._create_splines(model, box_id_to_caero_element_map, caero_points)
        if 'caero' in self.gui.alt_grids:
            self.set_caero_grid(ncaeros_points, model)
            self.set_caero_subpanel_grid(ncaero_sub_points, model)
            if has_control_surface:
                # the combined "all control surfaces" grid
                cs_name = 'caero_control_surfaces'
                self.set_caero_control_surface_grid(
                    cs_name, cs_box_ids[cs_name],
                    box_id_to_caero_element_map, caero_points,
                    zfighting_offset=zfighting_offset)
                zfighting_offset += zfighting_offset0

                # sort the control surfaces
                labels_to_aesurfs = {aesurf.label: aesurf for aesurf in model.aesurf.values()}
                if len(labels_to_aesurfs) != len(model.aesurf):
                    # duplicate AESURF labels would silently drop a surface
                    msg = (
                        'Expected same number of label->aesurf as aid->aesurf\n'
                        'labels_to_aesurfs = %r\n'
                        'model.aesurf = %r\n' % (labels_to_aesurfs, model.aesurf))
                    raise RuntimeError(msg)

                # one grid per AESURF, in label order, each at its own z offset
                for unused_label, aesurf in sorted(labels_to_aesurfs.items()):
                    #reset_labels = False
                    cs_name = '%s_control_surface' % aesurf.label
                    self.set_caero_control_surface_grid(
                        cs_name, cs_box_ids[cs_name],
                        box_id_to_caero_element_map, caero_points, note=aesurf.label,
                        zfighting_offset=zfighting_offset)
                    zfighting_offset += zfighting_offset0
def _set_subcases_unvectorized(self, model, form, cases, icase, xref_nodes, xref_loads):
"""helper for ``load_nastran_geometry_unvectorized``"""
settings = self.gui.settings # type: Settings
colormap = settings.colormap
form0 = form[2]
assert icase is not None
nsubcases = len(model.subcases)
for subcase_idi, subcase in sorted(model.subcases.items()):
if not xref_nodes:
continue
subcase_id = subcase_idi
if subcase_id == 0 and nsubcases == 1:
subcase_id = 1
elif subcase_id == 0:
continue
self.gui.log_debug('NastranIOv subcase_id = %s' % subcase_id)
subtitle = ''
if 'SUBTITLE' in subcase:
subtitle, options = subcase.get_parameter('SUBTITLE')
del options
load_str = 'Load Case=%i' % subcase_id if subtitle == '' else 'Load Case=%i; %s' % (
subcase_id, subtitle)
formi = (load_str, None, [])
formii = formi[2]
assert icase is not None
if self.normals is not None and self.plot_applied_loads:
icase = self._plot_applied_loads(
model, cases, formii, icase, subcase_idi, xref_loads=xref_loads,
colormap=colormap,
)
#plot_pressures = False
plot_pressures = True
else:
plot_pressures = True
if plot_pressures: # and self._plot_pressures:
try:
icase = self._plot_pressures(
model, cases, formii, icase, subcase_idi)
except KeyError:
s = StringIO()
traceback.print_exc(file=s)
sout = s.getvalue()
self.gui.log_error(sout)
print(sout)
if len(formii):
form0.append(formi)
return icase
def _create_caero_actors(self, ncaeros, ncaeros_sub, ncaeros_cs, has_control_surface):
"""
This just creates the following actors. It does not fill them.
These include:
- caero
- caero_subpanels
- caero_control_surfaces
"""
if self.has_caero:
gui = self.gui
gui.create_alternate_vtk_grid(
'caero', color=YELLOW_FLOAT, line_width=3, opacity=1.0,
representation='toggle', is_visible=True, is_pickable=False)
gui.create_alternate_vtk_grid(
'caero_subpanels', color=YELLOW_FLOAT, line_width=3, opacity=1.0,
representation='toggle', is_visible=False, is_pickable=False)
gui.alt_grids['caero'].Allocate(ncaeros, 1000)
gui.alt_grids['caero_subpanels'].Allocate(ncaeros_sub, 1000)
if has_control_surface:
gui.alt_grids['caero_control_surfaces'].Allocate(ncaeros_cs, 1000)
def _set_caero_representation(self, has_control_surface: bool) -> None:
"""
Parameters
----------
has_control_surface : bool
is there a control surface
"""
geometry_actors = self.gui.geometry_actors
if 'caero_control_surfaces' in geometry_actors:
self.gui.geometry_properties['caero_control_surfaces'].opacity = 0.5
if 'caero' not in geometry_actors:
return
geometry_actors['caero'].Modified()
geometry_actors['caero_subpanels'].Modified()
if has_control_surface:
geometry_actors['caero_control_surfaces'].Modified()
if hasattr(geometry_actors['caero'], 'Update'):
geometry_actors['caero'].Update()
if hasattr(geometry_actors['caero_subpanels'], 'Update'):
geometry_actors['caero_subpanels'].Update()
if has_control_surface and hasattr(geometry_actors['caero_subpanels'], 'Update'):
geometry_actors['caero_control_surfaces'].Update()
    def _create_splines(self, model: BDF, box_id_to_caero_element_map: Dict[int, int], caero_points):
        """
        Sets the following actors:
          - spline_%s_structure_points % spline_id
          - spline_%s_boxes % spline_id

        Parameters
        ----------
        model : BDF()
            the bdf model
        box_id_to_caero_element_map : dict[box_id] : box_index
            maps a CAEROx box id to its index in the aero element array
        caero_points : ???
            the xyz points of the CAERO paneling
        """
        stored_msg = []
        if model.splines:
            # z-offset index; even/odd slots alternate between the spline
            # panels and the spline points:
            # 0 - caero / caero_subpanel
            # 1 - control surface
            # 3/5/7/... - spline points
            # 2/4/6/... - spline panels
            iaero = 2
            for spline_id, spline in sorted(model.splines.items()):
                setg_ref = spline.setg_ref
                if setg_ref is None:
                    # the SET1/SET3 could not be cross referenced; log and
                    # skip this spline rather than crashing the load
                    msg = 'error cross referencing SPLINE:\n%s' % spline.rstrip()
                    #n, filename = log_properties(1)
                    #print(filename, n)
                    #stored_msg.append(msg)
                    self.log.error(msg)
                    #raise RuntimeError(msg)
                    continue
                else:
                    structure_points = setg_ref.get_ids()

                try:
                    aero_box_ids = spline.aero_element_ids
                except Exception:
                    # dump the object so the missing attribute is debuggable
                    print(spline.object_attributes())
                    print(spline.object_methods())
                    raise
                if spline.type != 'SPLINE3_ZAERO':
                    assert len(aero_box_ids) > 0, spline

                # the control surfaces all lie perfectly on top of each other
                # such that we have z fighting, so based on the aero index,
                # we calculate a z offset.
                # NOTE(review): this first offset is recomputed below before
                # it is ever used (_add_nastran_nodes_to_grid does not take it)
                zfighting_offset = 0.0001 * (iaero + 1)
                grid_name = 'spline_%s_structure_points' % spline_id
                self.gui.create_alternate_vtk_grid(
                    grid_name, color=BLUE_FLOAT, opacity=1.0, point_size=5,
                    representation='point', is_visible=False)
                msg = ', which is required by %r' % grid_name
                stored_msgi = self._add_nastran_nodes_to_grid(
                    grid_name, structure_points, model, msg, store_msg=True)

                zfighting_offset = 0.0001 * (iaero + 2)
                grid_name = 'spline_%s_boxes' % spline_id
                self.gui.create_alternate_vtk_grid(
                    grid_name, color=BLUE_FLOAT, opacity=0.3,
                    line_width=4,
                    representation='toggle', is_visible=False)
                stored_msgi2 = self.set_caero_control_surface_grid(
                    grid_name, aero_box_ids,
                    box_id_to_caero_element_map, caero_points,
                    zfighting_offset=zfighting_offset, store_msg=True)
                iaero += 2
                # accumulate any "missing box" warnings; emit them all at once
                if stored_msgi:
                    stored_msg.append(stored_msgi)
                if stored_msgi2:
                    stored_msg.append(stored_msgi2)

        if stored_msg:
            model.log.warning('\n' + '\n'.join(stored_msg))
    def make_caeros(self, model: BDF,
                    create_secondary_actors=True) -> Tuple[np.ndarray, int, int, int, int, bool,
                                                           Dict[int, int], List[int]]:
        """
        Creates the CAERO panel inputs including:
         - caero
         - caero_subpanels
         - caero_control_surfaces
         - N control surfaces

        Parameters
        ----------
        model : BDF()
            the bdf model
        create_secondary_actors : bool; default=True
            passed through to ``build_caero_paneling``

        Returns
        -------
        caero_points : (N_aero_points, 3) float ndarray
            the xyz points for the aero panels
            N_aero_points can be 0
        ncaeros : int
            the number of aero sub-panels?
        ncaeros_sub : int
            ???
        ncaeros_cs : int
            ???
        ncaeros_points : int
            number of points for the caero coarse grid
        ncaero_sub_points : int
            number of points for the caero fine/subpanel grid
        has_control_surface : bool
            is there a control surface
        box_id_to_caero_element_map : dict[box_id] = box_index
            used to map the CAEROx box id to index in the ???
            (aero panel elements) array, which will be used with
            cs_box_ids
        cs_box_ids : dict[control_surface_name] : List[panel ids]
            list of panels used by each aero panel
        """
        # build_caero_paneling does the heavy lifting; this method only
        # creates the empty actors that _create_aero will later fill
        all_control_surface_name, caero_control_surfaces, out = build_caero_paneling(
            model, create_secondary_actors)
        if all_control_surface_name:
            self.gui.create_alternate_vtk_grid(
                'caero_control_surfaces', color=PINK_FLOAT, line_width=5, opacity=1.0,
                representation='surface', is_visible=False)
            # one actor per individual control surface
            for cs_name in caero_control_surfaces:
                self.gui.create_alternate_vtk_grid(
                    cs_name, color=PINK_FLOAT, line_width=5, opacity=0.5,
                    representation='surface')
        return out
    def set_caero_grid(self, ncaeros_points: int, model: BDF) -> None:
        """
        Sets the CAERO panel geometry.

        Parameters
        ----------
        ncaeros_points : int
            number of points used by the 'caero' actor
        model : BDF()
            the bdf model
        """
        gui = self.gui
        points = vtk.vtkPoints()
        points.SetNumberOfPoints(ncaeros_points)

        # track the bounding box of all panels for the log summary below
        max_cpoints = []
        min_cpoints = []

        # nudge the panels up slightly so they don't z-fight with the structure
        zfighting_offset = 0.0001
        caero_grid = gui.alt_grids['caero']
        j = 0
        for unused_eid, element in sorted(model.caeros.items()):
            if isinstance(element, (CAERO1, CAERO3, CAERO4, CAERO5, CAERO7)):
                # wing panel: one quad per CAERO card (the coarse grid)
                cpoints = element.get_points()
                cpoints[0][2] += zfighting_offset
                cpoints[1][2] += zfighting_offset
                max_cpoints.append(np.array(cpoints).max(axis=0))
                min_cpoints.append(np.array(cpoints).min(axis=0))

                elem = vtkQuad()
                point_ids = elem.GetPointIds()
                point_ids.SetId(0, j)
                point_ids.SetId(1, j + 1)
                point_ids.SetId(2, j + 2)
                point_ids.SetId(3, j + 3)
                points.InsertPoint(j, *cpoints[0])
                points.InsertPoint(j + 1, *cpoints[1])
                points.InsertPoint(j + 2, *cpoints[2])
                points.InsertPoint(j + 3, *cpoints[3])
                caero_grid.InsertNextCell(elem.GetCellType(), point_ids)
                j += 4
            elif isinstance(element, (CAERO2, BODY7)):
                # slender body
                #if 0:  # pragma: no cover
                    # 1D version
                    #cpoints = element.get_points()
                    #cpoints[:, 2] += zfighting_offset
                    #max_cpoints.append(np.array(cpoints).max(axis=0))
                    #min_cpoints.append(np.array(cpoints).min(axis=0))

                    #elem = vtk.vtkLine()
                    #point_ids = elem.GetPointIds()
                    #point_ids.SetId(0, j)
                    #point_ids.SetId(1, j + 1)
                    #points.InsertPoint(j, *cpoints[0])
                    #points.InsertPoint(j + 1, *cpoints[1])
                    #j += 2
                    #caero_grid.InsertNextCell(elem.GetCellType(), point_ids)
                #else:
                # 3D version: quads wrapping the body surface
                xyz, elems = element.get_points_elements_3d()
                assert xyz is not None, element
                xyz[:, 2] += zfighting_offset
                for elemi in elems:
                    elem = vtkQuad()
                    point_ids = elem.GetPointIds()
                    point_ids.SetId(0, j)
                    point_ids.SetId(1, j + 1)
                    point_ids.SetId(2, j + 2)
                    point_ids.SetId(3, j + 3)
                    n1, n2, n3, n4 = elemi
                    points.InsertPoint(j, *xyz[n1])
                    points.InsertPoint(j + 1, *xyz[n2])
                    points.InsertPoint(j + 2, *xyz[n3])
                    points.InsertPoint(j + 3, *xyz[n4])
                    #cpoints = element.get_points()
                    #cpoints[0][2] += zfighting_offset
                    #cpoints[1][2] += zfighting_offset
                    #max_cpoints.append(np.array(cpoints).max(axis=0))
                    #min_cpoints.append(np.array(cpoints).min(axis=0))
                    caero_grid.InsertNextCell(elem.GetCellType(), point_ids)
                    j += 4
            else:
                # unsupported CAERO type; skip rather than crash
                gui.log_info("skipping %s" % element.type)

        if ncaeros_points and len(max_cpoints):
            gui.log_info('CAERO.max = %s' % np.vstack(max_cpoints).max(axis=0))
            gui.log_info('CAERO.min = %s' % np.vstack(min_cpoints).min(axis=0))
        caero_grid.SetPoints(points)
        #gui.alt_grids['caero']
        #edge_mapper.SetResolveCoincidentTopologyToPolygonOffset()
def set_caero_subpanel_grid(self, ncaero_sub_points: int, model: BDF) -> None:
"""
Sets the CAERO sub-panel geometry.
Parameters
----------
ncaero_sub_points : int
number of points used by the 'caero_subpanels' actor
model : BDF()
the bdf model
"""
nodes, elements = get_caero_subpanel_grid(model)
if elements.shape[0] == 0:
return
grid = self.gui.alt_grids['caero_subpanels']
quad_etype = 9
create_vtk_cells_of_constant_element_type(grid, elements, quad_etype)
vtk_points = numpy_to_vtk_points(nodes, points=None, dtype='<f', deep=1)
grid.SetPoints(vtk_points)
return
    def set_caero_control_surface_grid(self, name: str, cs_box_ids: List[int],
                                       box_id_to_caero_element_map: Dict[int, Any],
                                       caero_points: np.ndarray,
                                       note: Optional[str]=None,
                                       zfighting_offset: float=0.001,
                                       store_msg: bool=False) -> str:
        """
        Creates a single CAERO control surface?

        Parameters
        ----------
        name : str
            the name of the alternate grid to fill
        cs_box_ids : List[int]
            the ids of the box as seen on the AESURF? SET card?
        box_id_to_caero_element_map : Dict[key]=value
            key : ???
                ???
            value : ???
                ???
        caero_points : (ncaero_points, 3)
            the xyz coordinates used by the CAEROx actor
        note : str / None
            None : no label will be used
            str : the name of the control surface card will be placed
            at the centroid of the panel
        zfighting_offset : float
            z-fighting is when two elements "fight" for who is in front
            leading.  The standard way to fix this is to bump the
            element.
        store_msg : bool; default=False
            accumulate warnings as a string instead of logging directly

        Returns
        -------
        stored_msg : str
            any "missing box" warnings (empty unless store_msg is set)
        """
        gui = self.gui
        log = self.gui.log
        boxes_to_show, stored_msg = check_for_missing_control_surface_boxes(
            name, cs_box_ids, box_id_to_caero_element_map, log,
            store_msg=store_msg)
        #if not boxes_to_show:
            #print('*%s' % name)
            #print('*%s' % boxes_to_show)
            #return
        #if name not in gui.alt_grids:
            #print('**%s' % name)
            #return

        grid = gui.alt_grids[name]
        grid.Reset()
        # `elements` is returned but unused here; the cells are inserted
        # into `grid` by get_caero_control_surface_grid itself
        all_points, elements, centroids, areas = get_caero_control_surface_grid(
            grid,
            box_id_to_caero_element_map,
            caero_points, boxes_to_show, log)

        if len(all_points) == 0:
            # no valid boxes -> remove the empty actors (both the boxes
            # grid and its matching structure-points grid)
            log.error('deleting %r' % name)

            # name = spline_1000_boxes
            sname = name.split('_')
            sname[-1] = 'structure_points'

            # points_name = spline_1000_structure_points
            points_name = '_'.join(sname)
            log.error('deleting %r' % points_name)
            gui.remove_alt_grid(name, remove_geometry_property=True)
            gui.remove_alt_grid(points_name, remove_geometry_property=True)
            return stored_msg

        # combine all the points
        all_points_array = np.vstack(all_points)

        #vtk_etype = 9 # vtkQuad
        #create_vtk_cells_of_constant_element_type(grid, elements, vtk_etype)

        # shift z to remove z-fighting with caero in surface representation
        # NOTE(review): this shifts columns 1 AND 2 (y and z) even though the
        # comment above only mentions z -- confirm whether [:, 2] alone
        # was intended
        all_points_array[:, [1, 2]] += zfighting_offset

        # get the vtk object
        vtk_points = numpy_to_vtk_points(all_points_array, deep=0)
        grid.SetPoints(vtk_points)

        #if missing_boxes:
            #msg = 'Missing CAERO AELIST boxes: ' + str(missing_boxes)
            #gui.log_error(msg)
        if note:
            # place the control-surface label at the area-weighted centroid
            # points_list (15, 4, 3) = (elements, nodes, 3)
            x, y, z = np.average(centroids, weights=areas, axis=0)
            text = str(note)
            #slot = gui.label_actors[-1]
            slot = gui.reset_label_actors(name)
            annotation = gui.create_annotation(text, x, y, z)
            slot.append(annotation)
        return stored_msg
def _set_conm_grid(self, nconm2, model):
    """
    creates the mass secondary actor called:
     - conm2

    which includes:
     - CONM2
     - CMASS1
     - CMASS2

    because it's really a "mass" actor

    Parameters
    ----------
    nconm2 : int
        the number of mass points to allocate in the vtkPoints object
    model : BDF
        the model; ``model.masses`` is iterated
    """
    if not self.create_secondary_actors:
        return
    j = 0
    points = vtk.vtkPoints()
    points.SetNumberOfPoints(nconm2)

    #sphere_size = self._get_sphere_size(dim_max)
    alt_grid = self.gui.alt_grids['conm2']
    for unused_eid, element in sorted(model.masses.items()):
        if isinstance(element, CONM2):
            # the CONM2 is drawn at the node position plus its offset
            xyz_nid = element.nid_ref.get_position()
            centroid = element.offset(xyz_nid)
            #centroid_old = element.Centroid()
            #assert np.all(np.allclose(centroid_old, centroid)), 'centroid_old=%s new=%s' % (centroid_old, centroid)

            #d = norm(xyz - c)
            points.InsertPoint(j, *centroid)

            #if 1:
            elem = vtk.vtkVertex()
            point_ids = elem.GetPointIds()
            point_ids.SetId(0, j)
            #else:
                #elem = vtk.vtkSphere()
                #elem.SetRadius(sphere_size)
                #elem.SetCenter(points.GetPoint(j))

            alt_grid.InsertNextCell(elem.GetCellType(), point_ids)
            j += 1
        elif element.type in ('CMASS1', 'CMASS2'):
            # scalar masses are drawn at the element centroid
            centroid = element.Centroid()
            #n1 = element.G1()
            #n2 = element.G2()
            #print('n1=%s n2=%s centroid=%s' % (n1, n2, centroid))
            points.InsertPoint(j, *centroid)

            elem = vtk.vtkVertex()
            point_ids = elem.GetPointIds()
            point_ids.SetId(0, j)
            alt_grid.InsertNextCell(elem.GetCellType(), point_ids)
            j += 1
        else:
            # any other mass card is logged and skipped
            self.gui.log_info("skipping %s" % element.type)
    alt_grid.SetPoints(points)
def set_spc_mpc_suport_grid(self, model, nid_to_pid_map, idtype):
    """
    for each subcase, make secondary actors including:
     - spc_id=spc_id
     - mpc_id=mpc_id              (includes rigid elements)
     - mpc_dependent_id=mpc_id    (includes rigid elements)
     - mpc_independent_id=mpc_id  (includes rigid elements)
     - suport_id=suport1_id       (includes SUPORT/SUPORT1)

    Parameters
    ----------
    model : BDF
        the model
    nid_to_pid_map : dict / None
        node id -> property ids; forwarded to ``_fill_spc`` so solid-only
        constrained nodes can be filtered
    idtype : str
        numpy integer dtype used for the MPC line arrays

    Returns
    -------
    geometry_names : List[str]
        the names of the actors that were created

    TODO: consider changing the varying ids to huh???
    """
    spc_names = []
    mpc_names = []
    suport_names = []

    #print('getting rigid')
    rigid_lines = model._get_rigid()

    spc_ids_used = set()
    mpc_ids_used = set()
    suport1_ids_used = set()

    # map each SPC/MPC id to the subcases that reference it, so the
    # actor name can show where the constraint set is used
    spc_to_subcase = defaultdict(list)
    mpc_to_subcase = defaultdict(list)
    #suport1_to_subcase = defaultdict(list)
    for subcase_id, subcase in sorted(model.subcases.items()):
        if 'SPC' in subcase:
            spc_id = subcase.get_parameter('SPC')[0]
            if spc_id is not None:
                nspcs = model.card_count['SPC'] if 'SPC' in model.card_count else 0
                nspc1s = model.card_count['SPC1'] if 'SPC1' in model.card_count else 0
                nspcds = model.card_count['SPCD'] if 'SPCD' in model.card_count else 0

                ## TODO: this line seems too loose...
                ## TODO: why aren't SPCDs included?
                if nspcs + nspc1s + nspcds:
                    spc_to_subcase[spc_id].append(subcase_id)
        if 'MPC' in subcase:
            mpc_id = subcase.get_parameter('MPC')[0]
            if mpc_id is not None:
                ## TODO: this line seems too loose
                nmpcs = model.card_count['MPC'] if 'MPC' in model.card_count else 0
                if nmpcs:
                    mpc_to_subcase[mpc_id].append(subcase_id)

    for spc_id in chain(model.spcs, model.spcadds):
        spc_name = 'SPC=%i' % (spc_id)
        # bugfix: this used to test ``spc_id in mpc_to_subcase`` while
        # reading ``spc_to_subcase[spc_id]``, so the subcase list never
        # showed up on the SPC actor name
        if spc_id in spc_to_subcase:
            subcases = spc_to_subcase[spc_id]
            spc_name += ': Subcases='
            spc_name += ', '.join(str(subcase_id) for subcase_id in subcases)
        spc_names += self._fill_spc(spc_id, spc_name, model, nid_to_pid_map)

    for mpc_id in chain(model.mpcs, model.mpcadds):
        depname = 'MPC=%i_dependent' % mpc_id
        indname = 'MPC=%i_independent' % mpc_id
        linename = 'MPC=%i_lines' % mpc_id
        if mpc_id in mpc_to_subcase:
            subcases = mpc_to_subcase[mpc_id]
            mpc_name = ': Subcases='
            mpc_name += ', '.join(str(subcase_id) for subcase_id in subcases)
            depname += mpc_name
            indname += mpc_name
            linename += mpc_name

        lines = get_mpc_node_ids(model, mpc_id, stop_on_failure=False)
        lines2 = list(lines)
        mpc_names += self._fill_dependent_independent(
            mpc_id, model, lines2,
            depname, indname, linename, idtype)

    if 0:  # pragma: no cover
        # dead code kept for reference: the old per-subcase actor creation
        for subcase_id, subcase in sorted(model.subcases.items()):
            if 'SPC' in subcase:
                spc_id = subcase.get_parameter('SPC')[0]
                if spc_id is not None and spc_id not in spc_ids_used:
                    spc_ids_used.add(spc_id)
                    nspcs = model.card_count['SPC'] if 'SPC' in model.card_count else 0
                    nspc1s = model.card_count['SPC1'] if 'SPC1' in model.card_count else 0
                    nspcds = model.card_count['SPCD'] if 'SPCD' in model.card_count else 0

                    ## TODO: this line seems too loose...
                    ## TODO: why aren't SPCDs included?
                    if nspcs + nspc1s + nspcds:
                        spc_name = 'spc_id=%i' % spc_id
                        spc_names += self._fill_spc(spc_id, spc_name, model, nid_to_pid_map)

            # rigid body elements and MPCs
            if 'MPC' in subcase:
                mpc_id = subcase.get_parameter('MPC')[0]
                if mpc_id is not None and mpc_id not in mpc_ids_used:
                    mpc_ids_used.add(mpc_id)

                    ## TODO: this line seems too loose
                    nmpcs = model.card_count['MPC'] if 'MPC' in model.card_count else 0
                    if nmpcs:
                        lines = get_mpc_node_ids(model, mpc_id, stop_on_failure=False)
                        lines2 = list(lines)
                        depname = 'mpc_id=%i_dependent' % mpc_id
                        indname = 'mpc_id=%i_independent' % mpc_id
                        linename = 'mpc_id=%i_lines' % mpc_id
                        mpc_names += self._fill_dependent_independent(
                            mpc_id, model, lines2,
                            depname, indname, linename, idtype)

            # SUPORTs are node/dofs that deconstrained to allow rigid body motion
            # SUPORT1s are subcase-specific SUPORT cards
            if 'SUPORT1' in subcase.params:  ## TODO: should this be SUPORT?
                suport_id = subcase.get_parameter('SUPORT1')[0]

                # TODO: is this line correct???
                if 'SUPORT' in model.card_count or 'SUPORT1' in model.card_count:

                    # TODO: this "if block" seems unnecessary
                    if suport_id is not None and suport_id not in suport1_ids_used:
                        # SUPORT1 / SUPORT
                        suport1_ids_used.add(suport_id)
                        suport_name = self._fill_suport(suport_id, subcase_id, model)
                        suport_names.append(suport_name)

    # create a SUPORT actor if there are no SUPORT1s
    # otherwise, we already included it in suport_id=suport_id
    if len(suport_names) == 0 and model.suport:
        # handle SUPORT without SUPORT1
        ids = []
        for suport in model.suport:
            idsi = suport.node_ids
            ids += idsi
        grid_name = 'SUPORT'
        # NOTE(review): ``ids`` is collected but never added to the grid and
        # ``grid_name`` is never appended to ``suport_names``, so this actor
        # stays empty -- confirm whether the nodes should be added here
        self.gui.create_alternate_vtk_grid(
            grid_name, color=RED_FLOAT, opacity=1.0, point_size=4,
            representation='point', is_visible=True)

    if len(rigid_lines):
        # handle RBEs without MPCs
        mpc_id = 0
        depname = 'rigid_dependent'
        indname = 'rigid_independent'
        linename = 'rigid_lines'
        mpc_names += self._fill_dependent_independent(
            mpc_id, model, rigid_lines,
            depname, indname, linename, idtype)

    geometry_names = spc_names + mpc_names + suport_names
    return geometry_names
def _fill_spc(self, spc_id, spc_name, model, nid_to_pid_map):
    """creates the spc secondary actors"""
    self.gui.create_alternate_vtk_grid(
        spc_name, color=PURPLE_FLOAT, line_width=5, opacity=1.,
        point_size=5, representation='point', is_visible=False)

    # node_ids = model.get_SPCx_node_ids(spc_id)
    node_ids_c1 = model.get_SPCx_node_ids_c1(
        spc_id, stop_on_failure=False)

    constrained_nids = []
    for nid, c1 in node_ids_c1.items():
        if nid_to_pid_map is not None:
            # is this node attached to anything that isn't a solid?
            has_non_solid = False
            for pid in nid_to_pid_map[nid]:
                if pid == 0:
                    # CONROD
                    continue
                if pid is None:
                    print('pid is None in _fill_spc...')
                    continue
                if pid < 0:
                    print('pid=%s in _fill_spc...' % pid)
                    continue
                if model.properties[pid].type not in ('PSOLID', 'PLSOLID'):
                    has_non_solid = True
            if not has_non_solid:
                # don't include 456 constraints if they're ONLY on solid elemetns
                # if we had any bar/plate/etc. elements that use this node, we'll plot the node
                if '1' not in c1 and '2' not in c1 and '3' not in c1:
                    continue
        constrained_nids.append(nid)

    unique_nids = np.unique(constrained_nids)
    self._add_nastran_nodes_to_grid(
        spc_name, unique_nids, model,
        ', which is required by %r' % spc_name)
    return [spc_name]
def create_bar_pin_flag_text(self, unused_pin_flag=None):
    """
    Lists the pin flag for each element (that has a pin flag)
    self.nid_release_map is set by ``_fill_bar_yz``

    TODO: needs a better interface in the gui
    """
    #result_name = self.icase
    result_name = str('ElementID')
    marked_nids = []
    marked_text = []
    for nid, data in sorted(self.nid_release_map.items()):
        # group the element ids by pin flag
        eids_by_flag = defaultdict(list)
        for eid, pin_flagi in data:
            eids_by_flag[pin_flagi].append(str(eid))

        # one line per pin flag: "<flag>-<eid1>, <eid2>, ..."
        label_lines = ['%s-%s' % (pin_flagi, ', '.join(eid_strs))
                       for pin_flagi, eid_strs in sorted(eids_by_flag.items())]
        marked_nids.append(nid)
        marked_text.append('\n'.join(label_lines))
    self.gui.mark_nodes(marked_nids, result_name, marked_text)
def _fill_bar_yz(self, unused_dim_max, model, icase, cases, form, debug=False):
    """
    plots the y, z vectors for CBAR & CBEAM elements

    Parameters
    ----------
    unused_dim_max : float
        unused; kept for the call signature
    model : BDF
        the model; CBAR/CBEAM card ids are pulled from it
    icase : int
        the current result-case index
    cases : dict
        icase -> (GuiResult, (subcase_id, title)); updated in place
    form : List
        the Results sidebar data; ``form[2]`` gets the 'CBAR / CBEAM' group
    debug : bool; default=False
        print extra diagnostic output

    Returns
    -------
    icase : int
        the next unused case index
    """
    card_types = ['CBAR', 'CBEAM']
    out = model.get_card_ids_by_card_types(card_types=card_types)
    bar_beam_eids = out['CBAR'] + out['CBEAM']
    bar_pid_to_eids = get_beam_sections_map(model, bar_beam_eids)
    bar_nids = get_bar_nids(model, bar_beam_eids)
    #ugrid_temp = create_3d_beams(model, bar_pid_to_eids)

    self.bar_eids = {}
    self.bar_lines = {}
    if len(bar_beam_eids) == 0:
        return icase
    scale = 0.15

    # TODO: this should be reworked
    bar_nids, bar_types, nid_release_map = self._get_bar_yz_arrays(
        model, bar_beam_eids, bar_pid_to_eids,
        scale, debug)
    self.nid_release_map = nid_release_map

    bar_nids = list(bar_nids)
    self.gui.create_alternate_vtk_grid(
        'Bar Nodes', color=RED_FLOAT, line_width=1, opacity=1.,
        point_size=5, representation='point', bar_scale=0., is_visible=False)
    msg = ", which is required by 'Bar Nodes'"
    self._add_nastran_nodes_to_grid('Bar Nodes', bar_nids, model, msg)

    geo_form = form[2]
    bar_form = ('CBAR / CBEAM', None, [])
    #print('geo_form =', geo_form)
    #bar_types2 = {}

    # first pass: collect every bar/beam eid so all bar elements can be
    # marked is_type=0 (other bar) vs is_type=1 (this bar type) below
    bar_eids = []
    for bar_type, data in sorted(bar_types.items()):
        eids, lines_bar_y, lines_bar_z = data
        if len(eids):
            bar_eids.append(eids)
    ibars = 0
    if bar_eids:
        bar_eids = np.hstack(bar_eids)
        ibars = np.searchsorted(self.element_ids, bar_eids)

    # second pass: build the y/z orientation-vector actors and one
    # is_<bar_type> centroidal result per bar type
    for bar_type, data in sorted(bar_types.items()):
        eids, lines_bar_y, lines_bar_z = data
        if len(eids):
            if debug: # pragma: no cover
                print('bar_type = %r' % bar_type)
                print('eids = %r' % eids)
                print('all_eids = %r' % self.element_ids.tolist())
            # if bar_type not in ['ROD', 'TUBE']:
            bar_y = bar_type + '_y'
            bar_z = bar_type + '_z'

            self.gui.create_alternate_vtk_grid(
                bar_y, color=GREEN_FLOAT, line_width=5, opacity=1.,
                point_size=5, representation='bar', bar_scale=scale, is_visible=False)
            self.gui.create_alternate_vtk_grid(
                bar_z, color=BLUE_FLOAT, line_width=5, opacity=1.,
                point_size=5, representation='bar', bar_scale=scale, is_visible=False)

            self._add_nastran_lines_xyz_to_grid(bar_y, lines_bar_y, eids)
            self._add_nastran_lines_xyz_to_grid(bar_z, lines_bar_z, eids)

            # form = ['Geometry', None, []]
            # mask_value=-1 hides non-bar elements in the result
            i = np.searchsorted(self.element_ids, eids)
            is_type = np.full(self.element_ids.shape, -1, dtype='int32')
            is_type[ibars] = 0
            try:
                is_type[i] = 1
            except Exception:
                #print('self.element_ids =', self.element_ids)
                #print('eids =', eids)
                # searchsorted returns len(element_ids) for eids that are
                # beyond the last element id; report which eids failed
                ii = np.where(i == len(self.element_ids))[0]
                print('ii = %s' % ii)
                print('failed eids =', eids[ii])
                #assert self.element_ids[i] == eids
                raise
            bar_form[2].append(['is_%s' % bar_type, icase, []])

            msg = 'is_%s' % bar_type
            type_res = GuiResult(0, header=msg, title=msg,
                                 location='centroid', scalar=is_type, mask_value=-1)
            cases[icase] = (type_res, (0, msg))
            icase += 1
    # print(geo_form)
    if len(bar_form[2]):
        geo_form.append(bar_form)
    return icase
def _add_nastran_lines_xyz_to_grid(self, name, lines, eids):
    """creates the bar orientation vector lines"""
    nlines = len(lines)
    if nlines == 0:
        return
    assert name != 'Bar Nodes', name
    grid = self.gui.alt_grids[name]

    bar_eids = np.asarray(eids, dtype='int32')
    bar_lines = np.asarray(lines, dtype='float32').reshape(nlines, 6)
    self.bar_eids[name] = bar_eids
    self.bar_lines[name] = bar_lines

    # each row is (x1, y1, z1, x2, y2, z2) -> two xyz points per line
    npoints = nlines * 2
    nodes = bar_lines.reshape(npoints, 3)
    points = numpy_to_vtk_points(nodes)
    connectivity = np.arange(0, npoints, dtype='int32').reshape(nlines, 2)

    line_etype = 3  # vtk.vtkLine().GetCellType()
    create_vtk_cells_of_constant_element_type(grid, connectivity, line_etype)
    grid.SetPoints(points)
def _fill_dependent_independent(self, unused_mpc_id, model, lines,
                                depname, indname, linename, idtype):
    """
    creates the mpc actors

    Parameters
    ----------
    unused_mpc_id : int
        unused; kept for the call signature
    model : BDF
        the model; used for node positions
    lines : List[[dependent_nid, independent_nid]]
        the dependent/independent node pairs
    depname / indname / linename : str
        names of the dependent-point / independent-point / line actors
    idtype : str
        numpy integer dtype for the node-id array

    Returns
    -------
    mpc_names : List[str]
        the names of the created actors ([] if there are no lines)
    """
    if not lines:
        return []

    self.gui.create_alternate_vtk_grid(
        depname, color=GREEN_FLOAT, line_width=5, opacity=1.,
        point_size=5, representation='point', is_visible=False)
    self.gui.create_alternate_vtk_grid(
        indname, color=LIGHT_GREEN_FLOAT, line_width=5, opacity=1.,
        point_size=5, representation='point', is_visible=False)
    self.gui.create_alternate_vtk_grid(
        linename, color=LIGHT_GREEN_FLOAT, line_width=5, opacity=1.,
        point_size=5, representation='wire', is_visible=False)

    # drop duplicate (dependent, independent) pairs, preserving order;
    # a set-based scan replaces the old O(n^2) ``line not in lines2`` test
    seen = set()
    unique_lines = []
    for line in lines:
        key = tuple(line)
        if key not in seen:
            seen.add(key)
            unique_lines.append(line)
    lines = np.array(unique_lines, dtype=idtype)
    dependent = (lines[:, 0])
    independent = np.unique(lines[:, 1])
    self.dependents_nodes.update(dependent)

    msg = ', which is required by %r' % depname
    self._add_nastran_nodes_to_grid(depname, dependent, model, msg)

    msg = ', which is required by %r' % indname
    self._add_nastran_nodes_to_grid(indname, independent, model, msg)

    self._add_nastran_lines_to_grid(linename, lines, model)

    mpc_names = [depname, indname, linename]
    return mpc_names
def _add_nastran_nodes_to_grid(self, name, node_ids, model, msg, store_msg=False):
    """
    used to create MPC independent/dependent nodes

    Fills the ``name`` alternate grid with one vtkVertex per node.

    Parameters
    ----------
    name : str
        the name of the alternate grid to fill
    node_ids : List[int]
        the nodes to add
    model : BDF
        the model; used for node positions and the log
    msg : str
        appended to the warning about any missing nodes
    store_msg : bool
        if True, warnings are returned instead of only being logged

    Returns
    -------
    out_msg : str
        the stored warning text ('' when nothing went wrong)
    """
    nnodes = len(node_ids)
    if nnodes == 0:
        msg = '0 nodes added for %r' % name
        out_msg = store_warning(model.log, store_msg, msg)
        return out_msg
    self.gui.follower_nodes[name] = node_ids

    #numpy_to_vtk_points(nodes)
    points = vtk.vtkPoints()
    points.SetNumberOfPoints(nnodes)

    j = 0
    nid_map = self.gui.nid_map
    alt_grid = self.gui.alt_grids[name]
    missing_nodes = []
    for nid in sorted(node_ids):
        try:
            unused_i = nid_map[nid]
        except KeyError:
            missing_nodes.append(str(nid))
            continue

        if nid not in model.nodes:
            # I think this hits for SPOINTs
            missing_nodes.append(str(nid))
            continue

        # point = self.grid.GetPoint(i)
        # points.InsertPoint(j, *point)
        node = model.nodes[nid]
        point = node.get_position()
        points.InsertPoint(j, *point)

        #if 1:
        elem = vtk.vtkVertex()
        point_ids = elem.GetPointIds()
        point_ids.SetId(0, j)
        #else:
            #elem = vtk.vtkSphere()
            #dim_max = 1.0
            #sphere_size = self._get_sphere_size(dim_max)
            #elem.SetRadius(sphere_size)
            #elem.SetCenter(points.GetPoint(j))

        alt_grid.InsertNextCell(elem.GetCellType(), point_ids)
        j += 1
    out_msg = ''
    # cleanup: stored_msg was previously initialized to a list but
    # assigned a str below; keep it a str throughout
    stored_msg = ''
    if missing_nodes:
        stored_msg = 'nids=[%s] do not exist%s' % (', '.join(missing_nodes), msg)

    alt_grid.SetPoints(points)
    if stored_msg:
        out_msg = store_warning(model.log, store_msg, stored_msg)
    return out_msg
def _add_nastran_spoints_to_grid(self, spoints, nid_map):
    """used to create SPOINTs"""
    if not spoints:
        return
    spoint_ids = list(spoints.keys())
    assert isinstance(spoint_ids, list), type(spoint_ids)

    nspoints = len(spoint_ids)
    name = 'SPoints'
    if nspoints == 0:
        self.log.warning('0 spoints added for %r' % name)
        return
    self.gui.create_alternate_vtk_grid(
        name, color=BLUE_FLOAT, line_width=1, opacity=1.,
        point_size=5, representation='point', bar_scale=0., is_visible=True)

    self.gui.follower_nodes[name] = spoint_ids
    vtk_points = vtk.vtkPoints()
    vtk_points.SetNumberOfPoints(nspoints)

    alt_grid = self.gui.alt_grids[name]
    ipoint = 0
    for spointi in sorted(spoint_ids):
        try:
            unused_i = nid_map[spointi]
        except KeyError:
            self.log.warning('spointi=%s does not exist' % spointi)
            continue

        if spointi not in spoints:
            self.log.warning('spointi=%s doesnt exist' % spointi)
            continue
        # SPOINTs have no physical location; all are drawn at the origin
        vtk_points.InsertPoint(ipoint, 0., 0., 0.)

        vertex = vtk.vtkVertex()
        vertex_point_ids = vertex.GetPointIds()
        vertex_point_ids.SetId(0, ipoint)
        alt_grid.InsertNextCell(vertex.GetCellType(), vertex_point_ids)
        ipoint += 1
    alt_grid.SetPoints(vtk_points)
def _add_nastran_lines_to_grid(self, name, lines, model, nid_to_pid_map=None):
    """
    used to create MPC lines

    Parameters
    ----------
    name : str
        the name of the alternate grid to fill
    lines : (nlines, 2) int ndarray
        the (node1, node2) pairs to connect
    model : BDF
        the model; used for node positions and the log
    nid_to_pid_map : dict; default=None
        unused here  # NOTE(review): kept for API symmetry? confirm
    """
    nlines = lines.shape[0]
    #nids = np.unique(lines)
    #nnodes = len(nids)
    # two points per line; shared nodes are intentionally duplicated
    nnodes = nlines * 2
    if nnodes == 0:
        return
    self.gui.follower_nodes[name] = lines.ravel()
    points = vtk.vtkPoints()
    points.SetNumberOfPoints(nnodes)

    j = 0
    etype = 3 # vtkLine
    nid_map = self.gui.nid_map
    alt_grid = self.gui.alt_grids[name]
    for nid1, nid2 in lines:
        # skip any line whose end nodes aren't in the displayed model
        try:
            unused_i1 = nid_map[nid1]
        except KeyError:
            model.log.warning('nid=%s does not exist' % nid1)
            continue

        try:
            unused_i2 = nid_map[nid2]
        except KeyError:
            model.log.warning('nid=%s does not exist' % nid2)
            continue

        if nid1 not in model.nodes or nid2 not in model.nodes:
            continue
        node = model.nodes[nid1]
        point = node.get_position()
        points.InsertPoint(j, *point)

        node = model.nodes[nid2]
        point = node.get_position()
        points.InsertPoint(j + 1, *point)

        elem = vtk.vtkLine()
        point_ids = elem.GetPointIds()
        point_ids.SetId(0, j)
        point_ids.SetId(1, j + 1)
        alt_grid.InsertNextCell(etype, point_ids)
        j += 2
    alt_grid.SetPoints(points)
def _fill_suport(self, suport_id, unused_subcase_id, model):
    """creates SUPORT and SUPORT1 nodes"""
    suport_name = 'suport1_id=%i' % suport_id
    self.gui.create_alternate_vtk_grid(
        suport_name, color=RED_FLOAT, line_width=5, opacity=1., point_size=4,
        representation='point', is_visible=False)

    # add the SUPORT/SUPORT1 nodes to the actor
    suport_nids = get_suport_node_ids(model, suport_id)
    self._add_nastran_nodes_to_grid(
        suport_name, suport_nids, model,
        ', which is required by %r' % suport_name)
    return suport_name
def _get_sphere_size(self, dim_max):
return 0.01 * dim_max
def _map_elements3(self, nid_map, model, unused_j, unused_dim_max,
nid_cp_cd, xref_loads=True):
"""
Much, much faster way to add elements that directly builds the VTK objects
rather than using for loops.
Returns
-------
nid_to_pid_map : dict
node to property id map
used to show SPC constraints (we don't want to show constraints on 456 DOFs)
icase : int
the result number
cases : dict
the GuiResult objects
form : List[???, ???, ???]
the Results sidebar data
TDOO: Not quite done on:
- ???
"""
settings = self.gui.settings # type: Settings
# these normals point inwards
# 4
# / | \
# / | \
# 3-------2
# \ | /
# \ | /
# 1
_ctetra_faces = (
(0, 1, 2), # (1, 2, 3),
(0, 3, 1), # (1, 4, 2),
(0, 3, 2), # (1, 3, 4),
(1, 3, 2), # (2, 4, 3),
)
# these normals point inwards
#
#
#
#
# /4-----3
# / /
# / 5 /
# / \ /
# / \ /
# 1---------2
_cpyram_faces = (
(0, 1, 2, 3), # (1, 2, 3, 4),
(1, 4, 2), # (2, 5, 3),
(2, 4, 3), # (3, 5, 4),
(0, 3, 4), # (1, 4, 5),
(0, 4, 1), # (1, 5, 2),
)
# these normals point inwards
# /6
# / | \
# / | \
# 3\ | \
# | \ /4-----5
# | \/ /
# | / \ /
# | / \ /
# | / \ /
# 1---------2
_cpenta_faces = (
(0, 2, 1), # (1, 3, 2),
(3, 4, 5), # (4, 5, 6),
(0, 1, 4, 3), # (1, 2, 5, 4), # bottom
(1, 2, 5, 4), # (2, 3, 6, 5), # right
(0, 3, 5, 2), # (1, 4, 6, 3), # left
)
# these normals point inwards
# 8----7
# /| /|
# / | / |
# / 5-/--6
# 4-----3 /
# | / | /
# | / | /
# 1-----2
_chexa_faces = (
(4, 5, 6, 7), # (5, 6, 7, 8),
(0, 3, 2, 1), # (1, 4, 3, 2),
(1, 2, 6, 5), # (2, 3, 7, 6),
(2, 3, 7, 6), # (3, 4, 8, 7),
(0, 4, 7, 3), # (1, 5, 8, 4),
(0, 6, 5, 4), # (1, 7, 6, 5),
)
elements, nelements, unused_superelements = get_elements_nelements_unvectorized(model)
xyz_cid0 = self.xyz_cid0
pids_array = np.zeros(nelements, dtype='int32')
eids_array = np.zeros(nelements, dtype='int32')
mcid_array = np.full(nelements, -1, dtype='int32')
material_theta_array = np.full(nelements, np.nan, dtype='float32')
dim_array = np.full(nelements, -1, dtype='int32')
nnodes_array = np.full(nelements, -1, dtype='int32')
# quality
min_interior_angle = np.zeros(nelements, 'float32')
max_interior_angle = np.zeros(nelements, 'float32')
dideal_theta = np.zeros(nelements, 'float32')
max_skew_angle = np.zeros(nelements, 'float32')
max_warp_angle = np.zeros(nelements, 'float32')
max_aspect_ratio = np.zeros(nelements, 'float32')
area = np.zeros(nelements, 'float32')
area_ratio = np.zeros(nelements, 'float32')
taper_ratio = np.zeros(nelements, 'float32')
min_edge_length = np.zeros(nelements, 'float32')
normals = np.full((nelements, 3), np.nan, 'float32')
nids_list = []
ieid = 0
cell_offset = 0
dtype = get_numpy_idtype_for_vtk()
cell_types_array = np.zeros(nelements, dtype=dtype)
cell_offsets_array = np.zeros(nelements, dtype=dtype)
cell_type_point = 1 # vtk.vtkVertex().GetCellType()
cell_type_line = 3 # vtk.vtkLine().GetCellType()
cell_type_tri3 = 5 # vtkTriangle().GetCellType()
cell_type_tri6 = 22 # vtkQuadraticTriangle().GetCellType()
cell_type_quad4 = 9 # vtkQuad().GetCellType()
#cell_type_quad8 = 23 # vtkQuadraticQuad().GetCellType()
cell_type_tetra4 = 10 # vtkTetra().GetCellType()
cell_type_tetra10 = 24 # vtkQuadraticTetra().GetCellType()
cell_type_pyram5 = 14 # vtkPyramid().GetCellType()
#cell_type_pyram13 = 27 # vtk.vtkQuadraticPyramid().GetCellType()
cell_type_penta6 = 13 # vtkWedge().GetCellType()
cell_type_penta15 = 26 # vtkQuadraticWedge().GetCellType()
cell_type_hexa8 = 12 # vtkHexahedron().GetCellType()
cell_type_hexa20 = 25 # vtkQuadraticHexahedron().GetCellType()
# per gui/testing_methods.py/create_vtk_cells_of_constant_element_type
#1 = vtk.vtkVertex().GetCellType()
#3 = vtkLine().GetCellType()
#5 = vtkTriangle().GetCellType()
#9 = vtk.vtkQuad().GetCellType()
#10 = vtkTetra().GetCellType()
#vtkPenta().GetCellType()
#vtkHexa().GetCellType()
#vtkPyram().GetCellType()
skipped_etypes = set()
all_nids = nid_cp_cd[:, 0]
ieid = 0
for eid, elem in sorted(elements.items()):
if ieid % 5000 == 0 and ieid > 0:
print(' map_elements = %i' % ieid)
etype = elem.type
nnodes = None
nids = None
pid = None
cell_type = None
inids = None
dideal_thetai = np.nan
min_thetai = np.nan
max_thetai = np.nan
#max_thetai = np.nan
max_skew = np.nan
max_warp = np.nan
aspect_ratio = np.nan
areai = np.nan
area_ratioi = np.nan
taper_ratioi = np.nan
min_edge_lengthi = np.nan
normali = np.nan
if etype in ['CTRIA3', 'CTRIAR', 'CTRAX3', 'CPLSTN3', 'CPLSTS3']:
nids = elem.nodes
pid = elem.pid
cell_type = cell_type_tri3 # 5
inids = np.searchsorted(all_nids, nids)
p1, p2, p3 = xyz_cid0[inids, :]
out = tri_quality(p1, p2, p3)
(areai, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi) = out
normali = np.cross(p1 - p2, p1 - p3)
if isinstance(elem.theta_mcid, float):
material_theta_array[ieid] = elem.theta_mcid
else:
mcid_array[ieid] = elem.theta_mcid
nnodes = 3
dim = 2
elif etype in {'CQUAD4', 'CQUADR', 'CPLSTN4', 'CPLSTS4', 'CQUADX4',
'CQUAD1'}: # nastran95
nids = elem.nodes
pid = elem.pid
cell_type = cell_type_quad4 #9
inids = np.searchsorted(all_nids, nids)
p1, p2, p3, p4 = xyz_cid0[inids, :]
out = quad_quality(elem, p1, p2, p3, p4)
(areai, taper_ratioi, area_ratioi, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi, max_warp) = out
normali = np.cross(p1 - p3, p2 - p4)
if isinstance(elem.theta_mcid, float):
material_theta_array[ieid] = elem.theta_mcid
else:
mcid_array[ieid] = elem.theta_mcid
nnodes = 4
dim = 2
elif etype in ['CTRIA6']:
nids = elem.nodes
pid = elem.pid
if None in nids:
cell_type = cell_type_tri3
inids = np.searchsorted(all_nids, nids[:3])
nids = nids[:3]
p1, p2, p3 = xyz_cid0[inids, :]
nnodes = 3
else:
cell_type = cell_type_tri6
inids = np.searchsorted(all_nids, nids)
p1, p2, p3, p4, unused_p5, unused_p6 = xyz_cid0[inids, :]
nnodes = 6
out = tri_quality(p1, p2, p3)
(areai, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi) = out
normali = np.cross(p1 - p2, p1 - p3)
if isinstance(elem.theta_mcid, float):
material_theta_array[ieid] = elem.theta_mcid
else:
mcid_array[ieid] = elem.theta_mcid
dim = 2
elif etype == 'CQUAD8':
nids = elem.nodes
pid = elem.pid
if None in nids:
cell_type = cell_type_tri3
inids = np.searchsorted(all_nids, nids[:4])
nids = nids[:4]
p1, p2, p3, p4 = xyz_cid0[inids, :]
nnodes = 4
else:
cell_type = cell_type_tri6
inids = np.searchsorted(all_nids, nids)
p1, p2, p3, p4 = xyz_cid0[inids[:4], :]
nnodes = 8
out = quad_quality(elem, p1, p2, p3, p4)
(areai, taper_ratioi, area_ratioi, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi, max_warp) = out
normali = np.cross(p1 - p3, p2 - p4)
if isinstance(elem.theta_mcid, float):
material_theta_array[ieid] = elem.theta_mcid
else:
mcid_array[ieid] = elem.theta_mcid
nnodes = 4
dim = 2
elif etype == 'CSHEAR':
nids = elem.nodes
pid = elem.pid
cell_type = cell_type_quad4 #9
inids = np.searchsorted(all_nids, nids)
p1, p2, p3, p4 = xyz_cid0[inids, :]
out = quad_quality(elem, p1, p2, p3, p4)
(areai, taper_ratioi, area_ratioi, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi, max_warp) = out
normali = np.cross(p1 - p3, p2 - p4)
nnodes = 4
dim = 2
elif etype == 'CTETRA':
nids = elem.nodes
pid = elem.pid
if None in nids:
cell_type = cell_type_tetra4
nids = nids[:4]
nnodes = 4
else:
cell_type = cell_type_tetra10
nnodes = 10
inids = np.searchsorted(all_nids, nids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_ctetra_faces, nids, nid_map, xyz_cid0)
dim = 3
elif etype == 'CHEXA':
nids = elem.nodes
pid = elem.pid
if None in nids:
cell_type = cell_type_hexa8
nids = nids[:8]
nnodes = 8
else:
cell_type = cell_type_hexa20
nnodes = 20
inids = np.searchsorted(all_nids, nids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_chexa_faces, nids, nid_map, xyz_cid0)
dim = 3
elif etype == 'CPENTA':
nids = elem.nodes
pid = elem.pid
if None in nids:
cell_type = cell_type_penta6
nids = nids[:6]
nnodes = 6
else:
cell_type = cell_type_penta15
nnodes = 15
inids = np.searchsorted(all_nids, nids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_cpenta_faces, nids, nid_map, xyz_cid0)
dim = 3
elif etype == 'CPYRAM':
# TODO: assuming 5
nids = elem.nodes
pid = elem.pid
if None in nids:
cell_type = cell_type_pyram5
nids = nids[:5]
nnodes = 5
else:
cell_type = cell_type_penta15
nnodes = 15
inids = np.searchsorted(all_nids, nids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_cpyram_faces, nids, nid_map, xyz_cid0)
dim = 3
elif etype in ['CELAS2', 'CELAS4', 'CDAMP4']:
# these can have empty nodes and have no property
# CELAS1: 1/2 GRID/SPOINT and pid
# CELAS2: 1/2 GRID/SPOINT, k, ge, and s
# CELAS3: 1/2 SPOINT and pid
# CELAS4: 1/2 SPOINT and k
nids = elem.nodes
assert nids[0] != nids[1]
if None in nids:
assert nids[0] is not None, nids
assert nids[1] is None, nids
nids = [nids[0]]
cell_type = cell_type_point
nnodes = 1
else:
nids = elem.nodes
assert nids[0] != nids[1]
cell_type = cell_type_line
nnodes = 2
inids = np.searchsorted(all_nids, nids)
pid = 0
dim = 0
elif etype in ['CBUSH', 'CBUSH1D', 'CBUSH2D',
'CELAS1', 'CELAS3',
'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP5',
'CFAST', 'CGAP', 'CVISC']:
nids = elem.nodes
assert nids[0] != nids[1]
assert None not in nids, 'nids=%s\n%s' % (nids, elem)
pid = elem.pid
cell_type = cell_type_line
inids = np.searchsorted(all_nids, nids)
nnodes = 2
dim = 0
elif etype in ['CBAR', 'CBEAM']:
nids = elem.nodes
pid = elem.pid
pid_ref = model.Property(pid)
areai = pid_ref.Area()
cell_type = cell_type_line
inids = np.searchsorted(all_nids, nids)
p1, p2 = xyz_cid0[inids, :]
min_edge_lengthi = norm(p2 - p1)
nnodes = 2
dim = 1
elif etype in ['CROD', 'CTUBE']:
nids = elem.nodes
pid = elem.pid
pid_ref = model.Property(pid)
areai = pid_ref.Area()
cell_type = cell_type_line
inids = np.searchsorted(all_nids, nids)
p1, p2 = xyz_cid0[inids, :]
min_edge_lengthi = norm(p2 - p1)
nnodes = 2
dim = 1
elif etype == 'CONROD':
nids = elem.nodes
areai = elem.Area()
pid = 0
cell_type = cell_type_line
inids = np.searchsorted(all_nids, nids)
p1, p2 = xyz_cid0[inids, :]
min_edge_lengthi = norm(p2 - p1)
nnodes = 2
dim = 1
#------------------------------
# rare
#elif etype == 'CIHEX1':
#nids = elem.nodes
#pid = elem.pid
#cell_type = cell_type_hexa8
#inids = np.searchsorted(all_nids, nids)
#min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
#_chexa_faces, nids, nid_map, xyz_cid0)
#nnodes = 8
#dim = 3
elif etype == 'CHBDYE':
#self.eid_map[eid] = ieid
eid_solid = elem.eid2
side = elem.side
element_solid = model.elements[eid_solid]
mapped_inids = SIDE_MAP[element_solid.type][side]
side_inids = [nid - 1 for nid in mapped_inids]
nodes = element_solid.node_ids
pid = 0
nnodes = len(side_inids)
nids = [nodes[inid] for inid in side_inids]
inids = np.searchsorted(all_nids, nids)
if len(side_inids) == 4:
cell_type = cell_type_quad4
else:
msg = 'element_solid:\n%s' % (str(element_solid))
msg += 'mapped_inids = %s\n' % mapped_inids
msg += 'side_inids = %s\n' % side_inids
msg += 'nodes = %s\n' % nodes
#msg += 'side_nodes = %s\n' % side_nodes
raise NotImplementedError(msg)
elif etype == 'GENEL':
nids = []
if len(elem.ul_nodes):
nids.append(elem.ul_nodes)
if len(elem.ud_nodes):
nids.append(elem.ud_nodes)
nids = np.unique(np.hstack(nids))
#print(elem.get_stats())
nids = nids[:2]
areai = np.nan
pid = 0
cell_type = cell_type_line
inids = np.searchsorted(all_nids, nids)
p1, p2 = xyz_cid0[inids, :]
min_edge_lengthi = norm(p2 - p1)
nnodes = len(nids)
dim = 1
else:
#raise NotImplementedError(elem)
skipped_etypes.add(etype)
nelements -= 1
continue
#for nid in nids:
#assert isinstance(nid, integer_types), 'not an integer. nids=%s\n%s' % (nids, elem)
#assert nid != 0, 'not a positive integer. nids=%s\n%s' % (nids, elem)
assert inids is not None
if not np.array_equal(all_nids[inids], nids):
msg = 'all_nids[inids]=%s nids=%s\n%s' % (all_nids[inids], nids, elem)
raise RuntimeError(msg)
assert cell_type is not None
assert cell_offset is not None
assert eid is not None
assert pid is not None
assert dim is not None
assert nnodes is not None
nids_list.append(nnodes)
nids_list.extend(inids)
normals[ieid] = normali
eids_array[ieid] = eid
pids_array[ieid] = pid
dim_array[ieid] = dim
cell_types_array[ieid] = cell_type
cell_offsets_array[ieid] = cell_offset # I assume the problem is here
cell_offset += nnodes + 1
self.eid_map[eid] = ieid
min_interior_angle[ieid] = min_thetai
max_interior_angle[ieid] = max_thetai
dideal_theta[ieid] = dideal_thetai
max_skew_angle[ieid] = max_skew
max_warp_angle[ieid] = max_warp
max_aspect_ratio[ieid] = aspect_ratio
area[ieid] = areai
area_ratio[ieid] = area_ratioi
taper_ratio[ieid] = taper_ratioi
min_edge_length[ieid] = min_edge_lengthi
ieid += 1
#print('self.eid_map =', self.eid_map)
icells_zero = np.where(cell_types_array == 0)[0]
# TODO: I'd like to get rid of deep=1, but it'll crash the edges
deep = 1
if len(icells_zero):
icells = np.where(cell_types_array != 0)[0]
if len(icells) == 0:
self.log.error('skipped_etypes = %s' % skipped_etypes)
raise RuntimeError('there are no elements...')
eids_array = eids_array[icells]
pids_array = pids_array[icells]
#dim_array = pids_array[dim_array]
cell_types_array = cell_types_array[icells]
cell_offsets_array = cell_offsets_array[icells]
nnodes_array = nnodes_array[icells]
normals = normals[icells, :]
#deep = 1
#print('deep = %s' % deep)
if skipped_etypes:
self.log.error('skipped_etypes = %s' % list(skipped_etypes))
#print('skipped_etypes = %s' % skipped_etypes)
if len(pids_array) != nelements:
msg = 'nelements=%s len(pids_array)=%s' % (nelements, len(pids_array))
raise RuntimeError(msg)
if len(cell_offsets_array) != nelements:
msg = 'nelements=%s len(cell_offsets_array)=%s' % (nelements, len(cell_offsets_array))
raise RuntimeError(msg)
nids_array = np.array(nids_list, dtype=dtype)
#-----------------------------------------------------------------
# saving some data members
self.element_ids = eids_array
#print('cell_types_array* = ', cell_types_array.tolist())
#print('cell_offsets_array* = ', cell_offsets_array.tolist())
#-----------------------------------------------------------------
# build the grid
#self.log.info('nids_array = %s' % nids_array)
#self.log.info('cell_offsets_array = %s' % cell_offsets_array)
#self.log.info('cell_types_array = %s' % cell_types_array)
# Create the array of cells
cells_id_type = numpy_to_vtkIdTypeArray(nids_array, deep=1)
vtk_cells = vtk.vtkCellArray()
vtk_cells.SetCells(nelements, cells_id_type)
# Cell types
vtk_cell_types = numpy_to_vtk(
cell_types_array, deep=deep,
array_type=vtk.vtkUnsignedCharArray().GetDataType())
vtk_cell_offsets = numpy_to_vtk(cell_offsets_array, deep=deep,
array_type=vtk.VTK_ID_TYPE)
grid = self.grid
#grid = vtk.vtkUnstructuredGrid()
grid.SetCells(vtk_cell_types, vtk_cell_offsets, vtk_cells)
#-----------------------------------------------------------------
# fill the results
nid_to_pid_map = None
self.isubcase_name_map = {1: ['Nastran', '']}
icase = 0
cases = OrderedDict()
form = ['Geometry', None, []]
form0 = form[2]
subcase_id = 0
#nids_set = True
#if nids_set:
# this intentionally makes a deepcopy
#nids = np.array(nid_cp_cd[:, 0])
# this intentionally makes a deepcopy
cds = np.array(nid_cp_cd[:, 2])
colormap = settings.colormap
nid_res = GuiResult(subcase_id, 'NodeID', 'NodeID', 'node', all_nids,
mask_value=0,
nlabels=None,
labelsize=None,
ncolors=None,
colormap=colormap,
data_format=None,
uname='GuiResult')
cases[icase] = (nid_res, (0, 'Node ID'))
form0.append(('Node ID', icase, []))
icase += 1
if cds.max() > 0:
cd_res = GuiResult(0, header='NodeCd', title='NodeCd',
location='node', scalar=cds)
cases[icase] = (cd_res, (0, 'NodeCd'))
form0.append(('NodeCd', icase, []))
icase += 1
eid_res = GuiResult(subcase_id, 'ElementID', 'ElementID', 'centroid', eids_array,
mask_value=0,
nlabels=None,
labelsize=None,
ncolors=None,
colormap=colormap,
data_format=None,
uname='GuiResult')
cases[icase] = (eid_res, (0, 'ElementID'))
form0.append(('ElementID', icase, []))
icase += 1
is_element_dim = True
#if len(np.unique(dim_array)) > 1:
#dim_res = GuiResult(subcase_id, 'ElementDim', 'ElementDim', 'centroid', dim_array,
#mask_value=-1,
#nlabels=None,
#labelsize=None,
#ncolors=None,
#colormap=colormap,
#data_format=None,
#uname='GuiResult')
#cases[icase] = (dim_res, (0, 'ElementDim'))
#form0.append(('ElementDim', icase, []))
#icase += 1
if nnodes_array.max() > -1:
nnodes_res = GuiResult(subcase_id, 'NNodes/Elem', 'NNodes/Elem',
'centroid', nnodes_array,
mask_value=0,
nlabels=None,
labelsize=None,
ncolors=None,
colormap=colormap,
data_format=None,
uname='GuiResult')
cases[icase] = (nnodes_res, (0, 'NNodes/Elem'))
form0.append(('NNodes/Elem', icase, []))
icase += 1
#pid_res = GuiResult(subcase_id, 'PropertyID', 'PropertyID', 'centroid', pids_array,
#mask_value=0,
#nlabels=None,
#labelsize=None,
#ncolors=None,
#colormap=colormap,
#data_format=None,
#uname='GuiResult')
#cases[icase] = (pid_res, (0, 'PropertyID'))
#form0.append(('PropertyID', icase, []))
#icase += 1
if len(model.properties) and nelements and settings.nastran_is_properties:
icase, upids, pcomp, pshell, is_pshell_pcomp = self._build_properties(
model, nelements, eids_array, pids_array, cases, form0, icase)
icase = _build_materials(model, pcomp, pshell, is_pshell_pcomp,
cases, form0, icase)
try:
icase = _build_optimization(model, pids_array, upids,
nelements, cases, form0, icase)
except Exception:
#raise
s = StringIO()
traceback.print_exc(file=s)
sout = s.getvalue()
self.gui.log_error(sout)
print(sout)
#if isgreater_int(mcid_array, -1):
#mcid_res = GuiResult(subcase_id, 'Material Coordinate System', 'MaterialCoord',
#'centroid', mcid_array,
#mask_value=-1,
#nlabels=None,
#labelsize=None,
#ncolors=None,
#colormap=colormap,
#data_format=None,
#uname='GuiResult')
#cases[icase] = (mcid_res, (0, 'Material Coordinate System'))
#form0.append(('Material Coordinate System', icase, []))
#icase += 1
#if np.isfinite(theta_array).any():
#print('np.nanmax(theta_array) =', np.nanmax(theta_array))
#theta_res = GuiResult(subcase_id, 'Theta', 'Theta', 'centroid', theta_array,
#mask_value=None,
#nlabels=None,
#labelsize=None,
#ncolors=None,
#colormap=colormap,
#data_format=None,
#uname='GuiResult')
#cases[icase] = (theta_res, (0, 'Theta'))
#form0.append(('Theta', icase, []))
#icase += 1
normal_mag = underflow_norm(normals, axis=1)
assert len(normal_mag) == nelements
normals /= normal_mag.reshape(nelements, 1)
i_not_nan = np.isnan(normal_mag)
#if self.make_offset_normals_dim and nelements:
#material_coord = None
#icase, normals = _build_normals_quality(
#model, self.gui.eid_map, nelements, cases, form0, icase,
#xyz_cid0, material_coord, material_theta,
#min_interior_angle, max_interior_angle, dideal_theta,
#area, max_skew_angle, taper_ratio,
#max_warp_angle, area_ratio, min_edge_length, max_aspect_ratio,
#make_offset_normals_dim=self.make_offset_normals_dim)
#self.normals = normals
#----------------------------------------------------------
is_shell = False
if False in i_not_nan:
#max_normal = np.nanmax(normal_mag[i_not_nan])
#is_shell = np.abs(max_normal) > 0.
is_shell = True
is_solid = isfinite_and_nonzero(max_interior_angle)
#print('is_shell=%s is_solid=%s' % (is_shell, is_solid))
if is_shell:
nx_res = GuiResult(
0, header='NormalX', title='NormalX',
location='centroid', scalar=normals[:, 0], data_format='%.2f')
ny_res = GuiResult(
0, header='NormalY', title='NormalY',
location='centroid', scalar=normals[:, 1], data_format='%.2f')
nz_res = GuiResult(
0, header='NormalZ', title='NormalZ',
location='centroid', scalar=normals[:, 2], data_format='%.2f')
nxyz_res = NormalResult(0, 'Normals', 'Normals',
nlabels=2, labelsize=5, ncolors=2,
colormap=colormap, data_format='%.1f',
uname='NormalResult')
area_res = GuiResult(0, header='Area', title='Area',
location='centroid', scalar=area)
min_edge_length_res = GuiResult(
0, header='Min Edge Length', title='Min Edge Length',
location='centroid', scalar=min_edge_length)
min_theta_res = GuiResult(
0, header='Min Interior Angle', title='Min Interior Angle',
location='centroid', scalar=np.degrees(min_interior_angle))
max_theta_res = GuiResult(
0, header='Max Interior Angle', title='Max Interior Angle',
location='centroid', scalar=np.degrees(max_interior_angle))
dideal_theta_res = GuiResult(
0, header='Delta Ideal Angle', title='Delta Ideal Angle',
location='centroid', scalar=np.degrees(dideal_theta))
skew = np.degrees(max_skew_angle)
skew_res = GuiResult(
0, header='Max Skew Angle', title='MaxSkewAngle',
location='centroid', scalar=skew)
aspect_res = GuiResult(
0, header='Aspect Ratio', title='AspectRatio',
location='centroid', scalar=max_aspect_ratio)
form_checks = []
form0.append(('Element Checks', None, form_checks))
if is_element_dim:
form_checks.append(('ElementDim', icase, []))
if self.make_offset_normals_dim and self.make_nnodes_result and 0: # pragma: no cover
nnodes_res = GuiResult(
0, header='NNodes/Elem', title='NNodes/Elem',
location='centroid', scalar=nnodes_array)
form_checks.append(('NNodes', icase + 1, []))
cases[icase + 1] = (nnodes_res, (0, 'NNodes'))
icase += 1
if self.make_offset_normals_dim or 1:
cases[icase + 1] = (nx_res, (0, 'NormalX'))
cases[icase + 2] = (ny_res, (0, 'NormalY'))
cases[icase + 3] = (nz_res, (0, 'NormalZ'))
cases[icase + 4] = (nxyz_res, (0, 'Normal'))
form_checks.append(('NormalX', icase + 1, []))
form_checks.append(('NormalY', icase + 2, []))
form_checks.append(('NormalZ', icase + 3, []))
form_checks.append(('Normal', icase + 4, []))
cases[icase + 5] = (area_res, (0, 'Area'))
cases[icase + 6] = (min_edge_length_res, (0, 'Min Edge Length'))
cases[icase + 7] = (min_theta_res, (0, 'Min Interior Angle'))
cases[icase + 8] = (max_theta_res, (0, 'Max Interior Angle'))
cases[icase + 9] = (dideal_theta_res, (0, 'Delta Ideal Angle'))
cases[icase + 10] = (skew_res, (0, 'Max Skew Angle'))
cases[icase + 11] = (aspect_res, (0, 'Aspect Ratio'))
form_checks.append(('Area', icase + 5, []))
form_checks.append(('Min Edge Length', icase + 6, []))
form_checks.append(('Min Interior Angle', icase + 7, []))
form_checks.append(('Max Interior Angle', icase + 8, []))
form_checks.append(('Delta Ideal Angle', icase + 9, []))
form_checks.append(('Max Skew Angle', icase + 10, []))
form_checks.append(('Aspect Ratio', icase + 11, []))
icase += 12
if np.any(np.isfinite(area_ratio)) and np.nanmax(area_ratio) > 1.:
arearatio_res = GuiResult(
0, header='Area Ratio', title='Area Ratio',
location='centroid', scalar=area_ratio)
cases[icase] = (arearatio_res, (0, 'Area Ratio'))
form_checks.append(('Area Ratio', icase, []))
icase += 1
if np.any(np.isfinite(taper_ratio)) and np.nanmax(taper_ratio) > 1.:
taperratio_res = GuiResult(
0, header='Taper Ratio', title='Taper Ratio',
location='centroid', scalar=taper_ratio)
cases[icase] = (taperratio_res, (0, 'Taper Ratio'))
form_checks.append(('Taper Ratio', icase, []))
icase += 1
if isfinite_and_nonzero(max_warp_angle):
warp_res = GuiResult(
0, header='Max Warp Angle', title='MaxWarpAngle',
location='centroid', scalar=np.degrees(max_warp_angle))
cases[icase + 4] = (warp_res, (0, 'Max Warp Angle'))
form_checks.append(('Max Warp Angle', icase, []))
icase += 1
#if (np.abs(xoffset).max() > 0.0 or np.abs(yoffset).max() > 0.0 or
#np.abs(zoffset).max() > 0.0):
# offsets
#offset_res = GuiResult(
#0, header='Offset', title='Offset',
#location='centroid', scalar=offset, data_format='%g')
#offset_x_res = GuiResult(
#0, header='OffsetX', title='OffsetX',
#location='centroid', scalar=xoffset, data_format='%g')
#offset_y_res = GuiResult(
#0, header='OffsetY', title='OffsetY',
#location='centroid', scalar=yoffset, data_format='%g')
#offset_z_res = GuiResult(
#0, header='OffsetZ', title='OffsetZ',
#location='centroid', scalar=zoffset, data_format='%g')
#cases[icase] = (offset_res, (0, 'Offset'))
#cases[icase + 1] = (offset_x_res, (0, 'OffsetX'))
#cases[icase + 2] = (offset_y_res, (0, 'OffsetY'))
#cases[icase + 3] = (offset_z_res, (0, 'OffsetZ'))
#form_checks.append(('Offset', icase, []))
#form_checks.append(('OffsetX', icase + 1, []))
#form_checks.append(('OffsetY', icase + 2, []))
#form_checks.append(('OffsetZ', icase + 3, []))
#icase += 4
if self.make_xyz or IS_TESTING:
x_res = GuiResult(
0, header='X', title='X',
location='node', scalar=xyz_cid0[:, 0], data_format='%g')
y_res = GuiResult(
0, header='Y', title='Y',
location='node', scalar=xyz_cid0[:, 1], data_format='%g')
z_res = GuiResult(
0, header='Z', title='Z',
location='node', scalar=xyz_cid0[:, 2], data_format='%g')
cases[icase] = (x_res, (0, 'X'))
cases[icase + 1] = (y_res, (0, 'Y'))
cases[icase + 2] = (z_res, (0, 'Z'))
form_checks.append(('X', icase + 0, []))
form_checks.append(('Y', icase + 1, []))
form_checks.append(('Z', icase + 2, []))
icase += 3
elif is_solid:
# only solid elements
form_checks = []
form0.append(('Element Checks', None, form_checks))
min_edge_length_res = GuiResult(
0, header='Min Edge Length', title='Min Edge Length',
location='centroid', scalar=min_edge_length)
min_theta_res = GuiResult(
0, header='Min Interior Angle', title='Min Interior Angle',
location='centroid', scalar=np.degrees(min_interior_angle))
max_theta_res = GuiResult(
0, header='Max Interior Angle', title='Max Interior Angle',
location='centroid', scalar=np.degrees(max_interior_angle))
skew = 90. - np.degrees(max_skew_angle)
#skew_res = GuiResult(0, header='Max Skew Angle', title='MaxSkewAngle',
#location='centroid', scalar=skew)
if is_element_dim:
form_checks.append(('ElementDim', icase, []))
form_checks.append(('Min Edge Length', icase + 1, []))
form_checks.append(('Min Interior Angle', icase + 2, []))
form_checks.append(('Max Interior Angle', icase + 3, []))
form_checks.append(('Max Skew Angle', icase + 4, []))
cases[icase + 1] = (min_edge_length_res, (0, 'Min Edge Length'))
cases[icase + 2] = (min_theta_res, (0, 'Min Interior Angle'))
cases[icase + 3] = (max_theta_res, (0, 'Max Interior Angle'))
#cases[icase + 4] = (skew_res, (0, 'Max Skew Angle'))
icase += 4
else:
form0.append(('ElementDim', icase, []))
icase += 1
if isgreater_int(mcid_array, -1):
material_coord_res = GuiResult(
0, header='MaterialCoord', title='MaterialCoord',
location='centroid',
scalar=mcid_array, mask_value=-1, data_format='%i')
cases[icase] = (material_coord_res, (0, 'MaterialCoord'))
form0.append(('MaterialCoord', icase, []))
icase += 1
if isfinite(material_theta_array):
material_theta_res = GuiResult(
0, header='MaterialTheta', title='MaterialTheta',
location='centroid',
scalar=material_theta_array, data_format='%.3f')
cases[icase] = (material_theta_res, (0, 'MaterialTheta'))
form0.append(('MaterialTheta', icase, []))
icase += 1
#print(normals)
#----------------------------------------------------------
# finishing up vtk
if nelements and isfinite(min_edge_length):
mean_edge_length = np.nanmean(min_edge_length)
self.set_glyph_scale_factor(mean_edge_length * 2.5) # was 1.5
grid.Modified()
#----------------------------------------------------------
# finishing up parameters
self.node_ids = all_nids
self.normals = normals
return nid_to_pid_map, icase, cases, form
def map_elements(self, xyz_cid0, nid_cp_cd, nid_map, model, j, dim_max,
                 plot=True, xref_loads=True):
    """
    Creates the elements and the base geometry results
    (NodeID, NodeCd, ElementID, SuperelementID, properties, materials,
    optimization, element quality/normals).

    Parameters
    ----------
    xyz_cid0 : (nnodes, 3) float ndarray
        the global xyz locations
    nid_cp_cd : (nnodes, 3) int ndarray
        the node_id and coordinate systems corresponding to xyz_cid0
        used for setting the NodeID and CD coordinate results
    nid_map : dict[nid] : nid_index
        nid : int
            the GRID/SPOINT/EPOINT id
        nid_index : int
            the index for the GRID/SPOINT/EPOINT in xyz_cid0
    model : BDF()
        the model object
    j : int
        ???
    dim_max : float
        the max(dx, dy, dz) dimension
        use for ???
    plot : bool; default=True
        NOTE(review): unused in this body -- presumably kept for API
        compatibility; confirm against callers
    xref_loads : bool; default=True
        forwarded to _map_elements3 when IS_TESTING

    Returns
    -------
    nid_to_pid_map : dict / None
        node id -> property ids mapping built by the mapping helper
    icase : int
        the next free result-case index
    cases : OrderedDict
        icase -> (GuiResult, (subcase_id, title))
    form : list
        the sidebar tree: ['Geometry', None, [(title, icase, []), ...]]

    """
    grid = self.gui.grid
    settings = self.gui.settings

    # exercise the vectorized mapper as well when testing; its output
    # is not used below
    if IS_TESTING:
        self._map_elements3(nid_map, model, j, dim_max,
                            nid_cp_cd, xref_loads=xref_loads)

    # build the vtk cells; the "quality" variant additionally computes
    # per-element quality metrics (angles, skew, aspect ratio, ...)
    if settings.nastran_is_element_quality:
        out = self._map_elements1_quality(model, xyz_cid0, nid_cp_cd, dim_max, nid_map, j)
    else:
        out = self._map_elements1_no_quality(model, xyz_cid0, nid_cp_cd, dim_max, nid_map, j)
    (nid_to_pid_map, xyz_cid0, superelements, pids, nelements,
     material_coord, material_theta,
     area, min_interior_angle, max_interior_angle, max_aspect_ratio,
     max_skew_angle, taper_ratio, dideal_theta,
     area_ratio, min_edge_length, max_warp_angle) = out

    #self.grid_mapper.SetResolveCoincidentTopologyToPolygonOffset()
    grid.Modified()

    cases = OrderedDict()
    self.gui.isubcase_name_map = {1: ['Nastran', '']}
    icase = 0
    form = ['Geometry', None, []]
    form0 = form[2]

    #new_cases = True
    # set to True to enable node_ids as an result
    nids_set = True
    if nids_set and self.gui.nnodes > 0:
        # this intentionally makes a deepcopy
        nids = np.array(nid_cp_cd[:, 0])
        cds = np.array(nid_cp_cd[:, 2])

        nid_res = GuiResult(0, header='NodeID', title='NodeID',
                            location='node', scalar=nids)
        cases[icase] = (nid_res, (0, 'NodeID'))
        form0.append(('NodeID', icase, []))
        icase += 1

        # only show the CD frame result if there is more than one frame
        if len(np.unique(cds)) > 1:
            cd_res = GuiResult(0, header='NodeCd', title='NodeCd',
                               location='node', scalar=cds)
            cases[icase] = (cd_res, (0, 'NodeCd'))
            form0.append(('NodeCd', icase, []))
            icase += 1
        self.node_ids = nids

    # set to True to enable elementIDs as a result
    eids_set = True
    if eids_set and nelements:
        # invert eid_map (eid -> vtk index) into an index -> eid array
        eids = np.zeros(nelements, dtype=nid_cp_cd.dtype)
        eid_map = self.gui.eid_map
        for (eid, eid2) in eid_map.items():
            eids[eid2] = eid

        eid_res = GuiResult(0, header='ElementID', title='ElementID',
                            location='centroid', scalar=eids, mask_value=0)
        cases[icase] = (eid_res, (0, 'ElementID'))
        form0.append(('ElementID', icase, []))
        icase += 1
        self.element_ids = eids

        if superelements is not None:
            nid_res = GuiResult(0, header='SuperelementID', title='SuperelementID',
                                location='centroid', scalar=superelements)
            cases[icase] = (nid_res, (0, 'SuperelementID'))
            form0.append(('SuperelementID', icase, []))
            icase += 1

    # subcase_id, resultType, vector_size, location, dataFormat
    if len(model.properties) and nelements and settings.nastran_is_properties:
        icase, upids, pcomp, pshell, is_pshell_pcomp = self._build_properties(
            model, nelements, eids, pids, cases, form0, icase)
        icase = _build_materials(model, pcomp, pshell, is_pshell_pcomp,
                                 cases, form0, icase)

        try:
            icase = _build_optimization(model, pids, upids, nelements,
                                        cases, form0, icase)
        except Exception:
            # optimization results are optional; log the traceback and
            # keep going (but fail loudly when testing)
            if IS_TESTING or self.is_testing_flag:
                raise
            s = StringIO()
            traceback.print_exc(file=s)
            sout = s.getvalue()
            self.gui.log_error(sout)
            print(sout)
            #traceback.print_exc(file=sys.stdout)
            #etype, value, tb = sys.exc_info
            #print(etype, value, tb)
            #raise RuntimeError('Optimization Parsing Error') from e
            #traceback.print_tb(e)
            #print(e)

    #print('nelements=%s eid_map=%s' % (nelements, self.eid_map))

    # scale the point/vector glyphs off the typical element size
    if nelements and isfinite(min_edge_length):
        mean_edge_length = np.nanmean(min_edge_length) * 2.5
        self.gui.set_glyph_scale_factor(mean_edge_length)  # was 1.5

    if (self.make_offset_normals_dim or settings.nastran_is_element_quality) and nelements:
        icase, normals = _build_normals_quality(
            settings, model, self.gui.eid_map, nelements, cases, form0, icase,
            xyz_cid0,
            material_coord, material_theta,
            min_interior_angle, max_interior_angle, dideal_theta,
            area, max_skew_angle, taper_ratio,
            max_warp_angle, area_ratio, min_edge_length, max_aspect_ratio,
            make_offset_normals_dim=self.make_offset_normals_dim)
        self.normals = normals
    return nid_to_pid_map, icase, cases, form
def _build_mcid_vectors(self, model: BDF, nplies: int):
    """
    Builds the line actors that visualize the shell material coordinate
    directions: one actor per ply, plus the element material coordinate
    actor stored under iply=-1.

    NOTE(review): ``nplies`` is not referenced by this implementation;
    the ply set comes from ``export_mcids_all`` instead.
    """
    line_etype = 3  # vtkLine cell type id
    nodes, bars = export_mcids_all(model, eids=None, log=None, debug=False)

    for iply, ply_nodes in nodes.items():
        ply_bars = bars[iply]
        nlines = len(ply_bars)
        if nlines == 0:
            # isotropic ply -> no material direction to draw
            continue
        assert nlines > 0, model.card_count

        name = 'element material coord' if iply == -1 else f'mcid ply={iply+1}'
        self.gui.create_alternate_vtk_grid(
            name, color=RED_FLOAT, line_width=3, opacity=1.0,
            representation='surface', is_visible=False, is_pickable=False)

        vtk_grid = self.gui.alt_grids[name]
        vtk_grid.Allocate(nlines, 1000)

        xyz = np.array(ply_nodes, dtype='float32')
        line_nodes = np.array(ply_bars, dtype='int32')
        assert line_nodes.min() == 0, line_nodes.min()

        points = numpy_to_vtk_points(xyz, points=None, dtype='<f', deep=1)
        vtk_grid.SetPoints(points)
        create_vtk_cells_of_constant_element_type(vtk_grid, line_nodes, line_etype)
def _build_plotels(self, model):
    """
    Creates the PLOTEL actor (wireframe lines).

    Parameters
    ----------
    model : BDF()
        the model object; ``model.plotels`` maps eid -> PLOTEL

    Does nothing when the model has no PLOTELs.
    """
    if not len(model.plotels):
        return

    # sorting isn't required for correctness, but gives a deterministic
    # line order; the comprehension replaces the old append loop
    lines = np.array(
        [element.node_ids for unused_eid, element in sorted(model.plotels.items())],
        dtype='int32')

    self.gui.create_alternate_vtk_grid(
        'plotel', color=RED_FLOAT, line_width=2, opacity=0.8,
        point_size=5, representation='wire', is_visible=True)
    self._add_nastran_lines_to_grid('plotel', lines, model)
def _map_elements1_no_quality(self, model, xyz_cid0, nid_cp_cd, unused_dim_max, nid_map, j):
"""
Helper for map_elements
No element quality
"""
print('_map_elements1_no_quality')
assert nid_map is not None
min_interior_angle = None
max_interior_angle = None
max_aspect_ratio = None
max_skew_angle = None
taper_ratio = None
dideal_theta = None
area_ratio = None
min_edge_length = None
max_warp_angle = None
area = None
if xyz_cid0 is None:
superelements = None
nid_to_pid_map = None
pids = None
nelements = None
material_coord = None
material_theta = None
out = (
nid_to_pid_map, xyz_cid0, superelements, pids, nelements,
material_coord, material_theta,
area, min_interior_angle, max_interior_angle, max_aspect_ratio,
max_skew_angle, taper_ratio, dideal_theta,
area_ratio, min_edge_length, max_warp_angle,
)
return out
xyz_cid0 = self.xyz_cid0
nids = nid_cp_cd[:, 0]
#sphere_size = self._get_sphere_size(dim_max)
# :param i: the element id in grid
# :param j: the element id in grid2
i = 0
#nids = self.eid_to_nid_map[eid]
self.eid_to_nid_map = {}
# the list of all pids
#pids = []
# pid = pids_dict[eid]
pids_dict = {}
elements, nelements, superelements = get_elements_nelements_unvectorized(model)
pids = np.zeros(nelements, 'int32')
material_coord = np.full(nelements, -1, dtype='int32')
material_theta = np.full(nelements, np.nan, dtype='float32')
# pids_good = []
# pids_to_keep = []
# pids_btm = []
# pids_to_drop = []
# 3
# | \
# | \
# | \
# 1------2
# these normals point inwards
# 4
# / | \
# / | \
# 3-------2
# \ | /
# \ | /
# 1
#_ctetra_faces = (
#(0, 1, 2), # (1, 2, 3),
#(0, 3, 1), # (1, 4, 2),
#(0, 3, 2), # (1, 3, 4),
#(1, 3, 2), # (2, 4, 3),
#)
# these normals point inwards
#
#
#
#
# /4-----3
# / /
# / 5 /
# / \ /
# / \ /
# 1---------2
#_cpyram_faces = (
#(0, 1, 2, 3), # (1, 2, 3, 4),
#(1, 4, 2), # (2, 5, 3),
#(2, 4, 3), # (3, 5, 4),
#(0, 3, 4), # (1, 4, 5),
#(0, 4, 1), # (1, 5, 2),
#)
# these normals point inwards
# /6
# / | \
# / | \
# 3\ | \
# | \ /4-----5
# | \/ /
# | / \ /
# | / \ /
# | / \ /
# 1---------2
#_cpenta_faces = (
#(0, 2, 1), # (1, 3, 2),
#(3, 4, 5), # (4, 5, 6),
#(0, 1, 4, 3), # (1, 2, 5, 4), # bottom
#(1, 2, 5, 4), # (2, 3, 6, 5), # right
#(0, 3, 5, 2), # (1, 4, 6, 3), # left
#)
# these normals point inwards
# 8----7
# /| /|
# / | / |
# / 5-/--6
# 4-----3 /
# | / | /
# | / | /
# 1-----2
#_chexa_faces = (
#(4, 5, 6, 7), # (5, 6, 7, 8),
#(0, 3, 2, 1), # (1, 4, 3, 2),
#(1, 2, 6, 5), # (2, 3, 7, 6),
#(2, 3, 7, 6), # (3, 4, 8, 7),
#(0, 4, 7, 3), # (1, 5, 8, 4),
#(0, 6, 5, 4), # (1, 7, 6, 5),
#)
line_type = 3 # vtk.vtkLine().GetCellType()
nid_to_pid_map = defaultdict(list)
pid = 0
log = self.log
grid = self.gui.grid
self._build_plotels(model)
#print("map_elements...")
eid_to_nid_map = self.eid_to_nid_map
eid_map = self.gui.eid_map
for (eid, element) in sorted(elements.items()):
eid_map[eid] = i
if i % 5000 == 0 and i > 0:
print(' map_elements (no quality) = %i' % i)
etype = element.type
# if element.Pid() >= 82:
# continue
# if element.Pid() in pids_to_drop:
# continue
# if element.Pid() not in pids_to_keep:
# continue
# if element.pid.type == 'PSOLID':
# continue
pid = np.nan
if isinstance(element, (CTRIA3, CTRIAR, CTRAX3, CPLSTN3, CPLSTS3)):
if isinstance(element, (CTRIA3, CTRIAR)):
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
elem = vtkTriangle()
node_ids = element.node_ids
pid = element.Pid()
eid_to_nid_map[eid] = node_ids
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
n1, n2, n3 = [nid_map[nid] for nid in node_ids]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, (CTRIA6, CPLSTN6, CPLSTS6, CTRIAX)):
# the CTRIAX is a standard 6-noded element
if isinstance(element, CTRIA6):
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticTriangle()
point_ids = elem.GetPointIds()
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkTriangle()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:3]
n1, n2, n3 = [nid_map[nid] for nid in node_ids[:3]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, CTRIAX6):
# the CTRIAX6 is not a standard second-order triangle
#
# 5
# |\
# | \
# 6 4
# | \
# | \
# 1----2----3
#
#material_coord[i] = element.theta # TODO: no mcid
# midside nodes are required, nodes out of order
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticTriangle()
point_ids = elem.GetPointIds()
point_ids.SetId(3, nid_map[node_ids[1]])
point_ids.SetId(4, nid_map[node_ids[3]])
point_ids.SetId(5, nid_map[node_ids[5]])
eid_to_nid_map[eid] = [node_ids[0], node_ids[2], node_ids[4],
node_ids[1], node_ids[3], node_ids[5]]
else:
elem = vtkTriangle()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = [node_ids[0], node_ids[2], node_ids[4]]
n1 = nid_map[node_ids[0]]
n2 = nid_map[node_ids[2]]
n3 = nid_map[node_ids[4]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, CTRSHL): # nastran95
# the CTRIAX6 is not a standard second-order triangle
#
# 5
# |\
# | \
# 6 4
# | \
# | \
# 1----2----3
#
#material_coord[i] = element.theta # TODO: no mcid
# midside nodes are required, nodes out of order
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids and 0:
elem = vtkQuadraticTriangle()
point_ids = elem.GetPointIds()
point_ids.SetId(3, nid_map[node_ids[1]])
point_ids.SetId(4, nid_map[node_ids[3]])
point_ids.SetId(5, nid_map[node_ids[5]])
else:
elem = vtkTriangle()
point_ids = elem.GetPointIds()
n1 = nid_map[node_ids[0]]
n2 = nid_map[node_ids[2]]
n3 = nid_map[node_ids[4]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
eid_to_nid_map[eid] = [node_ids[0], node_ids[2], node_ids[4]]
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, (CQUAD4, CSHEAR, CQUADR, CPLSTN4, CPLSTS4, CQUADX4, CQUAD1)):
if isinstance(element, (CQUAD4, CQUADR, CQUAD1)):
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids
try:
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids]
except KeyError: # pragma: no cover
print("node_ids =", node_ids)
print(str(element))
#print('nid_map = %s' % nid_map)
raise
#continue
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
#p4 = xyz_cid0[n4, :]
elem = vtkQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
grid.InsertNextCell(9, point_ids)
elif isinstance(element, (CQUAD8, CPLSTN8, CPLSTS8, CQUADX8)):
if isinstance(element, CQUAD8):
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids[:4]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
#p4 = xyz_cid0[n4, :]
if None not in node_ids:
elem = vtkQuadraticQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
self.eid_to_nid_map[eid] = node_ids
else:
elem = vtkQuad()
point_ids = elem.GetPointIds()
self.eid_to_nid_map[eid] = node_ids[:4]
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, (CQUAD, CQUADX)):
# CQUAD, CQUADX are 9 noded quads
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids[:4]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
#p4 = xyz_cid0[n4, :]
if None not in node_ids:
elem = vtk.vtkBiQuadraticQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
point_ids.SetId(8, nid_map[node_ids[8]])
self.eid_to_nid_map[eid] = node_ids
else:
elem = vtkQuad()
point_ids = elem.GetPointIds()
self.eid_to_nid_map[eid] = node_ids[:4]
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, CTETRA4):
elem = vtkTetra()
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids[:4]
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
grid.InsertNextCell(10, point_ids)
#elem_nid_map = {nid:nid_map[nid] for nid in node_ids[:4]}
elif isinstance(element, CTETRA10):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticTetra()
point_ids = elem.GetPointIds()
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
point_ids.SetId(8, nid_map[node_ids[8]])
point_ids.SetId(9, nid_map[node_ids[9]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkTetra()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:4]
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, CPENTA6):
elem = vtkWedge()
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids[:6]
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
grid.InsertNextCell(13, point_ids)
elif isinstance(element, CPENTA15):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticWedge()
point_ids = elem.GetPointIds()
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
point_ids.SetId(8, nid_map[node_ids[8]])
point_ids.SetId(9, nid_map[node_ids[9]])
point_ids.SetId(10, nid_map[node_ids[10]])
point_ids.SetId(11, nid_map[node_ids[11]])
point_ids.SetId(12, nid_map[node_ids[12]])
point_ids.SetId(13, nid_map[node_ids[13]])
point_ids.SetId(14, nid_map[node_ids[14]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkWedge()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:6]
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, (CHEXA8, CIHEX1)):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids[:8]
elem = vtkHexahedron()
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
grid.InsertNextCell(12, point_ids)
elif isinstance(element, (CHEXA20, CIHEX2)):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticHexahedron()
point_ids = elem.GetPointIds()
point_ids.SetId(8, nid_map[node_ids[8]])
point_ids.SetId(9, nid_map[node_ids[9]])
point_ids.SetId(10, nid_map[node_ids[10]])
point_ids.SetId(11, nid_map[node_ids[11]])
# these two blocks are flipped
point_ids.SetId(12, nid_map[node_ids[16]])
point_ids.SetId(13, nid_map[node_ids[17]])
point_ids.SetId(14, nid_map[node_ids[18]])
point_ids.SetId(15, nid_map[node_ids[19]])
point_ids.SetId(16, nid_map[node_ids[12]])
point_ids.SetId(17, nid_map[node_ids[13]])
point_ids.SetId(18, nid_map[node_ids[14]])
point_ids.SetId(19, nid_map[node_ids[15]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkHexahedron()
eid_to_nid_map[eid] = node_ids[:8]
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, CPYRAM5):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids[:5]
elem = vtkPyramid()
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
# etype = 14
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, CPYRAM13):
node_ids = element.node_ids
pid = element.Pid()
if None not in node_ids:
elem = vtkQuadraticPyramid()
point_ids = elem.GetPointIds()
#etype = 27
_nids = [nid_map[node_ids[i]] for i in range(13)]
point_ids.SetId(0, _nids[0])
point_ids.SetId(1, _nids[1])
point_ids.SetId(2, _nids[2])
point_ids.SetId(3, _nids[3])
point_ids.SetId(4, _nids[4])
point_ids.SetId(5, _nids[5])
point_ids.SetId(6, _nids[6])
point_ids.SetId(7, _nids[7])
point_ids.SetId(8, _nids[8])
point_ids.SetId(9, _nids[9])
point_ids.SetId(10, _nids[10])
point_ids.SetId(11, _nids[11])
point_ids.SetId(12, _nids[12])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkPyramid()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:5]
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
#print('*node_ids =', node_ids[:5])
#if min(node_ids) > 0:
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif etype in {'CBUSH', 'CBUSH1D', 'CFAST',
'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4',
'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4', 'CDAMP5',
'CVISC', 'CGAP'}:
# TODO: verify
# CBUSH, CBUSH1D, CFAST, CELAS1, CELAS3
# CDAMP1, CDAMP3, CDAMP4, CDAMP5, CVISC
if hasattr(element, 'pid'):
pid = element.pid
else:
# CELAS2, CELAS4?
pid = 0
node_ids = element.node_ids
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if node_ids[0] is None and node_ids[1] is None: # CELAS2
log.warning('removing CELASx eid=%i -> no node %s' % (eid, node_ids[0]))
del self.eid_map[eid]
continue
if None in node_ids: # used to be 0...
if node_ids[0] is None:
slot = 1
elif node_ids[1] is None:
slot = 0
#print('node_ids=%s slot=%s' % (str(node_ids), slot))
eid_to_nid_map[eid] = node_ids[slot]
nid = node_ids[slot]
if nid not in nid_map:
# SPOINT
log.warning('removing CELASx eid=%i -> SPOINT %i' % (eid, nid))
continue
#c = nid_map[nid]
#if 1:
#print(str(element))
elem = vtk.vtkVertex()
point_ids = elem.GetPointIds()
point_ids.SetId(0, j)
#else:
#elem = vtk.vtkSphere()
#elem = vtk.vtkSphereSource()
#if d == 0.:
#d = sphere_size
#elem.SetRadius(sphere_size)
grid.InsertNextCell(elem.GetCellType(), point_ids)
else:
# 2 points
#d = norm(element.nodes[0].get_position() - element.nodes[1].get_position())
eid_to_nid_map[eid] = node_ids
elem = vtk.vtkLine()
point_ids = elem.GetPointIds()
try:
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
except KeyError:
print("node_ids =", node_ids)
print(str(element))
continue
grid.InsertNextCell(line_type, point_ids)
elif etype in ('CBAR', 'CBEAM', 'CROD', 'CONROD', 'CTUBE'):
if etype == 'CONROD':
pid = 0
#areai = element.Area()
else:
pid = element.Pid()
#try:
#areai = element.pid_ref.Area()
#except Exception:
#print(element)
#raise
node_ids = element.node_ids
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
# 2 points
n1, n2 = np.searchsorted(nids, element.nodes)
#xyz1 = xyz_cid0[n1, :]
#xyz2 = xyz_cid0[n2, :]
eid_to_nid_map[eid] = node_ids
elem = vtk.vtkLine()
try:
n1, n2 = [nid_map[nid] for nid in node_ids]
except KeyError: # pragma: no cover
print("node_ids =", node_ids)
print(str(element))
print('nid_map = %s' % nid_map)
raise
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
grid.InsertNextCell(line_type, point_ids)
elif etype == 'CBEND':
pid = element.Pid()
node_ids = element.node_ids
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
# 2 points
n1, n2 = np.searchsorted(nids, element.nodes)
#xyz1 = xyz_cid0[n1, :]
#xyz2 = xyz_cid0[n2, :]
eid_to_nid_map[eid] = node_ids
if 0:
g0 = element.g0 #_vector
if not isinstance(g0, integer_types):
msg = 'CBEND: g0 must be an integer; g0=%s x=%s\n%s' % (
g0, element.x, element)
raise NotImplementedError(msg)
# only supports g0 as an integer
elem = vtk.vtkQuadraticEdge()
point_ids = elem.GetPointIds()
point_ids.SetId(2, nid_map[g0])
else:
elem = vtk.vtkLine()
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif etype == 'CHBDYG':
node_ids = element.node_ids
pid = 0
#pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if element.surface_type in ['AREA4', 'AREA8']:
eid_to_nid_map[eid] = node_ids[:4]
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids[:4]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
#p4 = xyz_cid0[n4, :]
if element.surface_type == 'AREA4' or None in node_ids:
elem = vtkQuad()
point_ids = elem.GetPointIds()
else:
elem = vtkQuadraticQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif element.surface_type in ['AREA3', 'AREA6']:
eid_to_nid_map[eid] = node_ids[:3]
if element.surface_type == 'AREA3' or None in node_ids:
elem = vtkTriangle()
point_ids = elem.GetPointIds()
else:
elem = vtkQuadraticTriangle()
point_ids = elem.GetPointIds()
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
n1, n2, n3 = [nid_map[nid] for nid in node_ids[:3]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
grid.InsertNextCell(elem.GetCellType(), point_ids)
else:
#print('removing\n%s' % (element))
self.log.warning('removing eid=%s; %s' % (eid, element.type))
del self.eid_map[eid]
self.gui.log_info("skipping %s" % element.type)
continue
#elif etype == 'CBYDYP':
elif etype == 'CHBDYE':
eid_solid = element.eid2
side = element.side
element_solid = model.elements[eid_solid]
try:
mapped_inids = SIDE_MAP[element_solid.type][side]
except KeyError: # pragma: no cover
log.warning('removing\n%s' % (element))
log.warning('removing eid=%s; %s' % (eid, element.type))
del self.eid_map[eid]
self.gui.log_info("skipping %s" % element.type)
continue
side_inids = [nid - 1 for nid in mapped_inids]
nodes = element_solid.node_ids
pid = 0
unused_nnodes = len(side_inids)
node_ids = [nodes[inid] for inid in side_inids]
#inids = np.searchsorted(all_nids, node_ids)
if len(side_inids) == 3:
n1, n2, n3 = [nid_map[nid] for nid in node_ids[:3]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
elem = vtkTriangle()
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
elif len(side_inids) == 4:
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids[:4]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#p3 = xyz_cid0[n3, :]
#p4 = xyz_cid0[n4, :]
elem = vtkQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
else:
msg = 'element_solid:\n%s' % (str(element_solid))
msg += 'mapped_inids = %s\n' % mapped_inids
msg += 'side_inids = %s\n' % side_inids
msg += 'nodes = %s\n' % nodes
#msg += 'side_nodes = %s\n' % side_nodes
raise NotImplementedError(msg)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif etype == 'GENEL':
node_ids = element.node_ids
pid = 0
elem = vtk.vtkLine()
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
elif isinstance(element, CHEXA1):
node_ids = element.node_ids
pid = 0
#mid = element.Mid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids[:8]
elem = vtkHexahedron()
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
grid.InsertNextCell(12, point_ids)
elif isinstance(element, CHEXA2):
node_ids = element.node_ids
pid = 0
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticHexahedron()
point_ids = elem.GetPointIds()
point_ids.SetId(8, nid_map[node_ids[8]])
point_ids.SetId(9, nid_map[node_ids[9]])
point_ids.SetId(10, nid_map[node_ids[10]])
point_ids.SetId(11, nid_map[node_ids[11]])
# these two blocks are flipped
point_ids.SetId(12, nid_map[node_ids[16]])
point_ids.SetId(13, nid_map[node_ids[17]])
point_ids.SetId(14, nid_map[node_ids[18]])
point_ids.SetId(15, nid_map[node_ids[19]])
point_ids.SetId(16, nid_map[node_ids[12]])
point_ids.SetId(17, nid_map[node_ids[13]])
point_ids.SetId(18, nid_map[node_ids[14]])
point_ids.SetId(19, nid_map[node_ids[15]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkHexahedron()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:8]
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
grid.InsertNextCell(elem.GetCellType(), point_ids)
else:
log.warning('removing\n%s' % (element))
log.warning('removing eid=%s; %s' % (eid, element.type))
del self.eid_map[eid]
self.gui.log_info("skipping %s" % element.type)
continue
# what about MPCs, RBE2s (rigid elements)?
# are they plotted as elements?
# and thus do they need a property?
if pid is None:
# CONROD
#print(element)
#pids[i] = 0
#pids_dict[eid] = 0
pass
else:
pids[i] = pid
pids_dict[eid] = pid
#print(eid, min_thetai, max_thetai, '\n', element)
i += 1
#assert len(self.eid_map) > 0, self.eid_map
#print('mapped elements')
nelements = i
self.gui.nelements = nelements
#print('nelements=%s pids=%s' % (nelements, list(pids)))
pids = pids[:nelements]
out = (
nid_to_pid_map, xyz_cid0, superelements, pids, nelements,
material_coord, material_theta,
area, min_interior_angle, max_interior_angle, max_aspect_ratio,
max_skew_angle, taper_ratio, dideal_theta,
area_ratio, min_edge_length, max_warp_angle,
)
return out
def _map_elements1_quality(self, model, xyz_cid0, nid_cp_cd, unused_dim_max, nid_map, j):
"""
Helper for map_elements
element checks
http://www.altairuniversity.com/wp-content/uploads/2012/04/Student_Guide_211-233.pdf
Skew:
Skew in trias is calculated by finding the minimum angle
between the vector from each node to the opposing mid-side
and the vector between the two adjacent mid-sides at each
node of the element. Ninety degrees minus the minimum angle
found is reported.
Skew in quads is calculated by finding the minimum angle
between two lines joining opposite midsides of the element.
Ninety degrees minus the minimum angle found is reported.
Aspect Ratio:
Aspect ratio in two-dimensional elements is calculated by
dividing the maximum length side of an element by the minimum
length side of the element. The aspect ratio check is
performed in the same fashion on all faces of 3D elements.
Warpage:
Warpage in two-dimensional elements is calculated by splitting
a quad into two trias and finding the angle between the two
planes which the trias form. The quad is then split again,
this time using the opposite corners and forming the second
set of trias. The angle between the two planes which the trias
form is then found. The maximum angle found between the planes
is the warpage of the element.
Warpage in three-dimensional elements is performed in the same
fashion on all faces of the element.
Jacobian:
determinant of Jacobian matrix (-1.0 to 1.0; 1.0 is ideal)
2D Checks:
Warp angle:
Warp angle is the out of plane angle
Ideal value = 0 degrees (Acceptable < 100).
Warp angle is not applicable for triangular elements.
It is defined as the angle between the normals to two planes
formed by splitting the quad element along the diagonals.
The maximum angle of the two possible angles is reported as
the warp angle.
Aspect Ratio:
Aspect = maximum element edge length / minimum element edge length
Ideal value = 1 (Acceptable < 5).
Skew:
Ideal value = 0 degrees (Acceptable < 45)
Skew for quadrilateral element = 90
minus the minimum angle between the two lines joining the
opposite mid-sides of the element (alpha).
Skew for triangular element = 90
minus the minimum angle between the lines from each node to
the opposing mid-side and between the two adjacent mid-sides
at each node of the element
Jacobian:
Ideal value = 1.0 (Acceptable > 0.6)
In simple terms, the jacobian is a scale factor arising
because of the transformation of the coordinate system.
            Elements are transformed from the global coordinates to
local coordinates (defined at the centroid of every
element), for faster analysis times.
Distortion:
Ideal value = 1.0 (Acceptable > 0.6)
Distortion is defined as:
d = |Jacobian| * AreaLCS / AreaGCS
LCS - Local Coordinate system
GCS - Global Coordinate system
Stretch:
Ideal value: 1.0 (Acceptable > 0.2)
For quadrilateral elements stretch = Lmin * sqrt(2) / dmax
Stretch for triangular element = R * sqrt(12) / Lmax
Included angles:
Skew is based on the overall shape of the element and it does
not take into account the individual angles of a quadrilateral
or triangular element. Included or interior angle check is
applied for individual angles.
Quad: Ideal value = 90 (Acceptable = 45 < theta <135)
Tria: Ideal value = 60 (Acceptable = 20 < theta < 120)
Taper:
Ideal value = 0 (Acceptable < 0.5)
Taper = sum( | (Ai - Aavg) / Aavg |)
Aavg = (A1 + A2 + A3 + A4) / 4
A1,A2 are one split form of the CQUAD4 and A3,A4 are the quad
split in the other direction.
"""
assert nid_map is not None
if xyz_cid0 is None:
nid_to_pid_map = None
superelements = None
pids = None
nelements = None
material_coord = None
material_theta = None
area = None
min_interior_angle = None
max_interior_angle = None
max_aspect_ratio = None
max_skew_angle = None
taper_ratio = None
dideal_theta = None
area_ratio = None
min_edge_length = None
max_warp_angle = None
out = (
nid_to_pid_map, xyz_cid0, superelements, pids, nelements, material_coord,
area, min_interior_angle, max_interior_angle, max_aspect_ratio,
max_skew_angle, taper_ratio, dideal_theta,
area_ratio, min_edge_length, max_warp_angle,
)
return out
xyz_cid0 = self.xyz_cid0
nids = nid_cp_cd[:, 0]
#sphere_size = self._get_sphere_size(dim_max)
# :param i: the element id in grid
# :param j: the element id in grid2
i = 0
#nids = self.eid_to_nid_map[eid]
self.eid_to_nid_map = {}
# the list of all pids
#pids = []
# pid = pids_dict[eid]
pids_dict = {}
elements, nelements, superelements = get_elements_nelements_unvectorized(model)
pids = np.zeros(nelements, 'int32')
material_coord = np.full(nelements, -1, dtype='int32')
material_theta = np.full(nelements, np.nan, dtype='float32')
min_interior_angle = np.zeros(nelements, 'float32')
max_interior_angle = np.zeros(nelements, 'float32')
dideal_theta = np.zeros(nelements, 'float32')
max_skew_angle = np.zeros(nelements, 'float32')
max_warp_angle = np.zeros(nelements, 'float32')
max_aspect_ratio = np.zeros(nelements, 'float32')
area = np.zeros(nelements, 'float32')
area_ratio = np.zeros(nelements, 'float32')
taper_ratio = np.zeros(nelements, 'float32')
min_edge_length = np.zeros(nelements, 'float32')
# pids_good = []
# pids_to_keep = []
# pids_btm = []
# pids_to_drop = []
# 3
# | \
# | \
# | \
# 1------2
# these normals point inwards
# 4
# / | \
# / | \
# 3-------2
# \ | /
# \ | /
# 1
_ctetra_faces = (
(0, 1, 2), # (1, 2, 3),
(0, 3, 1), # (1, 4, 2),
(0, 3, 2), # (1, 3, 4),
(1, 3, 2), # (2, 4, 3),
)
# these normals point inwards
#
#
#
#
# /4-----3
# / /
# / 5 /
# / \ /
# / \ /
# 1---------2
_cpyram_faces = (
(0, 1, 2, 3), # (1, 2, 3, 4),
(1, 4, 2), # (2, 5, 3),
(2, 4, 3), # (3, 5, 4),
(0, 3, 4), # (1, 4, 5),
(0, 4, 1), # (1, 5, 2),
)
# these normals point inwards
# /6
# / | \
# / | \
# 3\ | \
# | \ /4-----5
# | \/ /
# | / \ /
# | / \ /
# | / \ /
# 1---------2
_cpenta_faces = (
(0, 2, 1), # (1, 3, 2),
(3, 4, 5), # (4, 5, 6),
(0, 1, 4, 3), # (1, 2, 5, 4), # bottom
(1, 2, 5, 4), # (2, 3, 6, 5), # right
(0, 3, 5, 2), # (1, 4, 6, 3), # left
)
# these normals point inwards
# 8----7
# /| /|
# / | / |
# / 5-/--6
# 4-----3 /
# | / | /
# | / | /
# 1-----2
_chexa_faces = (
(4, 5, 6, 7), # (5, 6, 7, 8),
(0, 3, 2, 1), # (1, 4, 3, 2),
(1, 2, 6, 5), # (2, 3, 7, 6),
(2, 3, 7, 6), # (3, 4, 8, 7),
(0, 4, 7, 3), # (1, 5, 8, 4),
(0, 6, 5, 4), # (1, 7, 6, 5),
)
nid_to_pid_map = defaultdict(list)
pid = 0
log = self.log
grid = self.gui.grid
self._build_plotels(model)
#print("map_elements...")
eid_to_nid_map = self.eid_to_nid_map
eid_map = self.gui.eid_map
for (eid, element) in sorted(elements.items()):
eid_map[eid] = i
if i % 5000 == 0 and i > 0:
print(' map_elements = %i' % i)
etype = element.type
# if element.Pid() >= 82:
# continue
# if element.Pid() in pids_to_drop:
# continue
# if element.Pid() not in pids_to_keep:
# continue
# if element.pid.type == 'PSOLID':
# continue
pid = np.nan
dideal_thetai = np.nan
min_thetai = np.nan
max_thetai = np.nan
#max_thetai = np.nan
max_skew = np.nan
#max_warp = np.nan
max_warp = np.nan
aspect_ratio = np.nan
areai = np.nan
area_ratioi = np.nan
taper_ratioi = np.nan
min_edge_lengthi = np.nan
if isinstance(element, (CTRIA3, CTRIAR, CTRAX3, CPLSTN3)):
if isinstance(element, (CTRIA3, CTRIAR)):
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
elem = vtkTriangle()
node_ids = element.node_ids
pid = element.Pid()
eid_to_nid_map[eid] = node_ids
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids) # or blank?
n1, n2, n3 = [nid_map[nid] for nid in node_ids]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
out = tri_quality(p1, p2, p3)
(areai, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi) = out
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, (CTRIA6, CPLSTN6, CTRIAX)):
# the CTRIAX is a standard 6-noded element
if isinstance(element, CTRIA6):
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticTriangle()
point_ids = elem.GetPointIds()
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkTriangle()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:3]
n1, n2, n3 = [nid_map[nid] for nid in node_ids[:3]]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
out = tri_quality(p1, p2, p3)
(areai, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi) = out
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, (CTRIAX6, CTRSHL)):
# the CTRIAX6 is not a standard second-order triangle
#
# 5
# |\
# | \
# 6 4
# | \
# | \
# 1----2----3
#
#material_coord[i] = element.theta # TODO: no mcid
# midside nodes are required, nodes out of order
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticTriangle()
point_ids = elem.GetPointIds()
point_ids.SetId(3, nid_map[node_ids[1]])
point_ids.SetId(4, nid_map[node_ids[3]])
point_ids.SetId(5, nid_map[node_ids[5]])
else:
elem = vtkTriangle()
point_ids = elem.GetPointIds()
n1 = nid_map[node_ids[0]]
n2 = nid_map[node_ids[2]]
n3 = nid_map[node_ids[4]]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
out = tri_quality(p1, p2, p3)
(areai, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi) = out
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
eid_to_nid_map[eid] = [node_ids[0], node_ids[2], node_ids[4]]
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, (CQUAD4, CSHEAR, CQUADR, CPLSTN4, CQUADX4, CQUAD1)):
if isinstance(element, (CQUAD4, CQUADR, CQUAD1)):
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids) # or blank?
eid_to_nid_map[eid] = node_ids
try:
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids]
except KeyError: # pragma: no cover
print("node_ids =", node_ids)
print(str(element))
#print('nid_map = %s' % nid_map)
raise
#continue
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
p4 = xyz_cid0[n4, :]
out = quad_quality(element, p1, p2, p3, p4)
(areai, taper_ratioi, area_ratioi, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi, max_warp) = out
elem = vtkQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
grid.InsertNextCell(9, point_ids)
elif isinstance(element, (CQUAD8, CPLSTN8, CQUADX8)):
if isinstance(element, CQUAD8):
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids[:4]]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
p4 = xyz_cid0[n4, :]
out = quad_quality(element, p1, p2, p3, p4)
(areai, taper_ratioi, area_ratioi, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi, max_warp) = out
if None not in node_ids:
elem = vtkQuadraticQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
self.eid_to_nid_map[eid] = node_ids
else:
elem = vtkQuad()
point_ids = elem.GetPointIds()
self.eid_to_nid_map[eid] = node_ids[:4]
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, (CQUAD, CQUADX)):
# CQUAD, CQUADX are 9 noded quads
mcid, theta = get_shell_material_coord(element)
material_coord[i] = mcid
material_theta[i] = theta
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids[:4]]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
p4 = xyz_cid0[n4, :]
out = quad_quality(element, p1, p2, p3, p4)
(areai, taper_ratioi, area_ratioi, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi, max_warp) = out
if None not in node_ids:
elem = vtk.vtkBiQuadraticQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
point_ids.SetId(8, nid_map[node_ids[8]])
self.eid_to_nid_map[eid] = node_ids
else:
elem = vtkQuad()
point_ids = elem.GetPointIds()
self.eid_to_nid_map[eid] = node_ids[:4]
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif isinstance(element, CTETRA4):
elem = vtkTetra()
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids[:4]
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
grid.InsertNextCell(10, point_ids)
#elem_nid_map = {nid:nid_map[nid] for nid in node_ids[:4]}
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_ctetra_faces, node_ids[:4], nid_map, xyz_cid0)
elif isinstance(element, CTETRA10):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticTetra()
point_ids = elem.GetPointIds()
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
point_ids.SetId(8, nid_map[node_ids[8]])
point_ids.SetId(9, nid_map[node_ids[9]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkTetra()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:4]
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
grid.InsertNextCell(elem.GetCellType(), point_ids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_ctetra_faces, node_ids[:4], nid_map, xyz_cid0)
elif isinstance(element, CPENTA6):
elem = vtkWedge()
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids[:6]
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
grid.InsertNextCell(13, point_ids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_cpenta_faces, node_ids[:6], nid_map, xyz_cid0)
elif isinstance(element, CPENTA15):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticWedge()
point_ids = elem.GetPointIds()
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
point_ids.SetId(8, nid_map[node_ids[8]])
point_ids.SetId(9, nid_map[node_ids[9]])
point_ids.SetId(10, nid_map[node_ids[10]])
point_ids.SetId(11, nid_map[node_ids[11]])
point_ids.SetId(12, nid_map[node_ids[12]])
point_ids.SetId(13, nid_map[node_ids[13]])
point_ids.SetId(14, nid_map[node_ids[14]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkWedge()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:6]
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
grid.InsertNextCell(elem.GetCellType(), point_ids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_cpenta_faces, node_ids[:6], nid_map, xyz_cid0)
elif isinstance(element, (CHEXA8, CIHEX1, CHEXA1)):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids[:8]
elem = vtkHexahedron()
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
grid.InsertNextCell(12, point_ids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_chexa_faces, node_ids[:8], nid_map, xyz_cid0)
elif isinstance(element, (CHEXA20, CIHEX2)):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if None not in node_ids:
elem = vtkQuadraticHexahedron()
point_ids = elem.GetPointIds()
point_ids.SetId(8, nid_map[node_ids[8]])
point_ids.SetId(9, nid_map[node_ids[9]])
point_ids.SetId(10, nid_map[node_ids[10]])
point_ids.SetId(11, nid_map[node_ids[11]])
# these two blocks are flipped
point_ids.SetId(12, nid_map[node_ids[16]])
point_ids.SetId(13, nid_map[node_ids[17]])
point_ids.SetId(14, nid_map[node_ids[18]])
point_ids.SetId(15, nid_map[node_ids[19]])
point_ids.SetId(16, nid_map[node_ids[12]])
point_ids.SetId(17, nid_map[node_ids[13]])
point_ids.SetId(18, nid_map[node_ids[14]])
point_ids.SetId(19, nid_map[node_ids[15]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkHexahedron()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:8]
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
grid.InsertNextCell(elem.GetCellType(), point_ids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_chexa_faces, node_ids[:8], nid_map, xyz_cid0)
elif isinstance(element, CPYRAM5):
node_ids = element.node_ids
pid = element.Pid()
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
eid_to_nid_map[eid] = node_ids[:5]
elem = vtkPyramid()
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
# etype = 14
grid.InsertNextCell(elem.GetCellType(), point_ids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_cpyram_faces, node_ids[:5], nid_map, xyz_cid0)
elif isinstance(element, CPYRAM13):
node_ids = element.node_ids
pid = element.Pid()
if None not in node_ids:
#print(' node_ids =', node_ids)
elem = vtkQuadraticPyramid()
point_ids = elem.GetPointIds()
# etype = 27
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
point_ids.SetId(8, nid_map[node_ids[8]])
point_ids.SetId(9, nid_map[node_ids[9]])
point_ids.SetId(10, nid_map[node_ids[10]])
point_ids.SetId(11, nid_map[node_ids[11]])
point_ids.SetId(12, nid_map[node_ids[12]])
eid_to_nid_map[eid] = node_ids
else:
elem = vtkPyramid()
point_ids = elem.GetPointIds()
eid_to_nid_map[eid] = node_ids[:5]
#print('*node_ids =', node_ids[:5])
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[node_ids[2]])
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
grid.InsertNextCell(elem.GetCellType(), point_ids)
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi = get_min_max_theta(
_cpyram_faces, node_ids[:5], nid_map, xyz_cid0)
elif etype in ('CBUSH', 'CBUSH1D', 'CFAST',
'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4',
'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4', 'CDAMP5',
'CVISC', 'CGAP'):
# TODO: verify
# CBUSH, CBUSH1D, CFAST, CELAS1, CELAS3
# CDAMP1, CDAMP3, CDAMP4, CDAMP5, CVISC
if hasattr(element, 'pid'):
pid = element.pid
else:
# CELAS2, CELAS4?
pid = 0
node_ids = element.node_ids
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if node_ids[0] is None and node_ids[1] is None: # CELAS2
log.warning('removing CELASx eid=%i -> no node %s' % (eid, node_ids[0]))
del self.eid_map[eid]
continue
if None in node_ids: # used to be 0...
if node_ids[0] is None:
slot = 1
elif node_ids[1] is None:
slot = 0
#print('node_ids=%s slot=%s' % (str(node_ids), slot))
eid_to_nid_map[eid] = node_ids[slot]
nid = node_ids[slot]
if nid not in nid_map:
# SPOINT
log.warning('removing CELASx eid=%i -> SPOINT %i' % (eid, nid))
continue
#c = nid_map[nid]
#if 1:
elem = vtk.vtkVertex()
point_ids = elem.GetPointIds()
point_ids.SetId(0, j)
#else:
#elem = vtk.vtkSphere()
#elem = vtk.vtkSphereSource()
#if d == 0.:
#d = sphere_size
#elem.SetRadius(sphere_size)
else:
# 2 points
#d = norm(element.nodes[0].get_position() - element.nodes[1].get_position())
eid_to_nid_map[eid] = node_ids
elem = vtk.vtkLine()
point_ids = elem.GetPointIds()
try:
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
except KeyError:
print("node_ids =", node_ids)
print(str(element))
continue
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif etype in ('CBAR', 'CBEAM', 'CROD', 'CONROD', 'CTUBE'):
if etype == 'CONROD':
pid = 0
areai = element.Area()
else:
pid = element.Pid()
try:
areai = element.pid_ref.Area()
except Exception:
print(element)
raise
node_ids = element.node_ids
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
# 2 points
#min_edge_lengthi = norm(element.nodes_ref[0].get_position() -
#element.nodes_ref[1].get_position())
try:
n1, n2 = np.searchsorted(nids, element.nodes)
except Exception:
print(element.get_stats())
n1i, n2i = element.nodes
print('nids =', nids)
assert n1i in nids, 'n1=%s could not be found' % n1i
assert n2i in nids, 'n2=%s could not be found' % n2i
raise
xyz1 = xyz_cid0[n1, :]
xyz2 = xyz_cid0[n2, :]
min_edge_lengthi = norm(xyz2 - xyz1)
eid_to_nid_map[eid] = node_ids
elem = vtk.vtkLine()
try:
n1, n2 = [nid_map[nid] for nid in node_ids]
except KeyError: # pragma: no cover
print("node_ids =", node_ids)
print(str(element))
print('nid_map = %s' % nid_map)
raise
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif etype == 'CBEND':
pid = element.Pid()
node_ids = element.node_ids
_set_nid_to_pid_map(nid_to_pid_map, pid, node_ids)
# 2 points
n1, n2 = np.searchsorted(nids, element.nodes)
xyz1 = xyz_cid0[n1, :]
xyz2 = xyz_cid0[n2, :]
#min_edge_lengthi = norm(element.nodes_ref[0].get_position() -
#element.nodes_ref[1].get_position())
eid_to_nid_map[eid] = node_ids
g0 = element.g0 #_vector
if not isinstance(g0, integer_types):
msg = 'CBEND: g0 must be an integer; g0=%s x=%s\n%s' % (
g0, element.x, element)
raise NotImplementedError(msg)
# only supports g0 as an integer
elem = vtk.vtkQuadraticEdge()
point_ids = elem.GetPointIds()
point_ids.SetId(0, nid_map[node_ids[0]])
point_ids.SetId(1, nid_map[node_ids[1]])
point_ids.SetId(2, nid_map[g0])
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif etype == 'CHBDYG':
node_ids = element.node_ids
pid = 0
#pid = element.Pid()
_set_nid_to_pid_map_or_blank(nid_to_pid_map, pid, node_ids)
if element.surface_type in ('AREA4', 'AREA8'):
eid_to_nid_map[eid] = node_ids[:4]
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids[:4]]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
p4 = xyz_cid0[n4, :]
out = quad_quality(element, p1, p2, p3, p4)
(areai, taper_ratioi, area_ratioi, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi, max_warp) = out
if element.surface_type == 'AREA4' or None in node_ids:
elem = vtkQuad()
point_ids = elem.GetPointIds()
else:
elem = vtkQuadraticQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
point_ids.SetId(6, nid_map[node_ids[6]])
point_ids.SetId(7, nid_map[node_ids[7]])
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif element.surface_type in ['AREA3', 'AREA6']:
eid_to_nid_map[eid] = node_ids[:3]
if element.Type == 'AREA3' or None in node_ids:
elem = vtkTriangle()
point_ids = elem.GetPointIds()
else:
elem = vtkQuadraticTriangle()
point_ids = elem.GetPointIds()
point_ids.SetId(3, nid_map[node_ids[3]])
point_ids.SetId(4, nid_map[node_ids[4]])
point_ids.SetId(5, nid_map[node_ids[5]])
n1, n2, n3 = [nid_map[nid] for nid in node_ids[:3]]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
out = tri_quality(p1, p2, p3)
(areai, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi) = out
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
grid.InsertNextCell(elem.GetCellType(), point_ids)
else:
#print('removing\n%s' % (element))
log.warning('removing eid=%s; %s' % (eid, element.type))
del self.eid_map[eid]
self.gui.log_info("skipping %s" % element.type)
continue
elif etype == 'CHBDYP':
#| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
#| CHBDYP | EID | PID | TYPE | IVIEWF | IVIEWB | G1 | G2 | G0 |
#| | RADMIDF | RADMIDB | GMID | CE | E1 | E2 | E3 | |
pid = 0 # element.pid
node_ids = element.node_ids
if element.Type == 'LINE':
n1, n2 = [nid_map[nid] for nid in node_ids[:2]]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
elem = vtk.vtkLine()
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
else:
msg = 'element_solid:\n%s' % (str(element_solid))
msg += 'mapped_inids = %s\n' % mapped_inids
msg += 'side_inids = %s\n' % side_inids
msg += 'nodes = %s\n' % nodes
#msg += 'side_nodes = %s\n' % side_nodes
raise NotImplementedError(msg)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif etype == 'CHBDYE':
#| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
#| CHBDYE | EID | EID2 | SIDE | IVIEWF | IVIEWB | RADMIDF | RADMIDB |
eid_solid = element.eid2
side = element.side
element_solid = model.elements[eid_solid]
try:
mapped_inids = SIDE_MAP[element_solid.type][side]
except KeyError: # pragma: no cover
log.warning('removing\n%s' % (element))
log.warning('removing eid=%s; %s' % (eid, element.type))
del self.eid_map[eid]
self.gui.log_info("skipping %s" % element.type)
continue
side_inids = [nid - 1 for nid in mapped_inids]
nodes = element_solid.node_ids
pid = 0
unused_nnodes = len(side_inids)
node_ids = [nodes[inid] for inid in side_inids]
#inids = np.searchsorted(all_nids, node_ids)
#if len(side_inids) == 2:
#n1, n2 = [nid_map[nid] for nid in node_ids[:2]]
#p1 = xyz_cid0[n1, :]
#p2 = xyz_cid0[n2, :]
#elem = vtk.vtkLine()
#point_ids = elem.GetPointIds()
#point_ids.SetId(0, n1)
#point_ids.SetId(1, n2)
if len(side_inids) == 3:
n1, n2, n3 = [nid_map[nid] for nid in node_ids[:3]]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
out = tri_quality(p1, p2, p3)
(areai, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi) = out
elem = vtkTriangle()
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
elif len(side_inids) == 4:
n1, n2, n3, n4 = [nid_map[nid] for nid in node_ids[:4]]
p1 = xyz_cid0[n1, :]
p2 = xyz_cid0[n2, :]
p3 = xyz_cid0[n3, :]
p4 = xyz_cid0[n4, :]
out = quad_quality(element, p1, p2, p3, p4)
(areai, taper_ratioi, area_ratioi, max_skew, aspect_ratio,
min_thetai, max_thetai, dideal_thetai, min_edge_lengthi, max_warp) = out
elem = vtkQuad()
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
point_ids.SetId(2, n3)
point_ids.SetId(3, n4)
else:
msg = 'element_solid:\n%s' % (str(element_solid))
msg += 'mapped_inids = %s\n' % mapped_inids
msg += 'side_inids = %s\n' % side_inids
msg += 'nodes = %s\n' % nodes
#msg += 'side_nodes = %s\n' % side_nodes
raise NotImplementedError(msg)
grid.InsertNextCell(elem.GetCellType(), point_ids)
elif etype == 'GENEL':
genel_nids = []
if len(element.ul_nodes):
genel_nids.append(element.ul_nodes)
if len(element.ud_nodes):
genel_nids.append(element.ud_nodes)
node_ids = np.unique(np.hstack(genel_nids))
node_ids = node_ids[:2]
del genel_nids
elem = vtk.vtkLine()
try:
n1, n2 = [nid_map[nid] for nid in node_ids]
except KeyError: # pragma: no cover
print("node_ids =", node_ids)
print(str(element))
print('nid_map = %s' % nid_map)
raise
point_ids = elem.GetPointIds()
point_ids.SetId(0, n1)
point_ids.SetId(1, n2)
grid.InsertNextCell(elem.GetCellType(), point_ids)
#areai = np.nan
pid = 0
#cell_type = cell_type_line
#inids = np.searchsorted(all_nids, nids)
#p1, p2 = xyz_cid0[inids, :]
#min_edge_lengthi = norm(p2 - p1)
#nnodes = len(nids)
#dim = 1
else:
log.warning('removing\n%s' % (element))
log.warning('removing eid=%s; %s' % (eid, element.type))
del self.eid_map[eid]
self.gui.log_info("skipping %s" % element.type)
continue
# what about MPCs, RBE2s (rigid elements)?
# are they plotted as elements?
# and thus do they need a property?
if pid is None:
# CONROD
#print(element)
#pids[i] = 0
#pids_dict[eid] = 0
pass
else:
pids[i] = pid
pids_dict[eid] = pid
if np.isnan(max_thetai) and etype not in NO_THETA:
print('eid=%s theta=%s...setting to 360. deg' % (eid, max_thetai))
print(element.rstrip())
if isinstance(element.nodes[0], integer_types):
print(' nodes = %s' % element.nodes)
else:
for node in element.nodes:
print(str(node).rstrip())
max_thetai = 2 * np.pi
#print(eid, min_thetai, max_thetai, '\n', element)
min_interior_angle[i] = min_thetai
max_interior_angle[i] = max_thetai
dideal_theta[i] = dideal_thetai
max_skew_angle[i] = max_skew
max_warp_angle[i] = max_warp
max_aspect_ratio[i] = aspect_ratio
area[i] = areai
area_ratio[i] = area_ratioi
taper_ratio[i] = taper_ratioi
min_edge_length[i] = min_edge_lengthi
i += 1
#assert len(self.eid_map) > 0, self.eid_map
#print('mapped elements')
nelements = i
self.gui.nelements = nelements
#print('nelements=%s pids=%s' % (nelements, list(pids)))
pids = pids[:nelements]
out = (
nid_to_pid_map, xyz_cid0, superelements, pids, nelements,
material_coord, material_theta,
area, min_interior_angle, max_interior_angle, max_aspect_ratio,
max_skew_angle, taper_ratio, dideal_theta,
area_ratio, min_edge_length, max_warp_angle,
)
return out
def _build_properties(self, model: BDF, nelements: int, eids, pids,
cases, form0, icase: int) -> int:
"""
creates:
- PropertyID
TODO: CONROD
"""
settings = self.gui.settings
upids = None
pcomp = None
pshell = None
is_pcomp = False
is_pshell = False
mids_pcomp = None
thickness_pcomp = None
nplies_pcomp = None
pcomp = {
'mids' : mids_pcomp,
'thickness' : thickness_pcomp,
'nplies' : nplies_pcomp,
}
mids = None
thickness = None
pshell = {
'mids' : mids,
'thickness' : thickness,
}
if not isfinite_and_greater_than(pids, 0):
return icase, upids, pcomp, pshell, (is_pshell, is_pcomp)
prop_types_with_mid = (
'PSOLID',
'PROD', 'PTUBE', 'PBAR', 'PBARL', 'PBEAM', 'PBEAML',
'PBEND',
)
prop_types_without_mid = ('PVISC', 'PELAS', 'PBUSH', 'PDAMP', 'PDAMPT')
pid_res = GuiResult(0, header='PropertyID', title='PropertyID',
location='centroid', scalar=pids, mask_value=0)
cases[icase] = (pid_res, (0, 'PropertyID'))
form0.append(('PropertyID', icase, []))
icase += 1
upids = np.unique(pids)
mid_eids_skip = []
#mids_pshell = None
#thickness_pshell = None
if 'PSHELL' in model.card_count:
is_pshell = True
composite_properties = ['PCOMP', 'PCOMPG', 'PCOMPS', 'PCOMPLS']
pids_pcomp = model.get_card_ids_by_card_types(composite_properties, combine=True)
properties = model.properties
for superelement in model.superelement_models.values():
properties.update(superelement.properties)
if pids_pcomp:
npliesi = 0
pcomp_nplies = 0
for pid in pids_pcomp:
prop = properties[pid]
pcomp_nplies = max(pcomp_nplies, prop.nplies + 1)
npliesi = max(npliesi, pcomp_nplies)
nplies_pcomp = np.zeros(nelements, dtype='int32')
mids_pcomp = np.zeros((nelements, npliesi), dtype='int32')
thickness_pcomp = np.full((nelements, npliesi), np.nan, dtype='float32')
mids_pcomp = np.zeros((nelements, npliesi), dtype='int32')
is_pcomp = True
#rho = np.full((nelements, nplies), np.nan, dtype='float32')
mids = np.zeros((nelements, 4), dtype='int32')
thickness = np.full((nelements, 4), np.nan, dtype='float32')
for pid in upids:
if pid == 0:
print('skipping pid=0')
continue
elif pid < 0:
continue
try:
prop = properties[pid]
except KeyError:
print('skipping pid=%i' % pid)
continue
if prop.type in prop_types_with_mid:
# simple types
i = np.where(pids == pid)[0]
mid = prop.mid_ref.mid
mids[i, 0] = mid
elif prop.type == 'PSHEAR':
i = np.where(pids == pid)[0]
mid = prop.mid_ref.mid
mids[i, 0] = mid
thickness[i, 0] = prop.Thickness()
elif prop.type == 'PSHELL':
i = np.where(pids == pid)[0]
mid1 = prop.Mid1()
mid2 = prop.Mid2()
mid3 = prop.Mid3()
mid4 = prop.Mid4()
mids[i, 0] = mid1 if mid1 is not None else 0
mids[i, 1] = mid2 if mid2 is not None else 0
mids[i, 2] = mid3 if mid3 is not None else 0
mids[i, 3] = mid4 if mid4 is not None else 0
thickness[i, 0] = prop.Thickness()
thickness[i, 1] = prop.twelveIt3
thickness[i, 2] = prop.tst
elif prop.type in ['PCOMP', 'PCOMPG', 'PCOMPS', 'PCOMPLS']:
i = np.where(pids == pid)[0]
npliesi = prop.nplies
nplies_pcomp[i] = npliesi
thickness_pcomp[i, 0] = 0.
for iply in range(npliesi):
mids_pcomp[i, iply+1] = prop.Mid(iply)
thickniess_ply = prop.Thickness(iply)
thickness_pcomp[i, iply+1] = thickniess_ply
thickness_pcomp[i, 0] += thickniess_ply
#mids[i, 0] = mids[i, 1]
#elif prop.type == 'PSHEAR': # element has the thickness
#i = np.where(pids == pid)[0]
#mids[i, 0] = prop.Mid()
#thickness[i, 0] = elem.Thickness()
elif prop.type in prop_types_without_mid:
i = np.where(pids == pid)[0]
mid_eids_skip.append(i)
else:
print('material for pid=%s type=%s not considered' % (pid, prop.type))
#print('mids =', mids)
if len(mid_eids_skip):
mid_eids_skip = np.hstack(mid_eids_skip)
if mids.min() == 0:
i = np.where(mids == 0)[0]
diff_ids = np.setdiff1d(i, mid_eids_skip)
#eids_missing_material_id = eids[i]
not_skipped_eids_missing_material_id = eids[diff_ids]
if len(not_skipped_eids_missing_material_id):
print('eids=%s dont have materials' %
not_skipped_eids_missing_material_id)
pcomp = {
'mids' : mids_pcomp,
'thickness' : thickness_pcomp,
'nplies' : nplies_pcomp,
}
pshell = {
'mids' : mids,
'thickness' : thickness,
}
nplies = None
if is_pshell:
nplies = 1
if is_pcomp:
nplies = nplies_pcomp.max()
if settings.nastran_is_shell_mcids and nplies is not None:
self._build_mcid_vectors(model, nplies)
return icase, upids, pcomp, pshell, (is_pshell, is_pcomp)
def _plot_pressures(self, model: BDF, cases, form0, icase: int, subcase_id: int) -> int:
"""
pressure act normal to a shell (as opposed to anti-normal to a solid face)
"""
fdtype = 'float32'
# quit out if we're going to make pressure plots anyways
#if self.plot_applied_loads:
#return icase
# quit out if we don't have pressures
if not any(['PLOAD' in model.card_count, 'PLOAD2' in model.card_count,
'PLOAD4' in model.card_count]):
return icase
subcase = model.subcases[subcase_id]
try:
load_case_id = subcase.get_parameter('LOAD')[0]
except KeyError:
#self.gui.log.warning('LOAD not found in subcase_id=%s' % (subcase_id))
return icase
if load_case_id not in model.loads and load_case_id not in model.load_combinations:
self.gui.log.warning('LOAD=%s not found' % load_case_id)
return icase
is_pressure, pressures = get_pressure_array(
model, load_case_id, eids=self.element_ids, stop_on_failure=False, fdtype=fdtype)
if not is_pressure:
return icase
# if there is no applied pressure, don't make a plot
if np.abs(pressures).max():
case_name = 'Pressure'
# print('iload=%s' % iload)
# print(case_name)
pressure_res = GuiResult(
subcase_id, header='Pressure', title='Pressure',
location='centroid', scalar=pressures)
cases[icase] = (pressure_res, (0, 'Pressure'))
form0.append((case_name, icase, []))
icase += 1
return icase
def _plot_applied_loads(self, model, cases, form0, icase, subcase_id,
xref_loads=True, colormap='jet'):
"""
Applied loads include:
----------------------
- Centroidal Pressure
- Fx, Fy, Fz
- SPCDx, SPCDy, SPCDz, SPCDxyz
- Temperature(MATERIAL)
- Temperature(INITIAL)
- Temperature(LOAD)
- Temperature(BOTH)
"""
#if not self.plot_applied_loads:
#model.log.debug('self.plot_applied_loads=False')
#return icase
if not xref_loads:
model.log.debug('returning from plot_applied_loads_early')
return icase
try:
#form = []
out = get_load_arrays(
model, subcase_id,
eid_map=self.eid_map, node_ids=self.node_ids,
normals=self.normals, nid_map=self.nid_map,)
is_loads, is_temperatures, temperature_data, load_data = out
#self.log.info('subcase_id=%s is_loads=%s is_temperatures=%s' % (
#subcase_id, is_loads, is_temperatures))
if is_loads:
centroidal_pressures, forces, moments, spcd = load_data
if np.abs(centroidal_pressures).max():
pressure_res = GuiResult(subcase_id, header='Pressure', title='Pressure',
location='centroid', scalar=centroidal_pressures)
cases[icase] = (pressure_res, (0, 'Pressure'))
form0.append(('Pressure', icase, []))
icase += 1
if np.abs(forces.max() - forces.min()) > 0.0:
fxyz = forces[:, :3]
fscalar = np.linalg.norm(fxyz, axis=1)
if fscalar.max() > 0:
titles = ['Force XYZ']
headers = titles
assert fxyz.shape[1] == 3, fxyz.shape
assert fxyz.shape[0] == len(fscalar)
scales = [1.0]
force_xyz_res = ForceTableResults(
subcase_id, titles, headers, fxyz, fscalar,
scales, data_formats=None,
nlabels=None, labelsize=None, ncolors=None, colormap=colormap,
set_max_min=False, uname='NastranGeometry')
force_xyz_res.save_defaults()
cases[icase] = (force_xyz_res, (0, 'Force XYZ'))
form0.append(('Force XYZ', icase, []))
icase += 1
if np.abs(moments.max() - moments.min()) > 0.0:
mxyz = moments[:, :3]
mscalar = np.linalg.norm(mxyz, axis=1)
if mscalar.max() > 0:
titles = ['Moment XYZ']
headers = titles
assert mxyz.shape[1] == 3, mxyz.shape
assert mxyz.shape[0] == len(mscalar)
scales = [1.0]
moment_xyz_res = ForceTableResults(
subcase_id, titles, headers, mxyz, mscalar,
scales, data_formats=None,
nlabels=None, labelsize=None, ncolors=None, colormap=colormap,
set_max_min=False, uname='NastranGeometry')
moment_xyz_res.save_defaults()
cases[icase] = (moment_xyz_res, (0, 'Moment XYZ'))
form0.append(('Moment XYZ', icase, []))
icase += 1
if np.abs(spcd.max() - spcd.min()) > 0.0:
t123 = spcd[:, :3]
tnorm = norm(t123, axis=1)
assert len(tnorm) == len(spcd[:, 2]), len(spcd[:, 2])
assert len(tnorm) == len(self.nid_map)
spcd_x_res = GuiResult(subcase_id, header='SPCDx', title='SPCDx',
location='node', scalar=forces[:, 0])
spcd_y_res = GuiResult(subcase_id, header='SPCDy', title='SPCDy',
location='node', scalar=forces[:, 1])
spcd_z_res = GuiResult(subcase_id, header='SPCDz', title='SPCDz',
location='node', scalar=forces[:, 2])
spcd_xyz_res = GuiResult(subcase_id, header='SPCD XYZ', title='SPCD XYZ',
location='node', scalar=tnorm)
cases[icase] = (spcd_x_res, (0, 'SPCDx'))
form0.append(('SPCDx', icase, []))
icase += 1
cases[icase] = (spcd_y_res, (0, 'SPCDy'))
form0.append(('SPCDy', icase, []))
icase += 1
cases[icase] = (spcd_z_res, (0, 'SPCDz'))
form0.append(('SPCDz', icase, []))
icase += 1
cases[icase] = (spcd_xyz_res, (0, 'SPCD XYZ'))
form0.append(('SPCD XYZ', icase, []))
icase += 1
if is_temperatures:
temperature_key, temperatures = temperature_data
assert len(temperatures) == len(self.nid_map)
temperature_res = GuiResult(
subcase_id, header=temperature_key, title=temperature_key,
location='node', scalar=temperatures)
cases[icase] = (temperature_res, (0, temperature_key))
form0.append((temperature_key, icase, []))
icase += 1
except KeyError:
stringio = StringIO()
traceback.print_exc(file=stringio)
sout = stringio.getvalue()
self.gui.log_error(sout)
print(sout)
return icase
    def load_nastran_results(self, results_filename):
        """
        Loads the Nastran results into the GUI

        ``results_filename`` may be a path (str) — '.op2', '.nod' (Patran),
        or '.h5' (requires h5py) — or an already-loaded OP2-like object
        (its ``.filename`` attribute is then used).
        """
        model_name = 'main'
        # show the colorbar now that results exist
        self.scalar_bar_actor.VisibilityOn()
        self.scalar_bar_actor.Modified()
        log = self.gui.log
        if isinstance(results_filename, str):
            print("trying to read...%s" % results_filename)
            ext = os.path.splitext(results_filename)[1].lower()
            if ext == '.op2':
                op2_filename = results_filename
                # match the solver dialect used when the geometry was read
                try:
                    mode = self.model.nastran_format
                except AttributeError:
                    mode = None
                model = OP2(log=log, mode=mode, debug=True)
                model.IS_TESTING = False
                # dead branch kept for debugging: restrict to DESIRED_RESULTS
                if 0: # pragma: no cover
                    model._results.saved = set()
                    all_results = model.get_all_results()
                    for result in DESIRED_RESULTS:
                        if result in all_results:
                            model._results.saved.add(result)
                model.read_op2(op2_filename, combine=False)
                if not IS_TESTING or self.is_testing_flag:
                    log.info(model.get_op2_stats())
                # print(model.get_op2_stats())
            elif ext == '.nod':
                # Patran results are handled entirely by the gui; no OP2 model
                self.gui.load_patran_nod(results_filename)
                self.gui.cycle_results_explicit()  # start at icase=0
                return
            elif ext == '.h5' and IS_H5PY:
                model = OP2(log=log, debug=True)
                hdf5_filename = results_filename
                model.load_hdf5_filename(hdf5_filename, combine=False)
            #elif ext == '.pch':
                #raise NotImplementedError('*.pch is not implemented; filename=%r' % op2_filename)
            #elif ext == '.f06':
                #model = F06(log=log, debug=True)
                #model.set_vectorization(True)
                #model.read_f06(op2_filename)
            else:
                #print("error...")
                msg = 'extension=%r is not supported; filename=%r' % (ext, op2_filename)
                raise NotImplementedError(msg)
        else:
            # an OP2-like object was passed in directly
            model = op2_filename
            op2_filename = op2_filename.filename
        if self.save_data:
            self.model_results = model
        #print(model.print_results())
        #self.isubcase_name_map[self.isubcase] = [Subtitle, Label]
        # transform displacements into global coordinates
        try:
            icd_transform = self.icd_transform
            #transforms = self.transforms
        except AttributeError:
            log.error('Skipping displacment transformation')
        else:
            model.transform_displacements_to_global(
                icd_transform, self.model.coords, xyz_cid0=self.xyz_cid0)
        #if 0:
            #cases = OrderedDict()
            #self.isubcase_name_map = {}
            #form = []
            #icase = 0
        #else:
        # append to the existing cases/form rather than starting fresh
        cases = self.result_cases
        form = self.get_form()
        icase = len(cases)
        # form = self.res_widget.get_form()
        #subcase_ids = model.isubcase_name_map.keys()
        #self.isubcase_name_map = model.isubcase_name_map
        # self.isubcase_name_map = model.subcase_key
        #print(self.isubcase_name_map)
        for isubcase, values in model.isubcase_name_map.items():
            if not isinstance(isubcase, integer_types):
                print('isubcase type =', type(isubcase))
                continue
            if isinstance(values, str):
                # eigenvalue???
                # NOTE(review): this branch sets only `label`;
                # `subtitle`/`superelement_adaptivity` below would be
                # unbound on the first str entry — TODO confirm
                label = values
                log.debug('label_str = %r' % label)
            elif isinstance(values, list):
                log.debug(str(values))
                subtitle, superelement_adaptivity, analysis_code, label = values
                del analysis_code
            else:
                log.debug(str(values))
                log.debug(str(type(values)))
                raise RuntimeError(values)
            if superelement_adaptivity:
                subcase_name = '%s: %s' % (subtitle, superelement_adaptivity)
            else:
                subcase_name = subtitle
            self.isubcase_name_map[isubcase] = [subcase_name, label]
            del subtitle, label
        # self.isubcase_name_map = {subcase_id : label for
        # in model.isubcase_name_map.items()}
        form = self._fill_op2_output(results_filename, cases, model, form, icase, log)
        self.gui._finish_results_io2(model_name, form, cases)
        #name = 'spike'
        #eids = np.arange(10, 40)
        #self.create_group_with_name(name, eids)
        #self.post_group_by_name(name)
    def _fill_op2_output(self, op2_filename, cases, model, form, icase, log):
        """
        SOL 101 (Static)
        ----------------
        Subcase 1
         - DisplacementXYZ
         - SPCForceX
         - ...
         - Stress
           - oxx
         - Strain

        SOL 103 (modal)
        ---------------
        Subcase 1
         - mode 1; eigr=123.4
          - EigenvectorXYZ
          - Stress
         - mode 2: eigr=156.3
          - EigenvectorXYZ
          - Stress

        SOL 109 (Freq)
        --------------
        Subcase 1
         - freq=123.4
          - DisplacementXYZ
          - Stress

        SOL 105 (Buckling)
        ------------------
        Subcase 1
         - Preload
          - DisplacementXYZ
         - mode 1; eigr=123.4
          - EigenvectorXYZ
          - Stress
        """
        keys = model.get_key_order()
        assert keys is not None, keys
        #print('keys_order =', keys)
        # per-(key, itime) form fragments, filled by the _fill_* helpers below
        disp_dict = defaultdict(list)
        stress_dict = defaultdict(list)
        strain_dict = defaultdict(list)
        force_dict = defaultdict(list)
        strain_energy_dict = defaultdict(list)
        gpstress_dict = defaultdict(list)
        header_dict = {}
        keys_map = {}
        key_itime = []
        icase, form_optimization = fill_responses(cases, model, icase)
        for key in keys:
            unused_is_data, unused_is_static, unused_is_real, times = _get_times(model, key)
            if times is None:
                # we dynamically created the keys and created extra ones
                continue
            #assert times is not None  # gen22x_modes
            #print('--------------')
            #print('key = %r' % str(key))
            self.stress[key] = StressObject(model, key, self.element_ids, is_stress=True)
            self.strain[key] = StressObject(model, key, self.element_ids, is_stress=False)
            #header_dict[(key, 0)] = '; Static'
            unused_formi = []
            unused_form_time = []
            # track icase so we know whether this key produced any cases
            ncases_old = icase
            icase = self._fill_op2_oug_oqg(cases, model, key, icase,
                                           disp_dict, header_dict, keys_map,
                                           log)
            icase = self._fill_grid_point_forces(cases, model, key, icase,
                                                 disp_dict, header_dict, keys_map)
            # stress
            icase = self._fill_op2_centroidal_stress(
                cases, model, times, key, icase,
                stress_dict, header_dict, keys_map)
            # stress
            icase = self._fill_op2_centroidal_strain(
                cases, model, times, key, icase,
                strain_dict, header_dict, keys_map)
            # force
            icase = self._fill_op2_centroidal_force(
                cases, model, times, key, icase,
                force_dict, header_dict, keys_map)
            # strain energy
            icase = self._fill_op2_centroidal_strain_energy(
                cases, model, times, key, icase,
                strain_energy_dict, header_dict, keys_map)
            # force
            icase = self._fill_op2_gpstress(
                cases, model, times, key, icase,
                gpstress_dict, header_dict, keys_map)
            ncases = icase - ncases_old
            #print('ncases=%s icase=%s' % (ncases, icase))
            #assert ncases > 0, ncases
            if ncases:
                # register one (key, itime) entry per time step for the table
                for itime, unused_dt in enumerate(times):
                    new_key = (key, itime)
                    key_itime.append(new_key)
        # ----------------------------------------------------------------------
        #print('Key,itime:')
        #for key_itimei in key_itime:
            #print('  %s' % str(key_itimei))
        unused_form_out = []
        form_resultsi = form_optimization
        basename = os.path.basename(op2_filename).rstrip()
        form_results = (basename + '-Results', None, form_optimization)
        if len(key_itime) == 0:
            #print('header_dict =', header_dict)
            #print('key_itime =', key_itime)
            if form_optimization:
                form.append(form_results)
            else:
                log.error('No OP2 results were found')
            return form
        form = _build_sort1_table(
            key_itime, keys_map, header_dict,
            form, form_results, form_resultsi,
            disp_dict, stress_dict, strain_dict, force_dict,
            strain_energy_dict, gpstress_dict,
            log)
        return form
def clear_nastran(self):
"""cleans up variables specific to Nastran"""
self.eid_map = {}
self.nid_map = {}
self.eid_to_nid_map = {}
self.element_ids = None
self.node_ids = None
class NastranIO(NastranIO_):
    """Defines the GUI class for Nastran."""
    def __init__(self):
        super().__init__()
    #def __init__(self, gui):
        #super(NastranIO, self).__init__()
        #self.gui = gui # make sure to comment out the property on line 124
        #self.nid_release_map = {}
        #self.stress = {}
        #self.strain = {}
    def _cleanup_nastran_tools_and_menu_items(self):
        """
        hides the Nastran toolbar when loading another format
        """
        self.nastran_tools_menu.setVisible(False)
        #self.menu_help.menuAction().setVisible(True)
        #self.menu_help2.menuAction().setVisible(False)
        self.nastran_toolbar.setVisible(False)
        self.actions['nastran'].setVisible(False)
    def _create_nastran_tools_and_menu_items(self):
        """
        creates the Nastran toolbar when loading a Nastran file

        Returns
        -------
        tools : list
            extra (name, txt, icon, shortcut, tip, func) tool tuples (currently empty)
        menu_items : dict
            {'nastran_toolbar' : (toolbar, action_names)}
        """
        tools = [
            #('about_nastran', 'About Nastran GUI', 'tabout.png', 'CTRL+H',
            #'About Nastran GUI and help on shortcuts', self.about_dialog),
            #('about', 'About Orig GUI', 'tabout.png', 'CTRL+H',
            #'About Nastran GUI and help on shortcuts', self.about_dialog),
        ]
        #self.gui.menu_help2 = self.gui.menubar.addMenu('&HelpMenuNew')
        #self.gui.menu_help.menuAction().setVisible(False)
        # re-show the existing toolbar, or build it on first use
        if hasattr(self, 'nastran_toolbar'):
            self.nastran_tools_menu.setVisible(True)
            self.gui.nastran_toolbar.setVisible(True)
            self.gui.actions['nastran'].setVisible(True)
        else:
            #self.menubar.addMenu('&File')
            self.create_nastran_tools_menu(self.gui)
            self.gui.nastran_toolbar = self.addToolBar('Nastran Toolbar')
            self.gui.nastran_toolbar.setObjectName('nastran_toolbar')
            #self.gui.nastran_toolbar.setStatusTip("Show/Hide nastran toolbar")
            self.gui.actions['nastran'] = self.nastran_toolbar.toggleViewAction()
            self.gui.actions['nastran'].setStatusTip("Show/Hide application toolbar")
        #self.gui.file.menuAction().setVisible(False)
        #self.gui.menu_help.
        #self.gui.actions['about'].Disable()
        menu_items = {}
        menu_items['nastran_toolbar'] = (self.gui.nastran_toolbar,
                                         ('caero', 'caero_subpanels', 'conm2'))
        #menu_items = [
            #(self.menu_help2, ('about_nastran',)),
            #(self.gui.nastran_toolbar, ('caero', 'caero_subpanels', 'conm2'))
            #(self.menu_window, tuple(menu_window)),
            #(self.menu_help, ('load_geometry', 'load_results', 'script', '', 'exit')),
            #(self.menu_help2, ('load_geometry', 'load_results', 'script', '', 'exit')),
        return tools, menu_items
    def create_nastran_tools_menu(self, gui):
        """adds the Nastran 'Tools' menu (shear/moment/torque, create coord) to the gui"""
        #if 'dev' not in __version__:
            #return
        # requires the shear_moment_torque_obj helper to exist
        if not hasattr(self, 'shear_moment_torque_obj'):
            return
        tools = [
            #('script', 'Run Python Script...', 'python48.png', None, 'Runs pyNastranGUI in batch mode', self.on_run_script),
            ('shear_moment_torque', 'Shear, Moment, Torque...', 'python48.png', None,
             'Creates a Shear, Moment, Torque Plot', self.shear_moment_torque_obj.set_shear_moment_torque_menu),
            ('create_coord', 'Create Coordinate System...', 'coord.png', None, 'Creates a Coordinate System', self.on_create_coord),
        ]
        items = (
            'shear_moment_torque',
            'create_coord',
        )
        nastran_tools_menu = gui.menubar.addMenu('Tools')
        gui.nastran_tools_menu = nastran_tools_menu
        menu_items = {
            'nastran_tools' : (nastran_tools_menu, items),
        }
        icon_path = ''
        gui._prepare_actions_helper(icon_path, tools, self.actions, checkables=None)
        gui._populate_menu(menu_items, actions=self.actions)
    def toggle_caero_panels(self):
        """
        Toggle the visibility of the CAERO panels. The visibility of the
        sub panels or panels will be set according to the current
        show_caero_sub_panels state.
        """
        if not self.has_caero:
            return
        self.show_caero_actor = not self.show_caero_actor
        names = ['caero', 'caero_subpanels', 'caero_control_surfaces']
        geometry_properties = self.gui._get_geometry_properties_by_name(names)
        if self.show_caero_actor:
            # control surfaces may not exist in every model
            try:
                geometry_properties['caero_control_surfaces'].is_visible = True
            except KeyError:
                pass
            # show either the subpanels or the main panels, not both
            if self.show_caero_sub_panels:
                geometry_properties['caero_subpanels'].is_visible = True
            else:
                geometry_properties['caero'].is_visible = True
        else:
            try:
                geometry_properties['caero_control_surfaces'].is_visible = False
            except KeyError:
                pass
            geometry_properties['caero'].is_visible = False
            geometry_properties['caero_subpanels'].is_visible = False
        self.gui.on_update_geometry_properties_override_dialog(geometry_properties)
def jsonify(comment_lower: str) -> str:
    """pyNastran: SPOINT={'id':10, 'xyz':[10.,10.,10.]}"""
    # take the segment after the first '=' and drop trailing whitespace
    rhs = comment_lower.split('=')[1].rstrip()
    # single -> double quotes, then force a trailing comma before each '}'
    # (collapsing any doubled comma that creates)
    out = rhs.replace("'", '"')
    out = out.replace('}', ',}')
    return out.replace(',,}', ',}')
def _build_sort1_table(key_itime, keys_map, header_dict,
                       form, form_results, form_resultsi,
                       disp_dict, stress_dict, strain_dict, force_dict,
                       strain_energy_dict, gpstress_dict, log):
    """combines the SORT1-based OP2 results into a SORT1 table

    Walks the (key, itime) pairs in order, grouping consecutive entries
    that share subcase/subtitle/ogs into one 'Subcase ...' node, each
    containing a per-time header node with the Disp/Stress/Strain/Force/
    Strain Energy/Grid Point Stress form fragments collected earlier.
    Appends the assembled tree to ``form`` and returns it.
    """
    is_results = False
    form_resultsi_subcase = []
    #for key, value in header_dict.items():
        #print(key, value)
    # (isubcase, analysis_code, sort_method,
    #  count, ogs, superelement_adaptivity_index) = key
    # seed the "previous group" trackers from the first key
    key_itime0 = key_itime[0]
    key0 = key_itime0[0]
    # (isubcase, analysis_code, sort_method,
    #  count, ogs, superelement_adaptivity_index, pval_step) = key
    subcase_id_old = key0[0]
    count_old = key0[3]
    ogs_old = key0[4]
    subtitle_old = key0[5]
    # subtitle_old is immediately replaced by the keys_map entry
    subtitle_old, label_old, superelement_adaptivity_index_old, unused_pval_step_old = keys_map[key0]
    del label_old
    del superelement_adaptivity_index_old
    # now that we have the data built, we put it in the form
    # in sorted order
    #
    # TODO: consider pval_step
    for key, itime in key_itime:
        # (isubcase, analysis_code, sort_method,
        #  count, ogs, superelement_adaptivity_index, pval_step) = key
        #print('key =', key)
        subcase_id = key[0]
        count = key[3]
        ogs = key[4]
        #print('*ogs =', ogs)
        #subtitle = key[4]
        try:
            subtitle, unused_label, superelement_adaptivity_index, unused_pval_step = keys_map[key]
        except Exception:
            # fallbacks are set before re-raising to ease debugging
            subcase_id = subcase_id_old
            subtitle = subtitle_old + '?'
            superelement_adaptivity_index = '?'
            raise
        #print('key =', key)
        # group boundary: flush the accumulated subcase node
        if subcase_id != subcase_id_old or subtitle != subtitle_old or ogs != ogs_old:
            count_str = '' if count == 0 else ' ; opt_count=%s' % count_old
            ogs_str = '' if ogs == 0 else '; OGS=%s' % ogs_old
            subcase_str = 'Subcase %s; %s%s%s%s' % (
                subcase_id_old, subtitle_old, superelement_adaptivity_index, count_str, ogs_str)
            #print(subcase_str)
            res = (
                subcase_str.rstrip('; '),
                None,
                form_resultsi_subcase
            )
            form_resultsi.append(res)
            form_resultsi_subcase = []
            subcase_id_old = subcase_id
            subtitle_old = subtitle
            count_old = count
            ogs_old = ogs
        try:
            header = header_dict[(key, itime)]
        except KeyError:  # this hits for strain energy
            # build a detailed diagnostic and skip this (key, itime)
            msg = 'Missing (key, itime) in header_dict\n'
            msg += '  key=%s\n' % str(key)
            (subcase, analysis_code, sort_method,
             count, ogs, superelement_adaptivity_index, pval_step) = key
            msg += f'    subcase={subcase}\n'
            msg += f'    analysis_code={analysis_code}\n'
            msg += f'    sort_method={sort_method}\n'
            msg += f'    count={count}\n'
            msg += f'    ogs={ogs}\n'
            msg += f'    superelement_adaptivity_index={superelement_adaptivity_index!r}\n'
            msg += f'    pval_step={pval_step!r}\n'
            msg += '  itime=%s\n' % itime
            msg += '  %s\n' % str((key, itime))
            msg += 'Possible (key, time):\n'
            for keyi in header_dict:
                msg += '  %s\n' % str(keyi)
            #print(msg.rstrip())
            #print('expected = (%s, %r)\n' % (str(key), itime))
            log.error(msg.rstrip() + '\n')
            #self.log.error('expected = (%s, %r)\n' % (str(key), itime))
            continue
            #raise KeyError(msg)
        try:
            header = header.strip()
        except Exception:
            print('header = %r' % header)
            raise
        # build this time step's node from the pre-collected fragments
        form_outi = []
        form_out = (header, None, form_outi)
        disp_formi = disp_dict[(key, itime)]
        stress_formi = stress_dict[(key, itime)]
        strain_formi = strain_dict[(key, itime)]
        force_formi = force_dict[(key, itime)]
        strain_energy_formi = strain_energy_dict[(key, itime)]
        gpstress_formi = gpstress_dict[(key, itime)]
        if disp_formi:
            form_outi += disp_formi
            #form_outi.append(('Disp', None, disp_formi))
        if stress_formi:
            form_outi.append(('Stress', None, stress_formi))
            is_results = True
        if strain_formi:
            form_outi.append(('Strain', None, strain_formi))
            is_results = True
        if force_formi:
            form_outi.append(('Force', None, force_formi))
            is_results = True
        if strain_energy_formi:
            form_outi.append(('Strain Energy', None, strain_energy_formi))
            is_results = True
        if gpstress_formi:
            form_outi.append(('Grid Point Stresses', None, gpstress_formi))
            is_results = True
        if form_outi:
            is_results = True
            form_resultsi_subcase.append(form_out)
            #break
    # flush the final group (uses the loop's last subcase_id/subtitle)
    #print("subcase_id = ", subcase_id)
    if subcase_id:
        count_str = '' if count == 0 else ' ; opt_count=%s' % count_old
        ogs_str = '' if ogs == 0 else '; OGS=%s' % ogs_old
        subcase_str = 'Subcase %s; %s%s%s' % (subcase_id, subtitle, count_str, ogs_str)
        #print('*', subcase_str)
        res = (
            subcase_str.strip('; '),
            None,
            form_resultsi_subcase
        )
        form_resultsi.append(res)
        assert len(form_out) > 0, form_out
        form_resultsi_subcase = []
    if is_results:
        form.append(form_results)
        assert len(form_out) > 0, form_out
        #print('formi =', formi)
        #print('form_out =', form_out)
    #print('form_resultsi =', form_resultsi)
    #print('form_results =', form_results)
        #print(form)
    #if len(formi):
        #form.append(form0)
    #print(form)
    #aa
    #print('form', form)
    #print('form_results =', form_results)
    return form
def _build_normals_quality(settings: Settings,
                           model: BDF, eid_map, nelements: int, cases, form0, icase: int,
                           xyz_cid0,
                           material_coord, material_theta,
                           min_interior_angle, max_interior_angle, dideal_theta,
                           area, max_skew_angle, taper_ratio,
                           max_warp_angle, area_ratio, min_edge_length, max_aspect_ratio,
                           make_offset_normals_dim=True,
                           make_xyz=False, make_nnodes_result=False) -> Tuple[int, Any]:
    """
    Creates some nastran specific results

    creates:
     - ElementDim
     - Normal X/Y/Z
     - NNodes/Elem
     - Area
     - Min/Max Interior Angle
     - Skew Angle
     - Taper Ratio
     - Area Ratio
     - MaterialCoord
     - MaterialTheta

    Returns ``(icase, normals)`` where ``icase`` is the next free case id
    and ``normals`` is the per-element normal array (None unless
    ``make_offset_normals_dim`` is True).

    NOTE(review): the quality arrays (angles, skew, taper, ...) appear to be
    per-centroid float arrays using NaN for "not applicable" -- confirm
    against the caller.
    """
    colormap = settings.colormap
    #ielement = 0
    #nelements = self.element_ids.shape[0]

    # placeholders; only populated when make_offset_normals_dim is True
    normals = None
    offset = None
    xoffset = None
    yoffset = None
    zoffset = None
    element_dim = None
    nnodes_array = None
    if make_offset_normals_dim:
        out = build_offset_normals_dims(model, eid_map, nelements)
        normals, offset, xoffset, yoffset, zoffset, element_dim, nnodes_array = out

    # if not a flat plate
    #if min(nxs) == max(nxs) and min(nxs) != 0.0:
    #is_element_dim = element_dim is not None and np.max(element_dim) != np.min(element_dim)
    is_element_dim = element_dim is not None
    if is_element_dim and isfinite_and_greater_than(element_dim, -1):
        # icase is deliberately NOT advanced here; the +1 offsets below
        # account for this slot
        eid_dim_res = GuiResult(0, header='ElementDim', title='ElementDim',
                                location='centroid', scalar=element_dim, mask_value=-1)
        cases[icase] = (eid_dim_res, (0, 'ElementDim'))

    #is_shell = normals is not None and np.abs(normals).max() > 0.  # NaN -> 2.0
    is_shell = normals is not None and isfinite(normals)  # using NaNs

    # we have to add the 2nd/3rd lines to make sure bars are getting into this check
    is_solid = (
        isfinite_and_nonzero(min_interior_angle) and
        isfinite_and_nonzero(max_interior_angle)
    )
    #print('is_shell=%s is_solid=%s' % (is_shell, is_solid))

    if is_shell:
        # build all the GuiResults first; they are only registered into
        # `cases`/`form_checks` further below
        if make_offset_normals_dim:
            nx_res = GuiResult(
                0, header='NormalX', title='NormalX',
                location='centroid', scalar=normals[:, 0], data_format='%.2f')
            ny_res = GuiResult(
                0, header='NormalY', title='NormalY',
                location='centroid', scalar=normals[:, 1], data_format='%.2f')
            nz_res = GuiResult(
                0, header='NormalZ', title='NormalZ',
                location='centroid', scalar=normals[:, 2], data_format='%.2f')
            nxyz_res = NormalResult(0, 'Normals', 'Normals',
                                    nlabels=2, labelsize=5, ncolors=2,
                                    colormap=colormap, data_format='%.1f',
                                    uname='NormalResult')

        if settings.nastran_is_element_quality:
            area_res = GuiResult(0, header='Area', title='Area',
                                 location='centroid', scalar=area)
            min_edge_length_res = GuiResult(
                0, header='Min Edge Length', title='Min Edge Length',
                location='centroid', scalar=min_edge_length)

            min_theta_res = GuiResult(
                0, header='Min Interior Angle', title='Min Interior Angle',
                location='centroid', scalar=np.degrees(min_interior_angle))
            max_theta_res = GuiResult(
                0, header='Max Interior Angle', title='Max Interior Angle',
                location='centroid', scalar=np.degrees(max_interior_angle))
            dideal_theta_res = GuiResult(
                0, header='Delta Ideal Angle', title='Delta Ideal Angle',
                location='centroid', scalar=np.degrees(dideal_theta))

            skew = np.degrees(max_skew_angle)
            skew_res = GuiResult(
                0, header='Max Skew Angle', title='MaxSkewAngle',
                location='centroid', scalar=skew)
            aspect_res = GuiResult(
                0, header='Aspect Ratio', title='AspectRatio',
                location='centroid', scalar=max_aspect_ratio)

        form_checks = []
        form0.append(('Element Checks', None, form_checks))
        if is_element_dim:
            form_checks.append(('ElementDim', icase, []))

        if make_offset_normals_dim and make_nnodes_result:
            nnodes_res = GuiResult(
                0, header='NNodes/Elem', title='NNodes/Elem',
                location='centroid', scalar=nnodes_array)
            form_checks.append(('NNodes', icase + 1, []))
            cases[icase + 1] = (nnodes_res, (0, 'NNodes'))
            icase += 1

        if make_offset_normals_dim:
            # 0 is element_dim
            cases[icase + 1] = (nx_res, (0, 'NormalX'))
            cases[icase + 2] = (ny_res, (0, 'NormalY'))
            cases[icase + 3] = (nz_res, (0, 'NormalZ'))
            cases[icase + 4] = (nxyz_res, (0, 'Normal'))

            form_checks.append(('NormalX', icase + 1, []))
            form_checks.append(('NormalY', icase + 2, []))
            form_checks.append(('NormalZ', icase + 3, []))
            form_checks.append(('Normal', icase + 4, []))
            icase += 5

        if settings.nastran_is_element_quality:
            cases[icase] = (area_res, (0, 'Area'))
            cases[icase + 1] = (min_edge_length_res, (0, 'Min Edge Length'))
            cases[icase + 2] = (min_theta_res, (0, 'Min Interior Angle'))
            cases[icase + 3] = (max_theta_res, (0, 'Max Interior Angle'))
            cases[icase + 4] = (dideal_theta_res, (0, 'Delta Ideal Angle'))
            cases[icase + 5] = (skew_res, (0, 'Max Skew Angle'))
            cases[icase + 6] = (aspect_res, (0, 'Aspect Ratio'))

            form_checks.append(('Area', icase, []))
            form_checks.append(('Min Edge Length', icase + 1, []))
            form_checks.append(('Min Interior Angle', icase + 2, []))
            form_checks.append(('Max Interior Angle', icase + 3, []))
            form_checks.append(('Delta Ideal Angle', icase + 4, []))
            form_checks.append(('Max Skew Angle', icase + 5, []))
            form_checks.append(('Aspect Ratio', icase + 6, []))
            icase += 7

            # optional quality metrics: only shown when finite and meaningful
            if np.any(np.isfinite(area_ratio)) and np.nanmax(area_ratio) > 1.:
                arearatio_res = GuiResult(
                    0, header='Area Ratio', title='Area Ratio',
                    location='centroid', scalar=area_ratio)
                cases[icase] = (arearatio_res, (0, 'Area Ratio'))
                form_checks.append(('Area Ratio', icase, []))
                icase += 1

            if np.any(np.isfinite(taper_ratio)) and np.nanmax(taper_ratio) > 1.:
                taperratio_res = GuiResult(
                    0, header='Taper Ratio', title='Taper Ratio',
                    location='centroid', scalar=taper_ratio)
                cases[icase] = (taperratio_res, (0, 'Taper Ratio'))
                form_checks.append(('Taper Ratio', icase, []))
                icase += 1

            if isfinite_and_nonzero(max_warp_angle):
                warp_res = GuiResult(
                    0, header='Max Warp Angle', title='MaxWarpAngle',
                    location='centroid', scalar=np.degrees(max_warp_angle))
                cases[icase] = (warp_res, (0, 'Max Warp Angle'))
                form_checks.append(('Max Warp Angle', icase, []))
                icase += 1

        #if (np.abs(xoffset).max() > 0.0 or np.abs(yoffset).max() > 0.0 or
            #np.abs(zoffset).max() > 0.0):
        #if isfinite(max_warp_angle):

        # offsets
        if make_offset_normals_dim and np.any(np.isfinite(xoffset)):
            offset_res = GuiResult(
                0, header='Offset', title='Offset',
                location='centroid', scalar=offset, data_format='%g')
            offset_x_res = GuiResult(
                0, header='OffsetX', title='OffsetX',
                location='centroid', scalar=xoffset, data_format='%g')
            offset_y_res = GuiResult(
                0, header='OffsetY', title='OffsetY',
                location='centroid', scalar=yoffset, data_format='%g')
            offset_z_res = GuiResult(
                0, header='OffsetZ', title='OffsetZ',
                location='centroid', scalar=zoffset, data_format='%g')

            cases[icase] = (offset_res, (0, 'Offset'))
            cases[icase + 1] = (offset_x_res, (0, 'OffsetX'))
            cases[icase + 2] = (offset_y_res, (0, 'OffsetY'))
            cases[icase + 3] = (offset_z_res, (0, 'OffsetZ'))

            form_checks.append(('Offset', icase, []))
            form_checks.append(('OffsetX', icase + 1, []))
            form_checks.append(('OffsetY', icase + 2, []))
            form_checks.append(('OffsetZ', icase + 3, []))
            icase += 4

            # dead code kept for reference; ElementalTableResults variant
            if 0:  # pragma: no cover
                xyz_offset = np.vstack([xoffset, yoffset, zoffset]).T
                titles = ['Offset XYZ']
                headers = titles
                assert xyz_offset.shape[1] == 3, xyz_offset.shape
                assert xyz_offset.shape[0] == len(offset)
                scales = [1.0]
                subcase_id = 0
                #methods = ['magnitude', 'x', 'y', 'z']
                offset_xyz_res = ElementalTableResults(
                    subcase_id, titles, headers, xyz_offset, offset, scales,
                    #methods,
                )
                offset_xyz_res.save_defaults()
                cases[icase] = (offset_z_res, (0, 'OffsetZ'))
                form_checks.append(('OffsetXYZ', icase, []))
                icase += 1

        # raw nodal coordinates; mostly useful for debugging/testing
        if make_xyz or IS_TESTING:
            x_res = GuiResult(
                0, header='X', title='X',
                location='node', scalar=xyz_cid0[:, 0], data_format='%g')
            y_res = GuiResult(
                0, header='Y', title='Y',
                location='node', scalar=xyz_cid0[:, 1], data_format='%g')
            z_res = GuiResult(
                0, header='Z', title='Z',
                location='node', scalar=xyz_cid0[:, 2], data_format='%g')
            cases[icase] = (x_res, (0, 'X'))
            cases[icase + 1] = (y_res, (0, 'Y'))
            cases[icase + 2] = (z_res, (0, 'Z'))
            form_checks.append(('X', icase + 0, []))
            form_checks.append(('Y', icase + 1, []))
            form_checks.append(('Z', icase + 2, []))
            icase += 3

    elif is_solid:
        # only solid elements
        form_checks = []
        form0.append(('Element Checks', None, form_checks))

        if is_element_dim:
            form_checks.append(('ElementDim', icase, []))
            icase += 1

        if settings.nastran_is_element_quality:
            min_edge_length_res = GuiResult(
                0, header='Min Edge Length', title='Min Edge Length',
                location='centroid', scalar=min_edge_length)
            min_theta_res = GuiResult(
                0, header='Min Interior Angle', title='Min Interior Angle',
                location='centroid', scalar=np.degrees(min_interior_angle))
            max_theta_res = GuiResult(
                0, header='Max Interior Angle', title='Max Interior Angle',
                location='centroid', scalar=np.degrees(max_interior_angle))
            #skew = 90. - np.degrees(max_skew_angle)
            #skew_res = GuiResult(0, header='Max Skew Angle', title='MaxSkewAngle',
                                 #location='centroid', scalar=skew)

            form_checks.append(('Min Edge Length', icase, []))
            form_checks.append(('Min Interior Angle', icase + 1, []))
            form_checks.append(('Max Interior Angle', icase + 2, []))
            #form_checks.append(('Max Skew Angle', icase + 3, []))

            cases[icase] = (min_edge_length_res, (0, 'Min Edge Length'))
            cases[icase + 1] = (min_theta_res, (0, 'Min Interior Angle'))
            cases[icase + 2] = (max_theta_res, (0, 'Max Interior Angle'))
            #cases[icase + 3] = (skew_res, (0, 'Max Skew Angle'))
            icase += 3

    else:
        # neither shell nor solid; only the dimension result is exposed
        form0.append(('ElementDim', icase, []))
        icase += 1

    # material coordinate system / angle apply to any element type
    if isgreater_int(material_coord, -1):
        material_coord_res = GuiResult(
            0, header='MaterialCoord', title='MaterialCoord',
            location='centroid',
            scalar=material_coord, mask_value=-1, data_format='%i')
        cases[icase] = (material_coord_res, (0, 'MaterialCoord'))
        form0.append(('MaterialCoord', icase, []))
        icase += 1

    if isfinite(material_theta):
        material_theta_res = GuiResult(
            0, header='MaterialTheta', title='MaterialTheta',
            location='centroid',
            scalar=material_theta, data_format='%.3f')
        cases[icase] = (material_theta_res, (0, 'MaterialTheta'))
        form0.append(('MaterialTheta', icase, []))
        icase += 1
    return icase, normals
def _build_materials(model, pcomp, pshell, is_pshell_pcomp,
                     cases, form0, icase):
    """
    creates:
      - Thickness
      - nPlies (composite only)
      - Material ID
      - E_11
      - E_22
      - E_33
      - Is Isotropic?

    ``pshell``/``pcomp`` are dicts holding per-element arrays under 'mids' and
    'thickness' (and optionally 'nplies'); pass i=0 handles PSHELL data,
    pass i=1 handles PCOMP data.  Returns the next free case id (icase).
    """
    for i, pshell_pcompi in enumerate([pshell, pcomp]):
        mids = pshell_pcompi['mids']
        thickness = pshell_pcompi['thickness']
        if 'nplies' in pshell_pcompi:
            nplies = pshell_pcompi['nplies']
            if nplies is not None and nplies.max() > 0:
                nplies_res = GuiResult(0, header='Number of Plies', title='nPlies',
                                       location='centroid', scalar=nplies, mask_value=0)
                cases[icase] = (nplies_res, (0, 'Number of Plies'))
                form0.append(('Number of Plies', icase, []))
                icase += 1

        if mids is None:
            continue
        nlayers = mids.shape[1]
        for ilayer in range(nlayers):
            if len(thickness.shape) == 2:
                thicknessi = thickness[:, ilayer]
            else:
                ## TODO: I think this is used by a non-PSHELL/PCOMP case
                #print('B-shape...i=%s ilayer=%s' % (i, ilayer))
                thicknessi = thickness

            form_layer = []
            #if i == 1 and ilayer == 0:
                #print('thicknessi = ', thicknessi)
            if isfinite_and_nonzero(thicknessi):
                # the (property pass, layer) pair has special meaning:
                # PCOMP layer 0 is the total thickness; PSHELL layers 1..3
                # are the bending/shear/mid4 fields
                if i == 1 and ilayer == 0:
                    tword = 'Total Thickness'  # thickness is nan
                elif i == 0 and ilayer == 1:
                    tword = '12/t^3'
                elif i == 0 and ilayer == 2:
                    tword = 'ts/t'
                elif i == 0 and ilayer == 3:
                    tword = 'mid4'
                else:
                    tword = 'Thickness'
                if tword != 'mid4':
                    t_res = GuiResult(0, header=tword, title=tword,
                                      location='centroid', scalar=thicknessi)
                    cases[icase] = (t_res, (0, tword))
                    form_layer.append((tword, icase, []))
                    icase += 1

            midsi = mids[:, ilayer]
            if midsi.max() == 0:
                # no material ids for this layer; nothing to show
                pass
                #if not(i == 1 and ilayer == 0):
                    #print('cant find anything in ilayer=%s' % ilayer)
                #continue
            else:
                imids_masked = midsi == 0
                has_mat8, has_mat11, e11, e22, e33 = get_material_arrays(model, midsi)
                mid_res = GuiResult(0, header='MaterialID', title='MaterialID',
                                    location='centroid', scalar=midsi, mask_value=0)
                cases[icase] = (mid_res, (0, 'MaterialID'))
                form_layer.append(('MaterialID', icase, []))
                icase += 1

                if has_mat11:  # also implicitly has_mat8
                    is_orthotropic = not (np.array_equal(e11, e22) and np.array_equal(e11, e33))
                elif has_mat8:
                    is_orthotropic = not np.array_equal(e11, e22)
                else:
                    is_orthotropic = False

                # np.nanmax(e11) > 0. can fail if e11=[nan, nan]
                e112 = np.fmax.reduce(e11)
                is_e11 = True
                if np.isnan(e112):
                    is_e11 = False

                if is_orthotropic:
                    e11_res = GuiResult(0, header='E_11', title='E_11',
                                        location='centroid', scalar=e11, data_format='%.3e')
                    e22_res = GuiResult(0, header='E_22', title='E_22',
                                        location='centroid', scalar=e22, data_format='%.3e')
                    cases[icase] = (e11_res, (0, 'E_11'))
                    cases[icase + 1] = (e22_res, (0, 'E_22'))
                    form_layer.append(('E_11', icase, []))
                    form_layer.append(('E_22', icase + 1, []))
                    icase += 2

                    # -1 marks masked (mid=0) elements; 1 marks isotropic ones
                    is_isotropic = np.zeros(len(e11), dtype='int8')
                    is_isotropic[imids_masked] = -1
                    if has_mat11:
                        is_isotropic[(e11 == e22) | (e11 == e33)] = 1
                        e33_res = GuiResult(0, header='E_33', title='E_33',
                                            location='centroid', scalar=e33, data_format='%.3e')
                        cases[icase] = (e33_res, (0, 'E_33'))
                        form_layer.append(('E_33', icase, []))
                        icase += 1
                    else:
                        is_isotropic[e11 == e22] = 1

                    iso_res = GuiResult(
                        0, header='IsIsotropic?', title='IsIsotropic?',
                        location='centroid', scalar=is_isotropic, data_format='%i',
                        mask_value=-1)
                    cases[icase] = (iso_res, (0, 'Is Isotropic?'))
                    form_layer.append(('Is Isotropic?', icase, []))
                    icase += 1
                elif is_e11:
                    # isotropic
                    assert np.nanmax(e11) > 0, np.nanmax(e11)
                    e11_res = GuiResult(0, header='E', title='E',
                                        location='centroid', scalar=e11, data_format='%.3e')
                    cases[icase] = (e11_res, (0, 'E'))
                    form_layer.append(('E', icase, []))
                    icase += 1

            #print('form_layer =', form_layer)
            if form_layer:
                if nlayers == 1:
                    form0 += form_layer
                else:
                    word = get_nastran_gui_layer_word(i, ilayer, is_pshell_pcomp)
                    form0.append((word, None, form_layer))
    return icase
def _build_optimization(model: BDF, pids: np.ndarray, upids: np.ndarray, nelements: int,
                        cases, form0, icase: int) -> int:
    """
    Creates the optimization visualization.  Supports:
      - DVPREL1/2 shell thickness:
        - DV Region
        - DVPREL Init - t
        - DVPREL Min - t
        - DVPREL Max - t

    Returns the next free case id (icase).
    """
    if upids is None:
        return icase
    if not (len(model.properties) and len(model.dvprels)):
        # len(model.dvprels) + len(model.dvcrels) + len(model.dvmrels) + len(model.desvars)
        return icase

    #dvmrel_init = np.zeros(nelements, dtype='int32')
    #dvgrel_init = np.zeros(nelements, dtype='int32')
    dvprel_dict = model._get_dvprel_ndarrays(nelements, pids)

    optimization_form = []
    for key, (design_region, dvprel_init, dvprel_min, dvprel_max) in dvprel_dict.items():
        # an all-zero region means no DVPREL touches this property word
        if np.nanmax(design_region) == 0:
            continue

        init_title = 'DVPREL Init - %s' % key
        region_res = GuiResult(
            0, header='DV Region', title='DV Region',
            location='centroid', scalar=design_region, mask_value=0)
        init_res = GuiResult(
            0, header=init_title, title=init_title,
            location='centroid', scalar=dvprel_init)

        key_form = [('DV Region', icase, []),
                    (init_title, icase + 1, [])]
        cases[icase] = (region_res, (0, 'DV Region'))
        cases[icase + 1] = (init_res, (0, init_title))
        icase += 2

        if np.any(np.isfinite(dvprel_min)):
            min_title = 'DVPREL Min - %s' % key
            min_res = GuiResult(
                0, header=min_title, title=min_title,
                location='centroid', scalar=dvprel_min)
            cases[icase] = (min_res, (0, min_title))
            key_form.append((min_title, icase, []))
            icase += 1

        if np.any(np.isfinite(dvprel_max)):
            max_title = 'DVPREL Max - %s' % key
            max_res = GuiResult(
                0, header=max_title, title=max_title,
                location='centroid', scalar=dvprel_max)
            cases[icase] = (max_res, (0, max_title))
            key_form.append((max_title, icase, []))
            icase += 1

        optimization_form.append((key, None, key_form))

    if optimization_form:
        form0.append(('Optimization', None, optimization_form))
    return icase
def build_superelement_model(model: BDF, cid: int=0, fdtype: str='float32'):
    """
    Collects the node ids, xyz locations (in coordinate system ``cid``) and
    CD transform indices for the main model and each of its superelements.

    Returns three dicts keyed by superelement id (0 = the main model):
    ``xyz_cid0``, ``nid_cp_cd``, ``icd_transform``.
    """
    all_models = {0: model}
    all_models.update(model.superelement_models)

    xyz_cid0 = {}
    nid_cp_cd = {}
    icd_transform = {}
    for super_id, submodel in sorted(all_models.items()):
        (icd_transformi, icp_transformi,
         xyz_cpi, nid_cp_cdi) = submodel.get_displacement_index_xyz_cp_cd(
            fdtype=fdtype, idtype='int32', sort_ids=True)

        xyzi = submodel.transform_xyzcp_to_xyz_cid(
            xyz_cpi, nid_cp_cdi[:, 0], icp_transformi, cid=cid,
            in_place=False)

        # SELOC repositioning applies only to true superelements (id != 0);
        # TODO: when should seloc get applied -- during superelement
        #       creation or now?  going with superelement creation...
        if super_id in model.seloc and super_id:
            seloc = model.seloc[super_id]
            xyzi = seloc.transform(model, xyzi)

        icd_transform[super_id] = icd_transformi
        nid_cp_cd[super_id] = nid_cp_cdi
        xyz_cid0[super_id] = xyzi
    return xyz_cid0, nid_cp_cd, icd_transform
def _prepare_superelement_model(model: BDF, log: SimpleLogger) -> None:
    """
    Renumbers the model (and its superelements) through a scratch deck,
    reloads the renumbered bulk data, and swaps it back into *model* in place.

    Side effects:
     - writes 'spike.bdf' to the current working directory (not removed)
     - uncross-references *model* and replaces its core dictionaries
       (nodes/elements/properties/materials/loads/seloc/superelements)
     - re-runs ``safe_cross_reference`` on the updated model
    """
    bdf_filename_out = 'spike.bdf'
    unused_model = superelement_renumber(
        model, bdf_filename_out=bdf_filename_out,
        size=8, is_double=False, starting_id_dict=None,
        cards_to_skip=None, log=None, debug=False)

    # reload the renumbered deck; validation/xref is deferred until after
    # the dictionaries have been swapped into the original model
    _model2 = BDF(debug=None, log=log, mode='msc')
    _model2.read_bdf(bdf_filename=bdf_filename_out,
                     validate=False, xref=False, punch=False, read_includes=True,
                     save_file_structure=False, encoding=model._encoding)

    model.uncross_reference()
    model.nodes = _model2.nodes
    model.elements = _model2.elements
    model.properties = _model2.properties
    model.materials = _model2.materials
    model.loads = _model2.loads
    model.seloc = _model2.seloc
    model.superelement_models = _model2.superelement_models
    #model.write_bdf('spike2.bdf')
    #os.remove('spike2.bdf')

    xref_nodes = True
    xref_loads = True
    model.safe_cross_reference(
        xref=True,
        xref_nodes=xref_nodes,
        xref_elements=True,
        xref_nodes_with_elements=False,
        xref_properties=True,
        xref_masses=True,
        xref_materials=False,
        xref_loads=xref_loads,
        xref_constraints=False,
        xref_optimization=False,
        xref_aero=True,
        xref_sets=False,
        create_superelement_geometry=False,
    )

    # dead code kept for reference: per-superelement renumbering of MIRROR
    # SEBULK entries via bdf_renumber
    #from pyNastran.bdf.mesh_utils.bdf_renumber import (
        #bdf_renumber, get_starting_ids_dict_from_mapper)
    #starting_id_dict = {  # todo: hardcoded
        #'nid' : unids.max(),
        #'eid' : 100000,
        #'cid' : 100000,
        #'pid' : 100000,
    #}
    #for seid, sebulk in sorted(model.sebulk.items()):
        #if sebulk.Type == 'MIRROR':
            #print('renumbering mirror seid=%s -> %s' % (sebulk.rseid, seid))
            #superelement = model.superelement_models[seid]
            #bdf_filename_out = 'super_%i.bdf' % seid
            #_model, mapper = bdf_renumber(
                #superelement, bdf_filename_out, size=8, is_double=False,
                #starting_id_dict=starting_id_dict, round_ids=False,
                #cards_to_skip=None, log=log, debug=False)
            #starting_id_dict = get_starting_ids_dict_from_mapper(
                #_model, mapper)
            #superelement2 = BDF(debug=True, log=log, mode='msc')
            #superelement2.read_bdf(bdf_filename_out)
            #model.superelement_models[seid] = superelement2
            ##os.remove(bdf_filename_out)
        #else:  # pragma: no cover
            #raise NotImplementedError(sebulk)
    #model.write_bdf('spike.bdf')
def get_caero_control_surface_grid(grid,
                                   box_id_to_caero_element_map,
                                   caero_points,
                                   boxes_to_show: List[int],
                                   log):
    """
    Builds one vtkQuad cell per aero box of a control surface.

    Zero-area boxes are skipped (with a warning).  Returns the corner
    points, the point-index array of the plotted quads, the box
    centroids, and the box areas.
    """
    assert isinstance(boxes_to_show, list), type(boxes_to_show)
    vtk_type = 9  # vtkQuad

    all_points = []
    centroids = []
    areas = []
    plot_elements = []
    npoints = 0  # running offset of grid points already emitted
    for box_id in boxes_to_show:
        elementi = box_id_to_caero_element_map[box_id]
        pointsi = caero_points[elementi]
        p1, p2, p3, p4 = pointsi

        # quad area from the cross product of the diagonals
        area = np.linalg.norm(np.cross(p3 - p1, p4 - p2)) / 2.
        if area == 0.0:
            log.warning(f'box_id={box_id:d} has 0 area')
            continue

        quad = vtkQuad()
        point_ids = quad.GetPointIds()
        for corner in range(4):
            point_ids.SetId(corner, npoints + corner)
        grid.InsertNextCell(vtk_type, point_ids)

        plot_elements.append(npoints + elementi)
        all_points.append(pointsi)
        centroids.append((p1 + p2 + p3 + p4) / 4.)
        areas.append(area)
        npoints += 4

    elements = np.asarray(plot_elements, dtype='int32')
    return all_points, elements, centroids, areas
def _set_nid_to_pid_map(nid_to_pid_map: Dict[int, List[int]],
pid: int,
node_ids: List[int]) -> None:
for nid in node_ids:
nid_to_pid_map[nid].append(pid)
def _set_nid_to_pid_map_or_blank(nid_to_pid_map: Dict[int, List[int]],
pid: int,
node_ids: List[Optional[int]]) -> None:
for nid in node_ids:
if nid is not None:
nid_to_pid_map[nid].append(pid)
def _create_masses(gui: NastranIO, model: BDF, node_ids: np.ndarray,
                   create_secondary_actors=True) -> int:
    """
    Count the masses.
    Create an actor (with a follower function) if there are masses.

    Returns the number of CONM2/CMASS1/CMASS2 cards (0 if secondary actors
    are disabled or none exist).

    NOTE(review): ``node_ids`` must be sorted for the ``np.searchsorted``
    lookups in the follower to be valid -- confirm against the caller.
    """
    assert node_ids is not None, node_ids
    nconm2 = 0
    if 'CONM2' in model.card_count:
        nconm2 += model.card_count['CONM2']
    if 'CMASS1' in model.card_count:
        nconm2 += model.card_count['CMASS1']
    if 'CMASS2' in model.card_count:
        nconm2 += model.card_count['CMASS2']
    # CMASS3, CMASS4 are applied to SPOINTs

    if not create_secondary_actors or nconm2 == 0:
        nconm2 = 0
        return nconm2

    def update_conm2s_function(unused_nid_map, unused_ugrid, points, nodes) -> None:
        # follower: repositions the mass markers whenever the nodes move
        if not gui.settings.nastran_is_update_conm2:
            return
        j2 = 0
        mass_grid = gui.alt_grids['conm2']
        for unused_eid, element in sorted(model.masses.items()):
            if isinstance(element, CONM2):
                nid = element.nid
                inid = np.searchsorted(node_ids, nid)
                xyz_nid = nodes[inid, :]
                centroid = element.offset(xyz_nid)
                points.SetPoint(j2, *centroid)
            elif element.type in ('CMASS1', 'CMASS2'):
                # centroid of the (up to 2) attached grids
                # NOTE(review): if a node is blank, p1/p2 may be unbound
                # here (NameError) -- presumably at least node 1 is always
                # set; confirm
                n1, n2 = element.nodes
                factor = 0.
                if element.nodes[0] is not None:
                    inid = np.searchsorted(node_ids, n1)
                    p1 = nodes[inid, :]
                    factor += 1.
                if element.nodes[1] is not None:
                    inid = np.searchsorted(node_ids, n2)
                    p2 = nodes[inid, :]
                    factor += 1.
                centroid = (p1 + p2) / factor
                points.SetPoint(j2, *centroid)

                elem = vtk.vtkVertex()
                point_ids = elem.GetPointIds()
                point_ids.SetId(0, j2)
                mass_grid.InsertNextCell(elem.GetCellType(), point_ids)
            else:
                continue
                #self.gui.log_info("skipping %s" % element.type)
            j2 += 1
        return

    gui.create_alternate_vtk_grid(
        'conm2', color=ORANGE_FLOAT, line_width=5, opacity=1., point_size=4,
        follower_function=update_conm2s_function,
        representation='point')
    return nconm2
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,640
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/cards/superelements.py
|
"""
All superelements are defined in this file. This includes:
* CSUPER
* CSUPEXT
* SEBNDRY
* SEBULK
* SECONCT
* SEELT
* SEEXCLD
* SELABEL
* SELOC
* SELOAD
* SEMPLN
* SENQSET
* SETREE
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import numpy as np
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.bdf.cards.base_card import (
BaseCard, expand_thru #, _node_ids
)
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.bdf_interface.assign_type import (
integer, integer_or_blank, integer_or_string,
string, string_or_blank, double_or_blank, integer_string_or_blank,
exact_string_or_blank,
)
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
class SEBNDRY(BaseCard):
    """
    Superelement Boundary-Point Definition

    Lists the grid points of a partitioned superelement used by the
    automatic boundary search against a given superelement (or against
    every other superelement when SEIDB is 'ALL').

    +---------+-------+-------+-------+-------+-------+-------+-------+-------+
    |    1    |   2   |   3   |   4   |   5   |   6   |   7   |   8   |   9   |
    +=========+=======+=======+=======+=======+=======+=======+=======+=======+
    | SEBNDRY | SEIDA | SEIDB | GIDA1 | GIDA2 | GIDA3 | GIDA4 | GIDA5 | GIDA6 |
    +---------+-------+-------+-------+-------+-------+-------+-------+-------+
    |         | GIDA7 | GIDA8 |  etc. |       |       |       |       |       |
    +---------+-------+-------+-------+-------+-------+-------+-------+-------+
    | SEBNDRY |  400  |   4   |   10  |   20  |   30  |   40  |       |       |
    +---------+-------+-------+-------+-------+-------+-------+-------+-------+
    | SEBNDRY |  400  |  ALL  |   10  |   20  |   30  |  THRU |   40  |       |
    +---------+-------+-------+-------+-------+-------+-------+-------+-------+
    """
    type = 'SEBNDRY'

    @classmethod
    def _init_from_empty(cls):
        # minimal valid card used by the bdf test infrastructure
        return SEBNDRY(1, 2, [10, 20, 30], comment='')

    def __init__(self, seid_a, seid_b, ids, comment=''):
        """Creates a SEBNDRY card"""
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid_a = seid_a
        self.seid_b = seid_b
        #: Identifiers of grids points. (Integer > 0)
        self.ids = expand_thru(ids)
        self.ids_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """Adds a SEBNDRY card from ``BDF.add_card(...)``"""
        seid_a = integer(card, 1, 'seid_a')
        # SEIDB may be an integer or the literal 'ALL'
        seid_b = integer_string_or_blank(card, 2, 'seid_b')

        ids = []
        i = 1
        for ifield in range(3, len(card)):
            idi = integer_string_or_blank(card, ifield, 'ID%i' % i)
            if idi:
                i += 1
                ids.append(idi)
        assert len(card) >= 3, f'len(SEBNDRY card) = {len(card):d}\ncard={card}'
        return SEBNDRY(seid_a, seid_b, ids, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        pass

    def safe_cross_reference(self, model: BDF, xref_errors):
        self.cross_reference(model)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def raw_fields(self):
        return ['SEBNDRY', self.seid_a, self.seid_b] + self.ids

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        return self.comment + print_card_8(self.repr_fields())
class RELEASE(BaseCard):
    """
    Superelement Boundary Grid Point Release

    Defines degrees-of-freedom for superelement exterior grid points
    that are not connected to the superelement.

    +---------+------+------+------+------+------+------+------+------+
    |    1    |   2  |   3  |   4  |   5  |   6  |   7  |   8  |   9  |
    +=========+======+======+======+======+======+======+======+======+
    | RELEASE | SEID | COMP | GID1 | GID2 | GID3 | GID4 | GID5 | GID6 |
    +---------+------+------+------+------+------+------+------+------+
    |         | GID7 | GID8 | etc. |      |      |      |      |      |
    +---------+------+------+------+------+------+------+------+------+
    | RELEASE | 400  |   4  |  10  |  20  |  30  |  40  |      |      |
    +---------+------+------+------+------+------+------+------+------+
    | RELEASE | 400  |  156 |  30  | THRU |  40  |      |      |      |
    +---------+------+------+------+------+------+------+------+------+
    | RELEASE | 400  |  156 |  ALL |      |      |      |      |      |
    +---------+------+------+------+------+------+------+------+------+
    """
    type = 'RELEASE'

    @classmethod
    def _init_from_empty(cls):
        seid = 1
        comp = 1
        nids = [10, 20, 30]
        # bug fix: this previously constructed a SEBNDRY card by mistake
        return RELEASE(seid, comp, nids, comment='')

    def __init__(self, seid, comp, nids, comment=''):
        """
        Creates a RELEASE card

        Parameters
        ----------
        seid : int
            the superelement id
        comp : int
            the DOF components to release (e.g. 4, 156)
        nids : List[int/str]
            grid ids; may contain 'THRU' ranges or 'ALL'
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        self.comp = comp
        #: Identifiers of grids points. (Integer > 0)
        self.nids = expand_thru(nids)
        self.nids_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """Adds a RELEASE card from ``BDF.add_card(...)``"""
        seid = integer(card, 1, 'seid')
        comp = integer(card, 2, 'comp')
        nids = []
        i = 3
        nfields = len(card)
        for ifield in range(3, nfields):
            # entries may be integers or strings ('THRU', 'ALL')
            idi = integer_or_string(card, ifield, 'ID%i' % i)
            nids.append(idi)
            i += 1
        assert len(card) >= 3, f'len(RELEASE card) = {len(card):d}\ncard={card}'
        return RELEASE(seid, comp, nids, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        # nothing to cross-reference
        pass

    def safe_cross_reference(self, model: BDF, xref_errors):
        self.cross_reference(model)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def raw_fields(self):
        list_fields = ['RELEASE', self.seid, self.comp] + self.nids
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class SEELT(BaseCard):
    """
    Assigns elements (EIDi) to superelement SEID.

    +-------+------+------+------+------+------+------+------+------+
    |   1   |  2   |  3   |  4   |  5   |  6   |  7   |  8   |  9   |
    +=======+======+======+======+======+======+======+======+======+
    | SEELT | SEID | EID1 | EID2 | EID3 | EID4 | EID5 | EID6 | EID7 |
    +-------+------+------+------+------+------+------+------+------+
    """
    type = 'SEELT'

    @classmethod
    def _init_from_empty(cls):
        # default instance used by the card test machinery
        seid = 10
        eids = [1, 2, 3]
        return SEELT(seid, eids, comment='')

    def __init__(self, seid, eids, comment=''):
        """
        Creates a SEELT card

        Parameters
        ----------
        seid : int
            superelement id
        eids : List[int]
            element ids; THRU ranges are expanded
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        #: Identifiers of elements. (Integer > 0)
        self.eids = expand_thru(eids)
        self.eids_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SEELT card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        seid = integer(card, 1, 'seid')
        eids = []
        i = 1
        nfields = len(card)
        for ifield in range(2, nfields):
            eid = integer_string_or_blank(card, ifield, 'eid_%i' % i)
            if eid:
                i += 1
                eids.append(eid)
        # single-line card: at most 7 element ids
        assert len(card) <= 9, f'len(SEELT card) = {len(card):d}\ncard={card}'
        return SEELT(seid, eids, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SEELT seid=%s' % (self.seid)
        eids_ref = self._xref_elements_plotels(model, self.eids, msg=msg)
        self.eids_ref = eids_ref

    def _xref_elements_plotels(self, model, eids, msg=''):
        """looks up each eid in model.elements, then model.plotels;
        collects all missing ids and raises a single KeyError for them"""
        eids_ref = []
        missing_eids = []
        for eid in eids:
            if eid in model.elements:
                elem = model.elements[eid]
            elif eid in model.plotels:
                elem = model.plotels[eid]
            else:
                missing_eids.append(eid)
                continue
            eids_ref.append(elem)
        if missing_eids:
            raise KeyError('eids=%s not found%s' % (missing_eids, msg))
        return eids_ref

    def safe_cross_reference(self, model: BDF, xref_errors):
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        # NOTE(review): this delegates to cross_reference, which raises on
        # missing element ids, so it is not actually "safe"
        return self.cross_reference(model)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.eids_ref = None

    def raw_fields(self):
        # NOTE(review): uses the raw self.eids, not the cross-referenced ids
        list_fields = ['SEELT', self.seid] + self.eids ## TODO: xref
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class SELOAD(BaseCard):
    """
    External Superelement Load Mapping to Residual

    Maps loads from an external superelement to a specified load set for
    the residual structure.

    +--------+-------+------+-------+
    |   1    |   2   |  3   |   4   |
    +========+=======+======+=======+
    | SELOAD | LIDS0 | SEID | LIDSE |
    +--------+-------+------+-------+
    | SELOAD | 10010 | 100  |  10   |
    +--------+-------+------+-------+
    """
    # bugfix: was 'SELOC' (copy/paste error); this card is a SELOAD
    type = 'SELOAD'

    @classmethod
    def _init_from_empty(cls):
        """creates a default SELOAD card; used by the card test machinery"""
        lid_s0 = 1
        seid = 2
        lid_se = 3
        return SELOAD(lid_s0, seid, lid_se, comment='')

    def __init__(self, lid_s0, seid, lid_se, comment=''):
        """
        Creates a SELOAD card

        Parameters
        ----------
        lid_s0 : int
            load set id in the residual structure (LIDS0)
        seid : int
            the external superelement id
        lid_se : int
            load set id in the superelement (LIDSE)
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.lid_s0 = lid_s0
        self.seid = seid
        self.lid_se = lid_se

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SELOAD card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        lid_s0 = integer(card, 1, 'lid_s0')
        seid = integer(card, 2, 'seid')
        lid_se = integer(card, 3, 'lid_se')
        assert len(card) <= 4, f'len(SELOAD card) = {len(card):d}\ncard={card}'
        return SELOAD(lid_s0, seid, lid_se, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """SELOAD stores no references to other cards; nothing to link"""
        pass

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def safe_cross_reference(self, model: BDF, xref_errors):
        """SELOAD stores no references to other cards; nothing to link"""
        pass

    def raw_fields(self):
        """returns the card's fields in their unmodified form"""
        list_fields = ['SELOAD', self.lid_s0, self.seid, self.lid_se]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class SEEXCLD(BaseCard):
    """
    Partitioned Superelement Exclusion

    Defines grids that will be excluded during the attachment of a
    partitioned superelement.

    +---------+-------+-------+-------+-------+-------+-------+-------+-------+
    |    1    |   2   |   3   |   4   |   5   |   6   |   7   |   8   |   9   |
    +=========+=======+=======+=======+=======+=======+=======+=======+=======+
    | SEEXCLD | SEIDA | SEIDB | GIDA1 | GIDA2 | GIDA3 | GIDA4 | GIDA5 | GIDA6 |
    +---------+-------+-------+-------+-------+-------+-------+-------+-------+
    |         | GIDA7 | GIDA8 | etc.  |       |       |       |       |       |
    +---------+-------+-------+-------+-------+-------+-------+-------+-------+
    """
    type = 'SEEXCLD'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        """builds a default SEEXCLD; used by the card test machinery"""
        return SEEXCLD(1, 2, [10, 20, 30], comment='')

    def __init__(self, seid_a, seid_b, nodes, comment=''):
        """
        Creates a SEEXCLD card

        Parameters
        ----------
        seid_a / seid_b : int
            the two superelement ids
        nodes : List[int/str]
            grid ids to exclude; THRU ranges are expanded
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid_a = seid_a
        self.seid_b = seid_b
        #: Identifiers of grids points. (Integer > 0)
        self.nodes = expand_thru(nodes)
        self.nodes_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """Adds a SEEXCLD card from ``BDF.add_card(...)``"""
        seid_a = integer(card, 1, 'seid_a')
        seid_b = integer_string_or_blank(card, 2, 'seid_b')
        nodes = []
        for ifield in range(3, len(card)):
            # field names count only the non-blank entries, starting at 1
            nid = integer_string_or_blank(card, ifield, 'nid_%i' % (len(nodes) + 1))
            if nid:
                nodes.append(nid)
        assert len(card) >= 3, f'len(SEEXCLD card) = {len(card):d}\ncard={card}'
        return SEEXCLD(seid_a, seid_b, nodes, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """SEEXCLD stores no references to other cards; nothing to link"""
        pass

    def safe_cross_reference(self, model: BDF, xref_errors):
        """delegates to ``cross_reference``, which is a no-op"""
        self.cross_reference(model)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    @property
    def node_ids(self):
        """the raw excluded grid ids"""
        return self.nodes

    def raw_fields(self):
        """returns the card's fields in their unmodified form"""
        return ['SEEXCLD', self.seid_a, self.seid_b] + list(self.node_ids)

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        return self.comment + print_card_8(self.repr_fields())
class SEMPLN(BaseCard):
    """
    Superelement Mirror Plane

    Defines a mirror plane for mirroring a partitioned superelement.

    +--------+------+-------+----+----+------+
    |   1    |  2   |   3   | 4  | 5  |  6   |
    +========+======+=======+====+====+======+
    | SEMPLN | SEID | PLANE | P1 | P2 | P3   |
    +--------+------+-------+----+----+------+
    | SEMPLN | 110  | PLANE | 12 | 45 | 1125 |
    +--------+------+-------+----+----+------+
    """
    type = 'SEMPLN'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        """creates a default SEMPLN card; used by the card test machinery"""
        seid = 1
        p1 = 2
        p2 = 3
        p3 = 4
        return SEMPLN(seid, p1, p2, p3, comment='')

    def __init__(self, seid, p1, p2, p3, comment=''):
        """
        Creates a SEMPLN card

        Parameters
        ----------
        seid : int
            the superelement to mirror
        p1 / p2 / p3 : int
            three point ids defining the mirror plane
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        self.nodes = [p1, p2, p3]
        self.nodes_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SEMPLN card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        # bugfix: the parse-error field names were wrong ('lid_s0'/'seid',
        # left over from SELOAD); they now match the actual fields
        seid = integer(card, 1, 'seid')
        plane = string(card, 2, 'plane')
        p1 = integer(card, 3, 'p1')
        p2 = integer(card, 4, 'p2')
        p3 = integer(card, 5, 'p3')
        assert plane == 'PLANE', plane
        assert len(card) <= 6, f'len(SEMPLN card) = {len(card):d}\ncard={card}'
        return SEMPLN(seid, p1, p2, p3, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SEMPLN seid=%s' % self.seid
        self.nodes_ref = model.Nodes(self.nodes, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors):
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SEMPLN seid=%s' % self.seid
        self.nodes_ref = model.Nodes(self.nodes, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # capture the ids from the refs before dropping them
        self.nodes = self.node_ids
        self.nodes_ref = None

    @property
    def node_ids(self):
        """the plane point ids (resolved through the refs when available)"""
        return _node_ids(self, self.nodes, self.nodes_ref, allow_empty_nodes=False, msg='')

    def raw_fields(self):
        """returns the card's fields in their unmodified form"""
        list_fields = ['SEMPLN', self.seid, 'PLANE'] + self.node_ids
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class SELABEL(BaseCard):
    """
    Superelement Output Label

    Defines a label or name to be printed in the superelement output headings.

    +---------+------+---------------------------------+
    |    1    |  2   |                3                |
    +=========+======+=================================+
    | SELABEL | SEID |              LABEL              |
    +---------+------+---------------------------------+
    | SELABEL |  10  | LEFT REAR FENDER, MODEL XYZ2000 |
    +---------+------+---------------------------------+
    """
    type = 'SELABEL'

    @classmethod
    def _init_from_empty(cls):
        """builds a default SELABEL; used by the card test machinery"""
        return SELABEL(1, 'LEFT REAR FENDER', comment='')

    def __init__(self, seid, label, comment=''):
        """
        Creates a SELABEL card

        Parameters
        ----------
        seid : int
            the superelement to label
        label : str
            the label text
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        self.label = label

    def validate(self):
        """the label must be a string"""
        assert isinstance(self.label, str), self.label

    @classmethod
    def add_card(cls, card, comment=''):
        """Adds a SELABEL card from ``BDF.add_card(...)``"""
        seid = integer(card, 1, 'seid')
        # the label spans all remaining fields; blanks default to ' '
        pieces = [exact_string_or_blank(card, ifield, 'label', ' ')
                  for ifield in range(2, len(card))]
        return SELABEL(seid, ''.join(pieces), comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """SELABEL stores no references to other cards; nothing to link"""
        pass

    def safe_cross_reference(self, model: BDF, xref_errors):
        """SELABEL stores no references to other cards; nothing to link"""
        pass

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def raw_fields(self):
        """the card is free-form, so the formatted card is the raw form"""
        return [self.write_card()]

    def repr_fields(self):
        """see ``raw_fields``"""
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card; the label is written as one free-form field"""
        card = 'SELABEL %-8s%s\n' % (self.seid, self.label)
        return self.comment + card
class SELOC(BaseCard):
    """
    Partitioned Superelement Location

    Defines a partitioned superelement relocation by listing three non-colinear points in
    the superelement and three corresponding points not belonging to the superelement.

    +-------+------+-----+-----+-----+------+-----+-----+
    |   1   |  2   |  3  |  4  |  5  |  6   |  7  |  8  |
    +=======+======+=====+=====+=====+======+=====+=====+
    | SELOC | SEID | PA1 | PA2 | PA3 | PB1  | PB2 | PB3 |
    +-------+------+-----+-----+-----+------+-----+-----+
    | SELOC | 110  | 10  | 100 | 111 | 1010 | 112 | 30  |
    +-------+------+-----+-----+-----+------+-----+-----+
    """
    type = 'SELOC'
    _properties = ['nodes_0_ids', 'nodes_seid_ids']

    @classmethod
    def _init_from_empty(cls):
        # default instance used by the card test machinery
        seid = 1
        nodes_seid = [1, 2, 3]
        nodes0 = 42
        return SELOC(seid, nodes_seid, nodes0, comment='')

    def __init__(self, seid, nodes_seid, nodes0, comment=''):
        """
        Creates an SELOC card, which transforms the superelement SEID
        from PA to PB.  Basically, define two CORD1Rs.

        Parameters
        ----------
        seid : int
            the superelement to transform
        nodes_seid : List[int, int, int]
            the nodes in the superelement that define the resulting coordinate system
        nodes0 : List[int, int, int]
            the nodes in the superelement that define the starting coordinate system
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        #: Identifiers of grids points. (Integer > 0)
        self.nodes_0 = expand_thru(nodes0, set_fields=False, sort_fields=False)
        self.nodes_seid = expand_thru(nodes_seid, set_fields=False, sort_fields=False)
        self.nodes_0_ref = None
        self.nodes_seid_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SELOC card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        seid = integer(card, 1, 'seid')
        nodes0 = []
        nodes_seid = []
        i = 1
        # NOTE(review): len(card) <= 8 is asserted below, so card[9:] is
        # always empty and this parity check is trivially true
        fields = card[9:]
        nfields = len(fields)
        assert nfields % 2 == 0, fields
        # NOTE(review): i is never incremented, so every parse error is
        # reported with the field name 'nid_1'
        for ifield in [2, 3, 4]:
            nid_a = integer(card, ifield, 'nid_%i' % i)
            nodes_seid.append(nid_a)
        for ifield in [5, 6, 7]:
            nid_b = integer(card, ifield, 'nid_%i' % i)
            nodes0.append(nid_b)
        assert len(card) <= 8, f'len(SELOC card) = {len(card):d}\ncard={card}'
        return SELOC(seid, nodes_seid, nodes0, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SELOC seid=%s' % (self.seid)
        #PA1-PA3 Three GRID entries in the PART that are to be used to move the PART. After moving,
        #these points will be coincident with PB1-PB3.
        #
        # Three GRID entries
        self.nodes_seid_ref = model.superelement_nodes(self.seid, self.nodes_seid, msg=msg)
        #PB1-PB3 Three points (either GRID or POINT entries) defined in the Main Bulk Data Section
        #that define where the PART should be.
        #
        # either GRID or POINT entries
        self.nodes_0_ref = model.get_point_grids(self.nodes_0, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors):
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SELOC seid=%s' % (self.seid)
        self.nodes_seid_ref = model.superelement_nodes(self.seid, self.nodes_seid, msg=msg)
        self.nodes_0_ref = model.get_point_grids(self.nodes_0, msg=msg)

    @property
    def nodes_seid_ids(self):
        """the PA1-PA3 ids (resolved through the refs when available)"""
        return _node_ids(self, self.nodes_seid, self.nodes_seid_ref,
                         allow_empty_nodes=False, msg='')

    @property
    def nodes_0_ids(self):
        """the PB1-PB3 ids (resolved through the refs when available)"""
        return _node_ids(self, self.nodes_0, self.nodes_0_ref, allow_empty_nodes=False, msg='')

    def transform(self, model, xyz_cid0):
        """
        Applies the SELOC relocation to the (n, 3) ``xyz_cid0`` array and
        returns the transformed array.

        Two temporary CORD2R cards are added to ``model.coords`` to derive
        the rotation (one from PA1-PA3, one from PB1-PB3) and are deleted
        again before returning; requires a prior ``cross_reference``.
        """
        #if self.nodes_0_ref is None:
            #self.cross_reference(model)
        global_coord_ref = self.nodes_0_ref
        seid_coord_ref = self.nodes_seid_ref
        p123_0 = np.array([node.get_position() for node in global_coord_ref])
        p123_seid = np.array([node.get_position() for node in seid_coord_ref])
        #print('global_coord_ref:\n%s' % global_coord_ref)
        #print('seid_coord_ref:\n%s' % seid_coord_ref)
        #print('p123_seid:\n%s' % p123_seid)
        #print('p123_0:\n%s' % p123_0)
        # the temporary coords get ids above the current maximum
        cid = max(model.coords)
        coord_seid = model.add_cord2r(cid+1, p123_seid[0, :], p123_seid[1, :], p123_seid[2, :])
        coord_0 = model.add_cord2r(cid+2, p123_0[0, :], p123_0[1, :], p123_0[2, :])
        coord_0.setup()
        coord_seid.setup()
        #print('beta_seid:\n%s' % coord_seid.beta())
        #print('beta0:\n%s' % coord_0.beta())
        #print(coord_seid.get_stats())
        # TODO: coord xform:
        #       xform = coord0.T * coord_seid
        #       xform = coord_seid.T * coord0
        xform = coord_0.beta().T @ coord_seid.beta()
        #print('xform%i:\n%s' % (self.seid, xform))
        dorigin = p123_0[0, :] - p123_seid[0, :]  # at least, I'm sure on this...
        # remove the temporary coordinate systems again
        del model.coords[cid + 1]
        del model.coords[cid + 2]
        # TODO: not 100% on this xform
        xyz_cid0 = xyz_cid0.dot(xform.T) + dorigin
        return xyz_cid0

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # capture the ids from the refs before dropping them
        self.nodes_seid = self.nodes_seid_ids
        self.nodes_0 = self.nodes_0_ids
        self.nodes_0_ref = None
        self.nodes_seid_ref = None

    def raw_fields(self):
        """returns the card's fields in their unmodified form"""
        list_fields = ['SELOC', self.seid] + list(self.nodes_seid_ids) + list(self.nodes_0_ids)
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class SETREE(BaseCard):
    """
    Superelement Tree Definition (Alternate Form of DTI,SETREE)

    Specifies superelement reduction order.

    +--------+-------+-------+-------+-------+-------+-------+-------+-------+
    |   1    |   2   |   3   |   4   |   5   |   6   |   7   |   8   |   9   |
    +========+=======+=======+=======+=======+=======+=======+=======+=======+
    | SETREE | SEID  | SEUP1 | SEUP2 | SEUP3 | SEUP4 | SEUP5 | SEUP6 | SEUP7 |
    +--------+-------+-------+-------+-------+-------+-------+-------+-------+
    |        | SEUP8 | SEUP9 | etc.  |       |       |       |       |       |
    +--------+-------+-------+-------+-------+-------+-------+-------+-------+
    | SETREE |  400  |  10   |  20   |  30   |  40   |       |       |       |
    +--------+-------+-------+-------+-------+-------+-------+-------+-------+
    """
    type = 'SETREE'

    @classmethod
    def _init_from_empty(cls):
        """builds a default SETREE; used by the card test machinery"""
        return SETREE(10, [1, 2, 3], comment='')

    def __init__(self, seid, superelements, comment=''):
        """
        Creates a SETREE card

        Parameters
        ----------
        seid : int
            the downstream superelement
        superelements : List[int]
            the upstream superelement ids; THRU ranges are expanded
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        #: ids of the upstream superelements
        self.superelements = expand_thru(superelements)
        self.superelements_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """Adds a SETREE card from ``BDF.add_card(...)``"""
        seid = integer(card, 1, 'seid')
        superelements = []
        for ifield in range(2, len(card)):
            # field names count only the non-blank entries, starting at 1
            super_id = integer_string_or_blank(
                card, ifield, 'ID%i' % (len(superelements) + 1))
            if super_id:
                superelements.append(super_id)
        assert len(card) >= 3, f'len(SETREE card) = {len(card):d}\ncard={card}'
        return SETREE(seid, superelements, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SETREE seid=%s' % self.seid
        superelements_ref = []
        missing_superelements = []
        for super_id in self.superelements:
            try:
                superelements_ref.append(model.superelement_models[super_id])
            except KeyError:
                missing_superelements.append(super_id)
        # report every missing superelement at once
        if missing_superelements:
            raise KeyError('cannot find superelements=%s%s' % (missing_superelements, msg))
        self.superelements_ref = superelements_ref

    def safe_cross_reference(self, model: BDF, xref_errors):
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        self.cross_reference(model)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def raw_fields(self):
        """returns the card's fields in their unmodified form"""
        return ['SETREE', self.seid] + list(self.superelements)

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        return self.comment + print_card_8(self.repr_fields())
class CSUPER(BaseCard):
    """
    Secondary Superelement Connection

    Defines the grid or scalar point connections for identical or mirror image
    superelements or superelements from an external source. These are all known as
    secondary superelements.

    +--------+------+------+------+-----+-----+-----+-----+-----+
    |   1    |  2   |  3   |  4   |  5  |  6  |  7  |  8  |  9  |
    +========+======+======+======+=====+=====+=====+=====+=====+
    | CSUPER | SSlD | PSID | GP1  | GP2 | GP3 | GP4 | GP5 | GP6 |
    +--------+------+------+------+-----+-----+-----+-----+-----+
    |        | GP7  | GP8  | etc. |     |     |     |     |     |
    +--------+------+------+------+-----+-----+-----+-----+-----+
    """
    type = 'CSUPER'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        """builds a default CSUPER; used by the card test machinery"""
        return CSUPER(1, 1, [1, 2], comment='')

    def __init__(self, seid, psid, nodes, comment=''):
        """
        Creates a CSUPER card

        Parameters
        ----------
        seid : int
            the secondary superelement id
        psid : int
            the primary superelement id (0 if unused)
        nodes : List[int/str]
            the connection grid/scalar points; THRU ranges are expanded
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        self.psid = psid
        #: Identifiers of grids points. (Integer > 0)
        self.nodes = expand_thru(nodes)
        self.nodes_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """Adds a CSUPER card from ``BDF.add_card(...)``"""
        seid = integer(card, 1, 'seid')
        psid = integer_or_blank(card, 2, 'psid', 0)
        nodes = []
        for ifield in range(3, len(card)):
            # field names count only the non-blank entries, starting at 1
            nid = integer_string_or_blank(card, ifield, 'nid_%i' % (len(nodes) + 1))
            if nid:
                nodes.append(nid)
        assert len(card) >= 3, f'len(CSUPER card) = {len(card):d}\ncard={card}'
        return CSUPER(seid, psid, nodes, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """looks the connection points up on the model"""
        msg = ', which is required by CSUPER seid=%s' % self.seid
        self.nodes_ref = model.Nodes(self.nodes, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors):
        """looks the connection points up on the model"""
        msg = ', which is required by CSUPER seid=%s' % self.seid
        self.nodes_ref = model.Nodes(self.nodes, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # capture the ids from the refs before dropping them
        self.nodes = self.node_ids
        self.nodes_ref = None

    @property
    def node_ids(self):
        """the connection point ids (resolved through the refs when available)"""
        return _node_ids(self, self.nodes, self.nodes_ref, allow_empty_nodes=False, msg='')

    def raw_fields(self):
        """returns the card's fields in their unmodified form"""
        return ['CSUPER', self.seid, self.psid] + self.node_ids

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        return self.comment + print_card_8(self.repr_fields())
class CSUPEXT(BaseCard):
    """
    Superelement Exterior Point Definition

    Assigns exterior points to a superelement.

    +---------+------+-----+-----+-----+-----+-----+-----+-----+
    |    1    |  2   |  3  |  4  |  5  |  6  |  7  |  8  |  9  |
    +=========+======+=====+=====+=====+=====+=====+=====+=====+
    | CSUPEXT | SEID | GP1 | GP2 | GP3 | GP4 | GP5 | GP6 | GP7 |
    +---------+------+-----+-----+-----+-----+-----+-----+-----+
    """
    type = 'CSUPEXT'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        """builds a default CSUPEXT; used by the card test machinery"""
        return CSUPEXT(1, [1], comment='')

    def __init__(self, seid, nodes, comment=''):
        """
        Creates a CSUPEXT card

        Parameters
        ----------
        seid : int
            the superelement id
        nodes : List[int/str]
            the exterior grid/scalar points; THRU ranges are expanded
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        #: Identifiers of grids points. (Integer > 0)
        self.nodes = expand_thru(nodes)
        self.nodes_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """Adds a CSUPEXT card from ``BDF.add_card(...)``"""
        seid = integer(card, 1, 'seid')
        nodes = []
        for ifield in range(2, len(card)):
            # field names count only the non-blank entries, starting at 1
            nid = integer_string_or_blank(card, ifield, 'node_%i' % (len(nodes) + 1))
            if nid:
                nodes.append(nid)
        # single-line card: at most 7 points
        assert len(card) <= 9, f'len(CSUPEXT card) = {len(card):d}\ncard={card}'
        return CSUPEXT(seid, nodes, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """looks the exterior points up on the model"""
        msg = ', which is required by CSUPEXT eid=%s' % self.seid
        self.nodes_ref = model.Nodes(self.nodes, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors):
        """looks the exterior points up on the model"""
        msg = ', which is required by CSUPEXT eid=%s' % self.seid
        self.nodes_ref = model.Nodes(self.nodes, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # capture the ids from the refs before dropping them
        self.nodes = self.node_ids
        self.nodes_ref = None

    @property
    def node_ids(self):
        """the exterior point ids (resolved through the refs when available)"""
        return _node_ids(self, self.nodes, self.nodes_ref, allow_empty_nodes=False, msg='')

    def raw_fields(self):
        """returns the card's fields in their unmodified form"""
        return ['CSUPEXT', self.seid] + self.node_ids

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        return self.comment + print_card_8(self.repr_fields())
class SEBULK(BaseCard):
    """
    Partitional Superelement Connection

    Defines superelement boundary search options and a repeated,
    mirrored, or collector superelement.

    +--------+------+--------+-------+--------+--------+-----+--------+
    |   1    |  2   |   3    |   4   |   5    |   6    |  7  |   8    |
    +========+======+========+=======+========+========+=====+========+
    | SEBULK | SEID |  TYPE  | RSEID | METHOD |  TOL   | LOC | UNITNO |
    +--------+------+--------+-------+--------+--------+-----+--------+
    | SEBULK |  14  | REPEAT |   4   |  AUTO  | 1.0E-3 |     |        |
    +--------+------+--------+-------+--------+--------+-----+--------+
    """
    type = 'SEBULK'

    @classmethod
    def _init_from_empty(cls):
        """creates a default SEBULK card; used by the card test machinery"""
        seid = 1
        superelement_type = 'MIRROR'
        rseid = 42
        return SEBULK(seid, superelement_type, rseid,
                      method='AUTO', tol=1e-5, loc='YES', unitno=None, comment='')

    def __init__(self, seid, superelement_type, rseid,
                 method='AUTO', tol=1e-5, loc='YES', unitno=None,
                 comment=''):
        """
        Creates a SEBULK card

        Parameters
        ----------
        seid : int
            Partitioned superelement identification number.
        superelement_type : str
            Superelement type.
            {PRIMARY, REPEAT, MIRROR, COLLCTR, EXTERNAL, EXTOP2}
        rseid : int; default=0
            Identification number of the reference superelement,
            used if TYPE = 'REPEAT' and 'MIRROR'.
        method : str; default='AUTO'
            Method to be used when searching for boundary grid points.
            {AUTO, MANUAL}
        tol : float; default=1e-5
            Location tolerance to be used when searching for boundary grid points.
        loc : str; default='YES'
            Coincident location check option for manual connection option.
            {YES, NO}
        unitno : int / None
            FORTRAN unit number for the OUTPUT2 file (applicable and
            meaningful only when TYPE='EXTOP2').
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        self.superelement_type = superelement_type
        self.rseid = rseid
        self.method = method
        self.tol = tol
        self.loc = loc
        self.unitno = unitno

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SEBULK card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        seid = integer(card, 1, 'seid')
        superelement_type = string(card, 2, 'superelement_type')
        rseid = integer_or_blank(card, 3, 'rseid', 0)
        method = string_or_blank(card, 4, 'method', 'AUTO')
        tol = double_or_blank(card, 5, 'tol', 1e-5)
        loc = string_or_blank(card, 6, 'loc', 'YES')
        # bugfix: the field name was 'seid' (copy/paste error); this is UNITNO
        unitno = integer_or_blank(card, 7, 'unitno')
        assert len(card) <= 8, f'len(SEBULK card) = {len(card):d}\ncard={card}'
        return SEBULK(seid, superelement_type, rseid, method=method, tol=tol,
                      loc=loc, unitno=unitno, comment=comment)

    def validate(self):
        """sanity checks the enumerated fields"""
        superelement_types = ['PRIMARY', 'REPEAT', 'MIRROR', 'COLLCTR',
                              'EXTERNAL', 'EXTOP2', 'FRFOP2', 'MANUAL']
        assert self.superelement_type in superelement_types, (
            f'superelement_type={self.superelement_type}\n{self}')
        assert self.loc in ['YES', 'NO'], self.loc
        assert self.method in ['AUTO', 'MANUAL'], self.method

    def cross_reference(self, model: BDF) -> None:
        """SEBULK stores no references to other cards; nothing to link"""
        pass

    def safe_cross_reference(self, model: BDF, xref_errors):
        """SEBULK stores no references to other cards; nothing to link"""
        pass

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def raw_fields(self):
        """returns the card's fields in their unmodified form"""
        list_fields = [
            'SEBULK', self.seid, self.superelement_type, self.rseid, self.method, self.tol,
            self.loc, self.unitno]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class SECONCT(BaseCard):
    """
    Partitioned Superelement Boundary-Point Connection

    Explicitly defines grid and scalar point connection procedures for a
    partitioned superelement.

    +---------+-------+-------+--------+-------+-------+-------+------+------+
    |    1    |   2   |   3   |   4    |   5   |   6   |   7   |  8   |  9   |
    +=========+=======+=======+========+=======+=======+=======+======+======+
    | SECONCT | SEIDA | SEIDB |  TOL   |  LOC  |       |       |      |      |
    +---------+-------+-------+--------+-------+-------+-------+------+------+
    |         | GIDA1 | GIDB1 | GIDA2  | GIDB2 | GIDA3 | GIDB3 | etc. | etc. |
    +---------+-------+-------+--------+-------+-------+-------+------+------+
    | SECONCT |  10   |  20   | 1.0E-4 |  YES  |       |       |      |      |
    +---------+-------+-------+--------+-------+-------+-------+------+------+
    |         | 1001  | 4001  |        |       | 2222  | 4444  |      |      |
    +---------+-------+-------+--------+-------+-------+-------+------+------+
    | SECONCT | SEIDA | SEIDB |  TOL   |  LOC  |       |       |      |      |
    +---------+-------+-------+--------+-------+-------+-------+------+------+
    |         | GIDA1 | THRU  | GIDA2  | GIDB1 | THRU  | GIDB2 |      |      |
    +---------+-------+-------+--------+-------+-------+-------+------+------+
    | SECONCT |  10   |  20   |        |       |       |       |      |      |
    +---------+-------+-------+--------+-------+-------+-------+------+------+
    |         |  101  | THRU  |  110   |  201  | THRU  |  210  |      |      |
    +---------+-------+-------+--------+-------+-------+-------+------+------+
    """
    type = 'SECONCT'
    _properties = ['node_ids_a', 'node_ids_b']

    @classmethod
    def _init_from_empty(cls):
        # default instance used by the card test machinery
        seid_a = 1
        seid_b = 2
        tol = 0.1
        loc = 'YES'
        nodes_a = [10, 20, 30]
        nodes_b = [11, 21, 31]
        return SECONCT(seid_a, seid_b, tol, loc, nodes_a, nodes_b, comment='')

    def __init__(self, seid_a, seid_b, tol, loc, nodes_a, nodes_b, comment=''):
        """
        Creates a SECONCT card

        Parameters
        ----------
        seid_a : int
            Partitioned superelement identification number.
        seid_b : int
            Identification number of superelement for connection to SEIDA.
        tol : float; default=1e-5
            Location tolerance to be used when searching for or checking boundary
            grid points.
        loc : str; default='YES'
            Coincident location check option for manual connection.
            {YES, NO}
        nodes_a : List[int]
            Identification numbers of grid or scalar points in superelement SEIDA,
            which will be connected to nodes_b pairwise.
        nodes_b : List[int]
            Identification numbers of grid or scalar points in superelement SEIDB,
            which will be connected to nodes_a pairwise.
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.seid_a = seid_a
        self.seid_b = seid_b
        self.tol = tol
        self.loc = loc
        self.nodes_a = nodes_a
        self.nodes_b = nodes_b
        self.nodes_a_ref = None
        self.nodes_b_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SECONCT card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        seid_a = integer(card, 1, 'seid_a')
        seid_b = integer(card, 2, 'seid_b')
        tol = double_or_blank(card, 3, 'tol', 1e-5)
        loc = string_or_blank(card, 4, 'loc', 'YES')
        # the (GIDAi, GIDBi) pairs start on the second card line (field 9)
        fields = card[9:]
        if len(fields) < 2:
            assert len(card) >= 9, f'len(SECONCT card) = {len(card):d}\ncard={card}'
        assert len(fields) % 2 == 0, 'card=%s\nfields=%s' % (card, fields)
        if 'THRU' in fields:
            # the THRU form (see the class docstring) is not parsed yet
            raise NotImplementedError(f'THRU not supported in SECONCT card; fields={fields}')
            #start_a = integer(card, 9, 'start_a')
            #thru_a = string(card, 10, 'thru_a')
            #end_a = integer(card, 11, 'end_a')
            #start_b = integer(card, 12, 'start_b')
            #thru_b = string(card, 13, 'thru_b')
            #end_b = integer(card, 14, 'end_b')
            #assert thru_a == 'THRU', thru_a
            #assert thru_b == 'THRU', thru_b
            #nodes_a = list(range(start_a+1, end_a+1))
            #nodes_b = list(range(start_b+1, end_b+1))
            #print(nodes_a)
        else:
            nodes_a = []
            nodes_b = []
            inode = 1
            for ifield in range(0, len(fields), 2):
                node_a = integer_or_blank(card, 9+ifield, 'node_a%i' % inode)
                node_b = integer_or_blank(card, 9+ifield+1, 'node_b%i' % inode)
                # fully blank pairs (from 8-field line padding) are skipped;
                # half-blank pairs are an error
                if node_a is None and node_b is None:
                    continue
                assert node_a is not None, fields
                assert node_b is not None, fields
                nodes_a.append(node_a)
                nodes_b.append(node_b)
                inode += 1
        return SECONCT(seid_a, seid_b, tol, loc, nodes_a, nodes_b, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SECONCT seid_a=%s seid_b=%s' % (self.seid_a, self.seid_b)
        self.nodes_a_ref = model.superelement_nodes(self.seid_a, self.nodes_a, msg=msg)
        self.nodes_b_ref = model.superelement_nodes(self.seid_b, self.nodes_b, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors):
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SECONCT seid_a=%s seid_b=%s' % (self.seid_a, self.seid_b)
        self.nodes_a_ref = model.superelement_nodes(self.seid_a, self.nodes_a, msg=msg)
        self.nodes_b_ref = model.superelement_nodes(self.seid_b, self.nodes_b, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # capture the ids from the refs before dropping them
        self.nodes_a = self.node_ids_a
        self.nodes_b = self.node_ids_b
        self.nodes_a_ref = None
        self.nodes_b_ref = None

    @property
    def node_ids_a(self):
        """the SEIDA point ids (resolved through the refs when available)"""
        return _node_ids(self, self.nodes_a, self.nodes_a_ref, allow_empty_nodes=False, msg='')

    @property
    def node_ids_b(self):
        """the SEIDB point ids (resolved through the refs when available)"""
        return _node_ids(self, self.nodes_b, self.nodes_b_ref, allow_empty_nodes=False, msg='')

    def raw_fields(self):
        """returns the card's fields in their unmodified form"""
        # pad line 1 with blanks so the node pairs start on line 2
        list_fields = ['SECONCT', self.seid_a, self.seid_b, self.tol, self.loc,
                       None, None, None, None,]
        for (nid_a, nid_b) in zip(self.node_ids_a, self.node_ids_b):
            list_fields += [nid_a, nid_b]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class SENQSET(BaseCard):
    """
    Superelement Internal Generalized Degree-of-Freedom

    Defines number of internally generated scalar points for superelement dynamic
    reduction.

    +---------+------+----+
    |    1    |   2  |  3 |
    +---------+------+----+
    | SENQSET | SEID |  N |
    +---------+------+----+
    | SENQSET | 110  | 45 |
    +---------+------+----+
    """
    type = 'SENQSET'

    @classmethod
    def _init_from_empty(cls):
        """creates a dummy SENQSET card (used for testing)"""
        set_id = 1
        n = 45
        return SENQSET(set_id, n, comment='')

    def __init__(self, set_id, n=0, comment=''):
        """
        Creates a SENQSET card

        Parameters
        ----------
        set_id : int / str
            Partitioned superelement identification number.
            (Integer > 0 or Character='ALL')
        n : int; default=0
            Number of internally generated scalar points for dynamic
            reduction generalized coordinates (Integer > 0).
        comment : str; default=''
            a comment for the card

        """
        # n now defaults to 0, matching both the docstring and the
        # blank-field default used by add_card
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.set_id = set_id
        self.n = n

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SENQSET card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card

        """
        set_id = integer_or_string(card, 1, 'set_id')
        n = integer_or_blank(card, 2, 'n', 0)
        assert len(card) <= 3, f'len(SENQSET card) = {len(card):d}\ncard={card}'
        return SENQSET(set_id, n, comment=comment)

    def raw_fields(self):
        """returns the card fields in the unmodified/raw form"""
        list_fields = ['SENQSET', self.set_id, self.n]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small-field (8-character) format"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
def _node_ids(card, nodes, nodes_ref, allow_empty_nodes=False, msg=''):
if nodes_ref is None:
#nodes = card.nodes
assert nodes is not None, card.__dict__
return nodes
try:
if allow_empty_nodes:
nodes2 = []
for node in nodes_ref:
if node == 0 or node is None:
nodes2.append(None)
elif isinstance(node, integer_types):
nodes2.append(node)
else:
nodes2.append(node.nid)
assert nodes2 is not None, str(card)
return nodes2
else:
try:
node_ids = []
for node in nodes_ref:
if isinstance(node, integer_types):
node_ids.append(node)
else:
node_ids.append(node.nid)
#if isinstance(nodes[0], integer_types):
#node_ids = [node for node in nodes]
#else:
#node_ids = [node.nid for node in nodes]
except Exception:
print('type=%s nodes=%s allow_empty_nodes=%s\nmsg=%s' % (
card.type, nodes, allow_empty_nodes, msg))
raise
assert 0 not in node_ids, 'node_ids = %s' % node_ids
assert node_ids is not None, str(card)
return node_ids
except Exception:
print('type=%s nodes=%s allow_empty_nodes=%s\nmsg=%s' % (
card.type, nodes, allow_empty_nodes, msg))
raise
raise RuntimeError('huh...')
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,641
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/gui/gui_objects/settings.py
|
"""
defines:
- Settings(parent)
- reset_settings(self)
- load(self, settings)
- save(self, settings)
 - on_increase_font_size(self)
- on_decrease_font_size(self)
- on_set_font_size(self, font_size, show_command=True)
- set_annotation_size_color(self, size=None, color=None)
- set_annotation_size(self, size, render=True)
- set_annotation_color(self, color, render=True)
- set_background_color_to_white(self)
- set_background_color(self, color)
- set_text_color(self, color)
- update_text_size(self, magnify=1.0)
- repr_settings(settings)
"""
from __future__ import annotations
from typing import List, Dict, Any, Optional, TYPE_CHECKING

import numpy as np
from qtpy import QtGui

from pyNastran.gui.gui_objects.alt_geometry_storage import AltGeometry
from pyNastran.gui.gui_objects.coord_properties import CoordProperties
from pyNastran.gui.gui_objects.utils import get_setting
from pyNastran.utils import object_attributes
if TYPE_CHECKING: # pragma: no cover
import vtk
from qtpy.QtCore import QSettings
# default colors as RGB tuples of 0.0-1.0 floats
BLACK = (0.0, 0.0, 0.0)
WHITE = (1., 1., 1.)
GREY = (119/255., 136/255., 153/255.)
ORANGE = (229/255., 92/255., 0.)
# defaults for the highlight actors
HIGHLIGHT_OPACITY = 0.9
HIGHLIGHT_POINT_SIZE = 10.
HIGHLIGHT_LINE_THICKNESS = 5.
# default font/text sizes (points)
ANNOTATION_SIZE = 18
FONT_SIZE = 8
TEXT_SIZE = 14
COORD_SCALE = 0.05 # in percent of max dimension
COORD_TEXT_SCALE = 0.5 # percent of nominal
# boolean nastran preferences persisted via QSettings;
# each name matches a Settings attribute of the same name
NASTRAN_BOOL_KEYS = [
    'nastran_create_coords',
    'nastran_is_properties',
    'nastran_is_element_quality',
    'nastran_is_bar_axes',
    'nastran_is_3d_bars', 'nastran_is_3d_bars_update',
    'nastran_is_shell_mcids', 'nastran_is_update_conm2',
    'nastran_stress', 'nastran_plate_stress', 'nastran_composite_plate_stress',
    'nastran_strain', 'nastran_plate_strain', 'nastran_composite_plate_strain',
    'nastran_rod_stress', 'nastran_bar_stress', 'nastran_beam_stress',
    'nastran_rod_strain', 'nastran_bar_strain', 'nastran_beam_strain',
    'nastran_spring_stress', 'nastran_solid_stress',
    'nastran_spring_strain', 'nastran_solid_strain',
    'nastran_force',
    'nastran_bar_force', 'nastran_beam_force', 'nastran_plate_force',
    'nastran_spring_force', 'nastran_gap_force', 'nastran_cbush_force',
]
class Settings:
    """storage class for various settings"""
    def __init__(self, parent):
        """
        Creates the Settings object

        Parameters
        ----------
        parent : MainWindow()
            used by the class to access the MainWindow
        """
        self.parent = parent

        # booleans
        self.use_parallel_projection = True
        self.use_gradient_background = True

        # rgb tuple
        self.background_color = GREY
        self.background_color2 = GREY

        # TODO: what is an annotation color?
        self.annotation_color = BLACK

        # text in the lower left corner
        self.text_size = TEXT_SIZE
        self.text_color = BLACK

        # used for highlight actors
        self.highlight_color = ORANGE
        self.highlight_opacity = HIGHLIGHT_OPACITY
        self.highlight_point_size = HIGHLIGHT_POINT_SIZE
        self.highlight_line_thickness = HIGHLIGHT_LINE_THICKNESS

        # which log categories are shown in the gui log window
        self.show_info = True
        self.show_debug = True
        self.show_command = True
        self.show_warning = True
        self.show_error = True

        # int
        self.annotation_size = ANNOTATION_SIZE
        self.font_size = FONT_SIZE
        self.magnify = 5

        # floats
        self.coord_scale = COORD_SCALE
        self.coord_text_scale = COORD_TEXT_SCALE
        self.coord_linewidth = 2.0

        # string
        self.colormap = 'jet' # 'viridis'

        # not stored
        self.dim_max = 1.0
        #self.annotation_scale = 1.0

        self._set_nastran_defaults()

    def _set_nastran_defaults(self) -> None:
        """sets the nastran-specific flags to their defaults;
        shared by ``__init__`` and ``reset_settings`` (names must match
        NASTRAN_BOOL_KEYS so they round-trip through QSettings)"""
        self.nastran_is_element_quality = True
        self.nastran_is_properties = True
        self.nastran_is_3d_bars = True
        self.nastran_is_3d_bars_update = True
        self.nastran_create_coords = True
        self.nastran_is_bar_axes = True
        self.nastran_is_shell_mcids = True
        self.nastran_is_update_conm2 = True

        self.nastran_stress = True
        self.nastran_spring_stress = True
        self.nastran_rod_stress = True
        self.nastran_bar_stress = True
        self.nastran_beam_stress = True
        self.nastran_plate_stress = True
        self.nastran_composite_plate_stress = True
        self.nastran_solid_stress = True

        self.nastran_strain = True
        self.nastran_spring_strain = True
        self.nastran_rod_strain = True
        self.nastran_bar_strain = True
        self.nastran_beam_strain = True
        self.nastran_plate_strain = True
        self.nastran_composite_plate_strain = True
        self.nastran_solid_strain = True

        self.nastran_force = True
        self.nastran_spring_force = True
        self.nastran_cbush_force = True
        self.nastran_gap_force = True
        self.nastran_bar_force = True
        self.nastran_beam_force = True
        self.nastran_plate_force = True

    def reset_settings(self) -> None:
        """helper method for ``setup_gui``; restores all defaults"""
        # rgb tuple
        self.use_gradient_background = True
        self.background_color = GREY
        self.background_color2 = GREY

        self.annotation_size = ANNOTATION_SIZE
        self.annotation_color = BLACK

        self.text_size = TEXT_SIZE
        self.text_color = BLACK

        self.highlight_color = ORANGE
        self.highlight_opacity = HIGHLIGHT_OPACITY
        self.highlight_point_size = HIGHLIGHT_POINT_SIZE
        self.highlight_line_thickness = HIGHLIGHT_LINE_THICKNESS

        self.use_parallel_projection = True
        self.show_info = True
        self.show_debug = True
        self.show_command = True
        self.show_warning = True
        self.show_error = True

        # int
        self.font_size = FONT_SIZE
        self.magnify = 5

        # float
        self.coord_scale = COORD_SCALE
        self.coord_text_scale = COORD_TEXT_SCALE
        self.coord_linewidth = 2.0

        # string
        self.colormap = 'jet' # 'viridis'

        self.parent.resize(1100, 700)

        # not stored
        self.dim_max = 1.0
        #self.annotation_scale = 1.0

        self._set_nastran_defaults()

    def load(self, settings: QSettings) -> bool:
        """helper method for ``setup_gui``; loads the saved settings"""
        #red = (1.0, 0.0, 0.0)
        screen_shape_default = (1100, 700)
        setting_keys = [str(key) for key in settings.childKeys()]

        # sets the window size/position
        main_window_geometry = get_setting(
            settings, setting_keys, ['main_window_geometry', 'mainWindowGeometry'], None)
        if main_window_geometry is not None:
            self.parent.restoreGeometry(main_window_geometry)

        # this is the gui font
        self._set_setting(settings, setting_keys, ['font_size'], self.font_size, auto_type=int)

        # parallel/perspective
        self._set_setting(settings, setting_keys, ['use_parallel_projection'],
                          self.use_parallel_projection, True, auto_type=bool)

        # the info/debug/gui/command preferences
        self._set_setting(settings, setting_keys, ['show_info'], self.show_info,
                          True, auto_type=bool)
        self._set_setting(settings, setting_keys, ['show_debug'], self.show_debug,
                          True, auto_type=bool)
        self._set_setting(settings, setting_keys, ['show_command'], self.show_command,
                          True, auto_type=bool)
        self._set_setting(settings, setting_keys, ['show_warning'], self.show_warning,
                          True, auto_type=bool)
        self._set_setting(settings, setting_keys, ['show_error'], self.show_error,
                          True, auto_type=bool)

        # the vtk panel background color
        # NOTE(review): the fallback here is False while the attribute
        # default is True - looks intentional (plain background unless
        # saved otherwise), but worth confirming
        self._set_setting(settings, setting_keys, ['use_gradient_background'],
                          False, auto_type=bool)
        self._set_setting(settings, setting_keys, ['background_color', 'backgroundColor'],
                          GREY, auto_type=float)
        self._set_setting(settings, setting_keys, ['background_color2'], GREY, auto_type=float)

        # scales the coordinate systems
        self._set_setting(settings, setting_keys, ['coord_scale'], COORD_SCALE, auto_type=float)
        self._set_setting(settings, setting_keys, ['coord_text_scale'], COORD_TEXT_SCALE, auto_type=float)

        # this is for the 3d annotation
        self._set_setting(settings, setting_keys, ['annotation_color', 'labelColor'],
                          BLACK, auto_type=float)
        self._set_setting(settings, setting_keys, ['annotation_size'], ANNOTATION_SIZE, auto_type=int) # int
        if isinstance(self.annotation_size, float):
            # throw the float in the trash as it's from an old version of vtk
            self.annotation_size = ANNOTATION_SIZE
        elif isinstance(self.annotation_size, int):
            pass
        else:
            print('annotation_size = ', self.annotation_size)

        self._set_setting(settings, setting_keys, ['magnify'], self.magnify, auto_type=int)

        # this is the text in the lower left corner
        self._set_setting(settings, setting_keys, ['text_color', 'textColor'],
                          BLACK, auto_type=float)
        self._set_setting(settings, setting_keys, ['text_size'], TEXT_SIZE, auto_type=int)

        # highlight
        self._set_setting(settings, setting_keys, ['highlight_color'],
                          ORANGE, auto_type=float)
        self._set_setting(settings, setting_keys, ['highlight_opacity'],
                          HIGHLIGHT_OPACITY, auto_type=float)
        self._set_setting(settings, setting_keys, ['highlight_point_size'],
                          HIGHLIGHT_POINT_SIZE, auto_type=float)
        self._set_setting(settings, setting_keys, ['highlight_line_thickness'],
                          HIGHLIGHT_LINE_THICKNESS, auto_type=float)

        # default colormap for legend
        self._set_setting(settings, setting_keys, ['colormap'],
                          'jet')

        # general gui sizing
        screen_shape = self._set_setting(settings, setting_keys, ['screen_shape'],
                                         screen_shape_default, save=False, auto_type=int)

        try:
            # bug fix: this previously read ``self.recent_files``, which does
            # not exist on Settings, so the AttributeError was swallowed and
            # the recent-files list was silently never restored
            self.parent.recent_files = settings.value(
                "recent_files", self.parent.recent_files)
        except (TypeError, AttributeError):
            pass

        for key in NASTRAN_BOOL_KEYS:
            default = getattr(self, key)
            self._set_setting(settings, setting_keys, [key],
                              default, save=True, auto_type=bool)

        if screen_shape:
            self.parent.resize(screen_shape[0], screen_shape[1])

        font = QtGui.QFont()
        font.setPointSize(self.font_size)
        self.parent.setFont(font)

        is_loaded = True
        return is_loaded

    def _set_setting(self, settings, setting_keys: List[str],
                     setting_names: List[str], default: Any,
                     save: bool=True, auto_type=None) -> Any:
        """
        helper method for ``reapply_settings``

        Looks up the first matching name in ``setting_names`` and, when
        ``save`` is True, stores it on ``self`` under ``setting_names[0]``.
        """
        set_name = setting_names[0]
        value = get_setting(settings, setting_keys, setting_names, default,
                            auto_type=auto_type)
        if save:
            setattr(self, set_name, value)
        return value

    def save(self, settings, is_testing: bool=False) -> None:
        """saves the settings"""
        if not is_testing:
            settings.setValue('main_window_geometry', self.parent.saveGeometry())
            settings.setValue('mainWindowState', self.parent.saveState())

        # booleans
        settings.setValue('use_parallel_projection', self.use_parallel_projection)
        settings.setValue('use_gradient_background', self.use_gradient_background)

        # rgb tuple
        settings.setValue('background_color', self.background_color)
        settings.setValue('background_color2', self.background_color2)
        settings.setValue('annotation_color', self.annotation_color)
        settings.setValue('text_color', self.text_color)
        settings.setValue('highlight_color', self.highlight_color)
        settings.setValue('highlight_opacity', self.highlight_opacity)
        settings.setValue('highlight_point_size', self.highlight_point_size)

        settings.setValue('show_info', self.show_info)
        settings.setValue('show_debug', self.show_debug)
        settings.setValue('show_command', self.show_command)
        settings.setValue('show_warning', self.show_warning)
        settings.setValue('show_error', self.show_error)

        # int
        settings.setValue('font_size', self.font_size)
        settings.setValue('annotation_size', self.annotation_size)
        settings.setValue('magnify', self.magnify)

        # float
        settings.setValue('text_size', self.text_size)
        settings.setValue('coord_scale', self.coord_scale)
        settings.setValue('coord_text_scale', self.coord_text_scale)

        # str
        settings.setValue('colormap', self.colormap)

        # format-specific
        for key in NASTRAN_BOOL_KEYS:
            value = getattr(self, key)
            settings.setValue(key, value)

        #screen_shape = QtGui.QDesktopWidget().screenGeometry()
        if not is_testing:
            main_window = self.parent.window()
            width = main_window.frameGeometry().width()
            height = main_window.frameGeometry().height()
            settings.setValue('screen_shape', (width, height))

            qpos = self.parent.pos()
            pos = qpos.x(), qpos.y()
            settings.setValue('pos', pos)

    #---------------------------------------------------------------------------
    # FONT SIZE
    def on_increase_font_size(self) -> None:
        """increases the overall GUI font size"""
        self.on_set_font_size(self.font_size + 1)

    def on_decrease_font_size(self) -> None:
        """shrinks the overall GUI font size"""
        self.on_set_font_size(self.font_size - 1)

    def on_set_font_size(self, font_size: int, show_command: bool=True) -> None:
        """updates the GUI font size"""
        return self.parent.on_set_font_size(font_size, show_command=show_command)

    #---------------------------------------------------------------------------
    # ANNOTATION SIZE/COLOR
    def set_annotation_size_color(self, size=None, color=None) -> None:
        """
        Parameters
        ----------
        size : float
            annotation size
        color : (float, float, float)
            RGB values
        """
        if size is not None:
            assert isinstance(size, int), 'size=%r' % size
            self.set_annotation_size(size)
        if color is not None:
            assert len(color) == 3, color
            assert isinstance(color[0], float), 'color=%r' % color
            self.set_annotation_color(color)

    def set_annotation_size(self, size: int, render: bool=True) -> None:
        """Updates the size of all the annotations"""
        assert size >= 0, size
        assert isinstance(size, int), size
        if self.annotation_size == size:
            # no change
            return
        self.annotation_size = size

        # min/max
        for actor in self.parent.min_max_actors:
            actor.GetTextProperty().SetFontSize(size)
            actor.Modified()

        # case attached annotations (typical)
        for follower_actors in self.parent.label_actors.values():
            for follower_actor in follower_actors:
                follower_actor.GetTextProperty().SetFontSize(size)
                follower_actor.Modified()

        # geometry property attached annotations (e.g., flaps)
        for obj in self.parent.geometry_properties.values():
            if isinstance(obj, CoordProperties):
                continue
            elif isinstance(obj, AltGeometry):
                pass
            else:
                raise NotImplementedError(obj)
            follower_actors = obj.label_actors
            for follower_actor in follower_actors:
                follower_actor.GetTextProperty().SetFontSize(size)
                follower_actor.Modified()

        if render:
            self.parent.vtk_interactor.GetRenderWindow().Render()
        self.parent.log_command('settings.set_annotation_size(%s)' % size)

    def set_coord_scale(self, coord_scale: float, render: bool=True) -> None:
        """sets the coordinate system size"""
        self.coord_scale = coord_scale
        self.update_coord_scale(coord_scale, render=render)

    def set_coord_text_scale(self, coord_text_scale: float, render: bool=True) -> None:
        """sets the coordinate system text size"""
        self.coord_text_scale = coord_text_scale
        self.update_coord_text_scale(coord_text_scale, render=render)

    def update_coord_scale(self, coord_scale=None, coord_text_scale=None,
                           linewidth=None, render: bool=True) -> None:
        """internal method for updating the coordinate system size"""
        if coord_scale is None:
            coord_scale = self.coord_scale
        #if coord_text_scale:
            #self.update_coord_text_scale(coord_text_scale=coord_text_scale, render=False)

        dim_max = self.dim_max
        scale = coord_scale * dim_max
        for unused_coord_id, axes in self.parent.axes.items():
            axes.SetTotalLength(scale, scale, scale)  # was coord_scale
        if render:
            self.parent.vtk_interactor.GetRenderWindow().Render()

    def scale_coord(self, magnify: float, render: bool=True) -> None:
        """internal method for scaling the coordinate system size"""
        for unused_coord_id, axes in self.parent.axes.items():
            axes.SetScale(magnify)
        if render:
            self.parent.vtk_interactor.GetRenderWindow().Render()

    def update_coord_text_scale(self, coord_text_scale: Optional[float]=None,
                                render: bool=True) -> None:
        """internal method for updating the coordinate system text size"""
        if coord_text_scale is None:
            coord_text_scale = self.coord_text_scale
        update_axes_text_size(self.parent.axes, coord_text_scale,
                              width=1.0, height=0.25)
        if render:
            self.parent.vtk_interactor.GetRenderWindow().Render()

    def set_annotation_color(self, color, render: bool=True) -> None:
        """
        Set the annotation color

        Parameters
        ----------
        color : (float, float, float)
            RGB values as floats
        """
        if np.allclose(self.annotation_color, color):
            return
        self.annotation_color = color

        # min/max
        for min_max_actor in self.parent.min_max_actors:
            prop = min_max_actor.GetProperty()
            prop.SetColor(*color)

        # case attached annotations (typical)
        for follower_actors in self.parent.label_actors.values():
            for follower_actor in follower_actors:
                prop = follower_actor.GetProperty()
                prop.SetColor(*color)

        # geometry property attached annotations (e.g., flaps)
        for obj in self.parent.geometry_properties.values():
            if isinstance(obj, CoordProperties):
                continue
            elif isinstance(obj, AltGeometry):
                pass
            else:
                raise NotImplementedError(obj)
            follower_actors = obj.label_actors
            for follower_actor in follower_actors:
                prop = follower_actor.GetProperty()
                prop.SetColor(*color)

        if render:
            self.parent.vtk_interactor.GetRenderWindow().Render()
        self.parent.log_command('settings.set_annotation_color(%s, %s, %s)' % color)

    #---------------------------------------------------------------------------
    def set_background_color_to_white(self, render: bool=True) -> None:
        """sets the background color to white; used by gif writing?"""
        self.set_gradient_background(use_gradient_background=False, render=False)
        self.set_background_color(WHITE, render=render)

    def set_gradient_background(self, use_gradient_background: bool=False, render: bool=True) -> None:
        """enables/disables the gradient background"""
        self.use_gradient_background = use_gradient_background
        self.parent.rend.SetGradientBackground(self.use_gradient_background)
        if render:
            self.parent.vtk_interactor.Render()

    def set_background_color(self, color, render=True):
        """
        Set the background color

        Parameters
        ----------
        color : (float, float, float)
            RGB values as floats
        """
        self.background_color = color
        self.parent.rend.SetBackground(*color)
        if render:
            self.parent.vtk_interactor.Render()
        self.parent.log_command('settings.set_background_color(%s, %s, %s)' % color)

    def set_background_color2(self, color, render=True):
        """
        Set the secondary (gradient) background color

        Parameters
        ----------
        color : (float, float, float)
            RGB values as floats
        """
        self.background_color2 = color
        self.parent.rend.SetBackground2(*color)
        if render:
            self.parent.vtk_interactor.Render()
        self.parent.log_command('settings.set_background_color2(%s, %s, %s)' % color)

    def set_highlight_color(self, color: List[float]) -> None:
        """
        Set the highlight color

        Parameters
        ----------
        color : (float, float, float)
            RGB values as floats
        """
        self.highlight_color = color
        self.parent.log_command('settings.set_highlight_color(%s, %s, %s)' % color)

    def set_highlight_opacity(self, opacity: float) -> None:
        """
        Set the highlight opacity

        Parameters
        ----------
        opacity : float
            0.0 : invisible
            1.0 : solid
        """
        self.highlight_opacity = opacity
        self.parent.log_command('settings.set_highlight_opacity(%s)' % opacity)

    def set_highlight_point_size(self, point_size: int) -> None:
        """
        Set the highlight point size

        Parameters
        ----------
        point_size : float
            10.0 : default
        """
        self.highlight_point_size = point_size
        self.parent.log_command('settings.set_highlight_point_size(%s)' % point_size)

    #---------------------------------------------------------------------------
    # TEXT ACTORS - used for lower left notes
    def set_text_color(self, color: List[float], render: bool=True) -> None:
        """
        Set the text color

        Parameters
        ----------
        color : (float, float, float)
            RGB values as floats
        """
        # bug fix: ``render`` was annotated ``str``; it's a bool flag
        self.text_color = color
        for text_actor in self.parent.text_actors.values():
            text_actor.GetTextProperty().SetColor(color)
        if render:
            self.parent.vtk_interactor.Render()
        self.parent.log_command('settings.set_text_color(%s, %s, %s)' % color)

    def set_text_size(self, text_size: int, render: bool=True) -> None:
        """
        Set the lower-left text size

        Parameters
        ----------
        text_size : int
            the lower left text size (typical 14)
        """
        i = 0
        dtext_size = text_size + 1
        self.text_size = text_size
        for text_actor in self.parent.text_actors.values():
            text_prop = text_actor.GetTextProperty()
            text_prop.SetFontSize(text_size)

            # stack the text actors vertically
            position = [5, 5 + i * dtext_size]
            text_actor.SetDisplayPosition(*position)
            i += 1
        if render:
            self.parent.vtk_interactor.Render()
        self.parent.log_command('settings.set_text_size(%s)' % text_size)

    def update_text_size(self, magnify: float=1.0) -> None:
        """Internal method for updating the bottom-left text when we go to take a picture"""
        text_size = int(14 * magnify)
        for text_actor in self.parent.text_actors.values():
            text_prop = text_actor.GetTextProperty()
            text_prop.SetFontSize(text_size)

    def set_magnify(self, magnify: int=5) -> None:
        """sets the screenshot magnification factor (int)"""
        self.magnify = magnify

    def __repr__(self) -> str:
        msg = '<Settings>\n'
        for key in object_attributes(self, mode='public', keys_to_skip=['parent']):
            value = getattr(self, key)
            if isinstance(value, tuple):
                value = str(value)
            msg += '  %r = %r\n' % (key, value)
        return msg
def update_axes_text_size(axes: Dict[int, vtk.vtkAxes],
                          coord_text_scale: float,
                          width: float=1.0, height: float=0.25):
    """updates the coordinate system text size"""
    # SetWidth doesn't actually pin the width; making it very large
    # (old=0.1) just deactivates the width constraint
    scaled_width = coord_text_scale * width
    scaled_height = coord_text_scale * height
    for axis in axes.values():
        captions = (
            axis.GetXAxisCaptionActor2D(),
            axis.GetYAxisCaptionActor2D(),
            axis.GetZAxisCaptionActor2D(),
        )
        for caption in captions:
            caption.SetWidth(scaled_width)
            caption.SetHeight(scaled_height)
def isfloat(value) -> bool:
    """is the value floatable?

    Returns True if ``float(value)`` succeeds; False otherwise.
    Also catches TypeError so non-numeric objects (None, lists, ...)
    return False instead of raising.
    """
    try:
        float(value)
    except (TypeError, ValueError):
        return False
    return True
def repr_settings(settings: QSettings) -> str:
    """works on a QSettings, not a Settings"""
    pieces = ['QSettings:\n']
    pieces.extend('  %r : %r\n' % (key, settings.value(key))
                  for key in sorted(settings.allKeys()))
    return ''.join(pieces)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,642
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/op2/test/test_op2.py
|
"""Defines the command line tool `test_op2`"""
import os
import sys
import time
from traceback import print_exc
from typing import Tuple, List, Dict, Optional, Any
import numpy as np
import pyNastran
from pyNastran.op2.op2 import (
OP2, FatalError, SixtyFourBitError, OverwriteTableError)
#SortCodeError, DeviceCodeError, FortranMarkerError
from pyNastran.op2.op2_geom import OP2Geom, DuplicateIDsError
from pyNastran.utils import is_binary_file
np.set_printoptions(precision=3, threshold=20)
try:
import pandas
IS_PANDAS = True
except ImportError:
IS_PANDAS = False
try:
import h5py
IS_HDF5 = True
except ImportError:
IS_HDF5 = False
#import warnings
#warnings.filterwarnings('error')
#warnings.filterwarnings('error', category=UnicodeWarning)
def parse_table_names_from_f06(f06_filename: str) -> List[str]:
    """gets the op2 names from the f06"""
    marker = 'NAME OF DATA BLOCK WRITTEN ON FORTRAN UNIT IS'
    table_names = []
    with open(f06_filename, 'r') as f06_file:
        for line in f06_file:
            if marker not in line:
                continue
            table_names.append(line.replace(marker, '').strip().strip('.'))
    return table_names
def run_lots_of_files(files, make_geom: bool=True, combine: bool=True,
                      write_bdf: bool=False, write_f06: bool=True,
                      delete_f06: bool=True, delete_op2: bool=True, delete_hdf5: bool=True,
                      delete_debug_out: bool=True, build_pandas: bool=True, write_op2: bool=False,
                      write_hdf5: bool=True, debug: bool=True, skip_files: Optional[List[str]]=None,
                      exclude: Optional[str]=None,
                      stop_on_failure: bool=False, nstart: int=0, nstop: int=1000000000,
                      short_stats: bool=False, binary_debug: bool=False,
                      compare: bool=True, quiet: bool=False, dev: bool=True, xref_safe: bool=False):
    """
    used by op2_test.py to run thousands of files

    Returns
    -------
    failed_cases : List[str]
        the op2 filenames that failed at least one run
    """
    if skip_files is None:
        skip_files = []
    #n = ''
    assert make_geom in [True, False]
    assert combine in [True, False]
    assert write_bdf in [True, False]
    assert write_f06 in [True, False]
    assert write_op2 in [True, False]
    assert write_hdf5 in [True, False]
    assert build_pandas in [True, False]
    # a scalar binary_debug means "run once with that mode"; a list runs
    # the file once per mode
    if binary_debug in [True, False]:
        binary_debug = [binary_debug]

    subcases = []
    failed_cases = []
    nfailed = 0
    ntotal = 0
    npassed = 0
    #t0 = time.time()
    for i, op2file in enumerate(files[nstart:nstop], nstart):  # 149
        if not is_binary_file(op2file):
            continue
        basename = os.path.basename(op2file)
        #if basename not in skip_files and not basename.startswith('acms') and i not in nskip:
        sys.stderr.write(f'{i} file={op2file}\n')
        if basename not in skip_files and '#' not in op2file:
            print("%" * 80)
            print(f'file={op2file}\n')
            #n = '%s ' % i
            ntotal += 1

            is_passed = True
            for binary_debugi in binary_debug:
                print(f'------running binary_debug={binary_debugi}------')
                # bug fix: previously passed binary_debug=binary_debug (the
                # whole list) instead of the per-iteration mode
                is_passedi = run_op2(op2file, make_geom=make_geom, combine=combine,
                                     write_bdf=write_bdf, write_f06=write_f06, write_op2=write_op2,
                                     is_mag_phase=False,
                                     delete_f06=delete_f06,
                                     delete_op2=delete_op2,
                                     delete_hdf5=delete_hdf5,
                                     delete_debug_out=delete_debug_out,
                                     build_pandas=build_pandas,
                                     write_hdf5=write_hdf5,
                                     exclude=exclude,
                                     short_stats=short_stats,
                                     subcases=subcases, debug=debug,
                                     stop_on_failure=stop_on_failure,
                                     binary_debug=binary_debugi,
                                     compare=compare, dev=dev,
                                     xref_safe=xref_safe,
                                     is_testing=True)[1]
                if not is_passedi:
                    is_passed = False
                    break

            if not is_passed:
                sys.stderr.write(f'**file={op2file}\n')
                failed_cases.append(op2file)
                nfailed += 1
            else:
                npassed += 1
    return failed_cases
def run_op2(op2_filename: str, make_geom: bool=False, combine: bool=True,
            write_bdf: bool=False, read_bdf: Optional[bool]=None,
            write_f06: bool=True, write_op2: bool=False,
            write_hdf5: bool=True,
            is_mag_phase: bool=False, is_sort2: bool=False,
            is_nx: Optional[bool]=None, is_autodesk: Optional[bool]=None,
            is_nasa95: Optional[bool]=None,
            delete_f06: bool=False, delete_op2: bool=False, delete_hdf5: bool=False,
            delete_debug_out: bool=False,
            build_pandas: bool=True,
            subcases: Optional[str]=None, exclude: Optional[str]=None,
            short_stats: bool=False, compare: bool=True,
            debug: bool=False, log: Any=None,
            binary_debug: bool=False, quiet: bool=False,
            stop_on_failure: bool=True,
            dev: bool=False, xref_safe: bool=False,
            post: Any=None, load_as_h5: bool=False,
            is_testing: bool=False,
            name: str='') -> Tuple[OP2, bool]:
    """
    Runs an OP2

    Parameters
    ----------
    op2_filename : str
        path of file to test
    make_geom : bool; default=False
        should the GEOMx, EPT, MPT, DYNAMIC, DIT, etc. tables be read
    combine : bool; default=True
        should the op2 tables be combined
    write_bdf : bool; default=False
        should a BDF be written based on the geometry tables
    write_f06 : bool; default=True
        should an F06 be written based on the results
    write_op2 : bool; default=False
        should an OP2 be written based on the results
    write_hdf5 : bool; default=True
        should an HDF5 be written based on the results
    is_mag_phase : bool; default=False
        False : write real/imag results
        True : write mag/phase results
        For static results, does nothing
    is_sort2 : bool; default=False
        False : writes "transient" data is SORT1
        True : writes "transient" data is SORT2
    is_nx : bool; default=None
        True : use NX Nastran
        False : use MSC Nastran
        None : guess
    is_autodesk : bool; default=None
        True : use Autodesk Nastran
        False : use MSC Nastran
        None : guess
    is_nasa95 : bool; default=None
        True : use NASA 95 Nastran
        False : use MSC Nastran
        None : guess
    delete_f06 : bool; default=False
        deletes the F06 (assumes write_f06 is True)
    delete_op2 : bool; default=False
        deletes the OP2 (assumes write_op2 is True)
    delete_hdf5 : bool; default=False
        deletes the HDF5 (assumes write_hdf5 is True)
    subcases : List[int, ...]; default=None
        limits subcases to specified values; default=None -> no limiting
        NOTE(review): despite the ``Optional[str]`` annotation, this also
        accepts a '1_2_3'/'1,2,3' style string or a list of ints (see below)
    exclude : List[str, ...]; default=None
        limits result types; (remove what's listed)
    short_stats : bool; default=False
        print a short version of the op2 stats
    compare : bool
        True : compares vectorized result to slow vectorized result
        False : doesn't run slow vectorized result
    debug : bool; default=False
        debug flag for OP2
    log : logger; default=None
        a custom logger
        None : use debug
    binary_debug : bool; default=False
        creates a very cryptic developer debug file showing exactly what was parsed
    quiet : bool; default=False
        don't write debug messages
    stop_on_failure : bool; default=True
        is this used???
    dev : bool; default=False
        flag that is used by op2_test.py to ignore certain errors
        False : crash on errors
        True : don't crash
    is_testing: bool; default=False
        True: release mode
        False : be picky with table parsing

    Returns
    -------
    op2 : OP2()
        the op2 object
    is_passed : bool
        did the test pass
    """
    assert build_pandas in [True, False]
    if read_bdf is None:
        read_bdf = write_bdf
    op2 = None
    op2_nv = None
    if subcases is None:
        subcases = []
    if exclude is None:
        exclude = []
    if isinstance(is_sort2, bool):
        sort_methods = [is_sort2]
    else:
        sort_methods = is_sort2
    assert '.op2' in op2_filename.lower(), f'op2_filename={op2_filename} is not an OP2'
    is_passed = False

    fname_base = os.path.splitext(op2_filename)[0]
    bdf_filename = f'{fname_base}.test_op2{name}.bdf'

    # normalize '1_2_3' / '1,2,3' / '1' into a list of subcase ids
    if isinstance(subcases, str):
        subcases = subcases.replace('_', ' ').replace(',', ' ').strip()
        if ' ' in subcases:
            subcases = [int(i) for i in subcases.split(' ')]
        else:
            subcases = [int(subcases)]

    debug_file = None
    model = os.path.splitext(op2_filename)[0]
    if binary_debug or write_op2:
        debug_file = model + '.debug.out'
    #print('debug_file = %r' % debug_file, os.getcwd())

    # build the reader(s): op2 is the fast/vectorized reader, op2_nv is the
    # slow non-vectorized reader used for cross-checking when compare=True
    if make_geom:
        op2 = OP2Geom(debug=debug, log=log)
        op2_nv = OP2Geom(debug=debug, log=log, debug_file=debug_file)
        op2_bdf = OP2Geom(debug=debug, log=log)
        set_versions([op2, op2_nv, op2_bdf], is_nx, is_autodesk, is_nasa95, post, is_testing)

        if load_as_h5 and IS_HDF5:
            # you can't open the same h5 file twice
            op2.load_as_h5 = load_as_h5
            #op2_nv.load_as_h5 = load_as_h5
            #op2_bdf.load_as_h5 = load_as_h5

        op2_bdf.set_error_storage(nparse_errors=0, stop_on_parsing_error=True,
                                  nxref_errors=0, stop_on_xref_error=True)
    else:
        op2 = OP2(debug=debug, log=log)
        # have to double write this until ???
        op2_nv = OP2(debug=debug, log=log, debug_file=debug_file)
        set_versions([op2, op2_nv], is_nx, is_autodesk, is_nasa95, post, is_testing)
        if load_as_h5 and IS_HDF5:
            # you can't open the same h5 file twice
            op2.load_as_h5 = load_as_h5
            #op2_nv.load_as_h5 = load_as_h5
        op2_bdf = None

    op2_nv.use_vector = False

    if not quiet:
        op2.log.debug(f'subcases = {subcases}')
    if subcases:
        op2.set_subcases(subcases)
        op2_nv.set_subcases(subcases)
    op2.remove_results(exclude)
    op2_nv.remove_results(exclude)

    try:
        #op2.read_bdf(op2.bdf_filename, includeDir=None, xref=False)
        # read with the slow reader first so a failure there is reported
        # before the vectorized reader has a chance to mask it
        if compare:
            op2_nv.read_op2(op2_filename, combine=combine)
        op2.read_op2(op2_filename, combine=combine)
        #if not make_geom:  # TODO: enable this...
            #op2.save()

        # smoke-test the introspection helpers
        #op2a.get_op2_stats()
        op2.get_op2_stats()
        op2.get_op2_stats(short=True)
        op2.object_attributes()
        op2.object_methods()
        if not quiet:
            print(f'---stats for {op2_filename}---')
            print(op2.get_op2_stats(short=short_stats))
            op2.print_subcase_key()

        write_op2_as_bdf(op2, op2_bdf, bdf_filename, write_bdf, make_geom, read_bdf, dev,
                         xref_safe=xref_safe)

        # vectorized and non-vectorized results must agree
        if compare:
            assert op2 == op2_nv

        # HDF5 round-trip check
        if IS_HDF5 and write_hdf5:
            from pyNastran.op2.op2_interface.hdf5_interface import load_op2_from_hdf5_filename
            h5_filename = f'{model}.test_op2{name}.h5'
            op2.export_hdf5_filename(h5_filename)
            load_op2_from_hdf5_filename(h5_filename, log=op2.log)
            if delete_hdf5:
                remove_file(h5_filename)
        if write_f06:
            for is_sort2i in sort_methods:
                f06_filename = f'{model}.test_op2{name}.f06'
                op2.write_f06(f06_filename, is_mag_phase=is_mag_phase,
                              is_sort1=not is_sort2i, quiet=quiet, repr_check=True)
                if delete_f06:
                    remove_file(f06_filename)

        # we put it down here so we don't blame the dataframe for real errors
        if IS_PANDAS and build_pandas:
            op2.build_dataframe()
        #if compare:
            #op2_nv.build_dataframe()

        # OP2 round-trip check: write the results back out and re-read them
        if write_op2:
            model = os.path.splitext(op2_filename)[0]
            op2_filename2 = f'{model}.test_op2{name}.op2'
            total_case_count = op2.write_op2(op2_filename2,
                                             #is_mag_phase=is_mag_phase,
                                             endian=b'<')
            if total_case_count > 0:
                #print('------------------------------')
                op2a = OP2(debug_file='debug.out', log=log)
                op2a.log.info(f'testing written OP2: {op2_filename2}')
                op2a.use_vector = False
                op2a.read_op2(op2_filename2)
                #os.remove(op2_filename2)

                #read_op2(op2_filename2)
            if delete_op2:
                remove_file(op2_filename2)

        if debug_file is not None and delete_debug_out and os.path.exists(debug_file):
            os.remove(debug_file)
        #table_names_f06 = parse_table_names_from_F06(op2.f06FileName)
        #table_names_op2 = op2.getTableNamesFromOP2()
        #print("subcases = ", op2.subcases)

        #if table_names_f06 != table_names_op2:
            #msg = 'table_names_f06=%s table_names_op2=%s' % (table_names_f06, table_names_op2)
            #raise RuntimeError(msg)
        #op2.case_control_deck.sol = op2.sol
        #print(op2.case_control_deck.get_op2_data())
        #print(op2.case_control_deck.get_op2_data())
        is_passed = True
    except MemoryError:
        # never swallow a MemoryError
        raise
    except KeyboardInterrupt:
        sys.stdout.flush()
        print_exc(file=sys.stdout)
        sys.stderr.write(f'**file={op2_filename}\n')
        sys.exit('keyboard stop...')
    #except SortCodeError: # inherits from Runtime; comment this
        #is_passed = True

    #except RuntimeError: # the op2 is bad, not my fault; comment this
        #is_passed = True
        #if stop_on_failure:
            #raise
        #else:
            #is_passed = True

    #except RuntimeError:
        #pass
    #except ValueError:
        #pass
    #except IndexError:
        #pass
    #except FortranMarkerError:
        #pass
    except IOError:  # missing file; this block should be uncommented
        #if stop_on_failure:
            #raise
        if not dev:
            raise
        print(f'{op2_filename} is missing/is not binary')
        raise
        # NOTE(review): the unconditional raise above makes this line
        # unreachable, so dev mode re-raises too -- confirm intent
        is_passed = True
    #except UnicodeDecodeError:  # this block should be commented
        #is_passed = True
    #except NotImplementedError:  # this block should be commented
        #is_passed = True
    except SixtyFourBitError:
        #log.error('SixtyFourBitError')
        #raise
        if not dev:
            raise
        is_passed = False
    except FatalError:  # this block should be commented
        #if stop_on_failure:
            #raise
        if not dev:
            raise
        is_passed = True
    #except KeyError:  # this block should be commented
        #is_passed = True
    #except DeviceCodeError:  # this block should be commented
        #is_passed = True

    #except AssertionError:  # this block should be commented
        #is_passed = True
    #except RuntimeError: #invalid analysis code; this block should be commented
        #is_passed = True
    #except ValueError:  # this block should be commented
        #is_passed = True
    #except NotImplementedError:  # this block should be commented
        #is_passed = True
    #except FortranMarkerError:  # this block should be commented
        #is_passed = True
    except DuplicateIDsError:
        if not dev:
            raise
        is_passed = True
    except OverwriteTableError:
        if not dev:
            raise
        is_passed = True

    except SystemExit:
        #print_exc(file=sys.stdout)
        #sys.exit('stopping on sys.exit')
        raise
    #except NameError:  # variable isnt defined
    #    if stop_on_failure:
    #        raise
    #    else:
    #        is_passed = True
    #except IndexError:  # this block should be commented
        #is_passed = True
    #except SyntaxError: #Param Parse; this block should be commented
        #if stop_on_failure:
            #raise
        #is_passed = True
    except Exception:
        #print(e)
        if stop_on_failure:
            raise
        else:
            print_exc(file=sys.stdout)
            is_passed = False

    return op2, is_passed
def write_op2_as_bdf(op2, op2_bdf, bdf_filename, write_bdf, make_geom, read_bdf, dev,
                     xref_safe=False):
    """writes the op2 geometry to a BDF and optionally round-trips it through a read"""
    if not write_bdf:
        return
    assert make_geom, f'write_bdf=False, but make_geom={make_geom!r}; expected make_geom=True'
    op2._nastran_format = 'msc'
    op2.executive_control_lines = ['CEND\n']
    op2.validate()
    op2.write_bdf(bdf_filename, size=8)
    op2.log.debug('bdf_filename = %s' % bdf_filename)
    # xref_safe=None behaves like True here (xref stays off), matching the
    # original `xref_safe is False` test
    xref = xref_safe is False
    if not read_bdf:
        return
    try:
        op2_bdf.read_bdf(bdf_filename, xref=xref)
        if xref_safe:
            op2_bdf.safe_cross_reference()
    except Exception:
        # in dev mode an empty deck is tolerated; anything else is a real failure
        if not (dev and len(op2_bdf.card_count) == 0):
            raise
    #os.remove(bdf_filename)
def get_test_op2_data(argv) -> Dict[str, str]:
    """defines the docopt interface"""
    from docopt import docopt
    ver = str(pyNastran.__version__)
    # dev builds expose extra flags (--nasa95, --profile, --test, --nocombine)
    is_dev = 'dev' in ver

    msg = "Usage: "
    nasa95 = '|--nasa95' if is_dev else ''
    version = f'[--nx|--autodesk{nasa95}]'
    options = f'[-p] [-d] [-z] [-w] [-t] [-s <sub>] [-x <arg>]... {version} [--safe] [--post POST] [--load_hdf5]'
    if is_dev:
        line1 = f"test_op2 [-q] [-b] [-c] [-g] [-n] [-f] [-o] [--profile] [--test] [--nocombine] {options} OP2_FILENAME\n"
    else:
        line1 = f"test_op2 [-q] [-b] [-c] [-g] [-n] [-f] [-o] {options} OP2_FILENAME\n"
    # collapse runs of spaces; docopt's usage-pattern parsing is
    # whitespace sensitive
    while '  ' in line1:
        line1 = line1.replace('  ', ' ')
    msg += line1
    msg += "       test_op2 -h | --help\n"
    msg += "       test_op2 -v | --version\n"
    msg += "\n"
    msg += "Tests to see if an OP2 will work with pyNastran %s.\n" % ver
    msg += (
        "\n"
        "Positional Arguments:\n"
        "  OP2_FILENAME         Path to OP2 file\n"
        "\n"
        "Options:\n"
        "  -b, --binarydebug      Dumps the OP2 as a readable text file\n"
        "  -c, --disablecompare   Doesn't do a validation of the vectorized result\n"
        "  -q, --quiet            Suppresses debug messages [default: False]\n"
        "  -t, --short_stats      Short get_op2_stats printout\n"
        #if not is_release:
        "  -g, --geometry         Reads the OP2 for geometry, which can be written out\n"
        # n is for NAS
        "  -n, --write_bdf        Writes the bdf to fem.test_op2.bdf (default=False)\n"
        "  -f, --write_f06        Writes the f06 to fem.test_op2.f06\n"
        "  -d, --write_hdf5       Writes the h5 to fem.test_op2.h5\n"
        "  -o, --write_op2        Writes the op2 to fem.test_op2.op2\n"
        "  -z, --is_mag_phase     F06 Writer writes Magnitude/Phase instead of\n"
        "                         Real/Imaginary (still stores Real/Imag); [default: False]\n"
        "  --load_hdf5            Load as HDF5 (default=False)\n"
        "  -p, --pandas           Enables pandas dataframe building; [default: False]\n"
    )
    if is_dev:
        msg += "  --nocombine            Disables case combination\n"
    msg += "  -s <sub>, --subcase    Specify one or more subcases to parse; (e.g. 2_5)\n"
    msg += "  -w, --is_sort2         Sets the F06 transient to SORT2\n"
    msg += "  -x <arg>, --exclude    Exclude specific results\n"
    msg += "  --nx                   Assume NX Nastran\n"
    msg += "  --autodesk             Assume Autodesk Nastran\n"
    if is_dev:
        msg += "  --nasa95               Assume Nastran 95\n"
    msg += "  --post POST            Set the PARAM,POST flag\n"
    msg += "  --safe                 Safe cross-references BDF (default=False)\n"
    if is_dev:
        msg += (
            "\n"
            "Developer:\n"
            '  --profile         Profiles the code (default=False)\n'
            '  --test            Adds additional table checks (default=False)\n'
        )
    msg += (
        "\n"
        "Info:\n"
        "  -h, --help     Show this help message and exit\n"
        "  -v, --version  Show program's version number and exit\n"
    )
    # no arguments -> print the usage message and exit
    if len(argv) == 1:
        sys.exit(msg)

    data = docopt(msg, version=ver, argv=argv[1:])
    if not is_dev:
        # just set the defaults for these so we don't need special code later
        data['--profile'] = False
        data['--write_xlsx'] = False
        data['--nocombine'] = False
        data['--nasa95'] = False
    if '--geometry' not in data:
        data['--geometry'] = False
    if '--write_bdf' not in data:
        data['--write_bdf'] = False
    data['--is_sort2'] = bool(data['--is_sort2'])
    #print("data", data)
    return data
def remove_file(filename):
    """deletes a file; a missing or locked file is silently ignored"""
    try:
        os.remove(filename)
    except OSError:
        # narrowed from a bare `except Exception` so real bugs (e.g. a
        # TypeError from a bad filename type) are no longer swallowed
        pass
def set_versions(op2s: List[OP2],
                 is_nx: bool, is_autodesk: bool, is_nasa95: bool,
                 post: int, is_testing: bool=False) -> None:
    """
    Applies the requested Nastran flavor and PARAM,POST value to each OP2.

    Parameters
    ----------
    op2s : List[OP2]
        the models to update
    is_nx / is_autodesk / is_nasa95 : bool / None
        flavor flags; all None -> keep the autodetected flavor,
        all False -> force MSC
    post : int / None
        PARAM,POST value to apply; None -> leave unset
    is_testing : bool; default=False
        enables extra table checks in the reader
    """
    for op2 in op2s:
        op2.IS_TESTING = is_testing

    if is_nx is None and is_autodesk is None and is_nasa95 is None:
        pass
    elif is_nx:
        for op2 in op2s:
            op2.set_as_nx()
    elif is_autodesk:
        for op2 in op2s:
            op2.set_as_autodesk()
    elif is_nasa95:
        for op2 in op2s:
            op2.set_as_nasa95()
    else:
        for op2 in op2s:
            op2.set_as_msc()

    if post is not None:
        # bug fix: the caller's post value was previously ignored and
        # hard-coded to -4
        for op2 in op2s:
            op2.post = post
def main(argv=None, show_args: bool=True) -> None:
    """the interface for test_op2"""
    if argv is None:
        argv = sys.argv
    data = get_test_op2_data(argv)

    if show_args:
        for key, value in sorted(data.items()):
            print("%-12s = %r" % (key.strip('--'), value))

    if os.path.exists('skippedCards.out'):
        os.remove('skippedCards.out')

    time0 = time.time()

    # both branches must call run_op2 with identical arguments; building the
    # kwargs once fixes the profiled call, which previously passed the
    # invalid keyword 'safe' (run_op2 expects 'xref_safe') and dropped
    # 'short_stats'
    kwargs = dict(
        make_geom=data['--geometry'],
        combine=not data['--nocombine'],
        load_as_h5=data['--load_hdf5'],
        write_bdf=data['--write_bdf'],
        write_f06=data['--write_f06'],
        write_op2=data['--write_op2'],
        write_hdf5=data['--write_hdf5'],
        is_mag_phase=data['--is_mag_phase'],
        build_pandas=data['--pandas'],
        subcases=data['--subcase'],
        exclude=data['--exclude'],
        short_stats=data['--short_stats'],
        debug=not data['--quiet'],
        binary_debug=data['--binarydebug'],
        is_sort2=data['--is_sort2'],
        compare=not data['--disablecompare'],
        quiet=data['--quiet'],
        is_nx=data['--nx'],
        is_autodesk=data['--autodesk'],
        is_nasa95=data['--nasa95'],
        xref_safe=data['--safe'],
        post=data['--post'],
        is_testing=data['--test'],
    )

    if data['--profile']:
        import pstats
        import cProfile
        prof = cProfile.Profile()
        prof.runcall(run_op2, data['OP2_FILENAME'], **kwargs)
        prof.dump_stats('op2.profile')

        stats = pstats.Stats("op2.profile")
        stats.sort_stats('tottime')  # time in function
        #stats.sort_stats('cumtime')  # time in function & subfunctions
        stats.strip_dirs()
        stats.print_stats(40)
    else:
        run_op2(data['OP2_FILENAME'], **kwargs)
    print("dt = %f" % (time.time() - time0))
if __name__ == '__main__': # pragma: no cover
main(show_args=True)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,643
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/mesh_utils/loads.py
|
"""
Defines:
- sum_forces_moments
find the net force/moment on the model
- sum_forces_moments_elements
find the net force/moment on the model for a subset of elements
"""
from __future__ import annotations
from typing import Tuple, List, Dict, Optional, TYPE_CHECKING
from math import radians, sin, cos
import numpy as np
from numpy import array, cross, allclose, mean
from numpy.linalg import norm # type: ignore
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.bdf.utils import get_xyz_cid0_dict, transform_load
from pyNastran.bdf.cards.loads.static_loads import update_pload4_vector, PLOAD4
if TYPE_CHECKING: # pragma: no cover
from pyNastran.nptyping import NDArray3float
from pyNastran.bdf.bdf import BDF, Subcase
def isnan(value):
    """treats None and float NaN both as 'no value'"""
    if value is not None:
        return np.isnan(value)
    return True
def sum_forces_moments(model: BDF, p0: np.ndarray, loadcase_id: int,
                       cid: int=0,
                       include_grav: bool=False,
                       xyz_cid0: Optional[Dict[int, NDArray3float]]=None,
                       ) -> Tuple[NDArray3float, NDArray3float]:
    """
    Sums applied forces & moments about a reference point p0 for all
    load cases.

    Considers:
      - FORCE, FORCE1, FORCE2
      - MOMENT, MOMENT1, MOMENT2
      - PLOAD, PLOAD2, PLOAD4
      - LOAD

    Parameters
    ----------
    model : BDF()
        a BDF object
    p0 : NUMPY.NDARRAY shape=(3,) or integer (node ID)
        the reference point
    loadcase_id : int
        the LOAD=ID to analyze
    cid : int; default=0
        the coordinate system for the summation
    include_grav : bool; default=False
        includes gravity in the summation (not supported)
    xyz_cid0 : None / Dict[int] = (3, ) ndarray
        the nodes in the global coordinate system

    Returns
    -------
    forces : NUMPY.NDARRAY shape=(3,)
        the forces
    moments : NUMPY.NDARRAY shape=(3,)
        the moments

    .. warning:: not full validated
    .. todo:: It's super slow for cid != 0.   We can speed this up a lot
              if we calculate the normal, area, centroid based on
              precomputed node locations.

    Pressure acts in the normal direction per model/real/loads.bdf and loads.f06
    """
    if not isinstance(loadcase_id, integer_types):
        raise RuntimeError('loadcase_id must be an integer; loadcase_id=%r' % loadcase_id)
    # reference point is always resolved in the basic (cid=0) frame; the
    # requested cid is applied as a final transform at the end
    p = _get_load_summation_point(model, p0, cid=0)

    loads, scale_factors, unused_is_grav = model.get_reduced_loads(
        loadcase_id, skip_scale_factor0=True)

    # running totals in the basic frame
    F = array([0., 0., 0.])
    M = array([0., 0., 0.])
    xyz = get_xyz_cid0_dict(model, xyz_cid0=xyz_cid0)

    unsupported_types = set()
    for load, scale in zip(loads, scale_factors):
        #if load.type not in ['FORCE1']:
            #continue
        # --- concentrated forces ---
        if load.type == 'FORCE':
            if load.Cid() != 0:
                cp_ref = load.cid_ref
                #from pyNastran.bdf.bdf import CORD2R
                #cp_ref = CORD2R()
                f = load.mag * cp_ref.transform_vector_to_global(load.xyz) * scale
            else:
                f = load.mag * load.xyz * scale
            node = model.Node(load.node_id)
            r = xyz[node.nid] - p
            m = cross(r, f)
            F += f
            M += m
        elif load.type == 'FORCE1':
            # FORCE1/FORCE2 store a unit direction in load.xyz, so no
            # coordinate transform is needed
            f = load.mag * load.xyz * scale
            node = model.Node(load.node_id)
            r = xyz[node.nid] - p
            m = cross(r, f)
            F += f
            M += m
        elif load.type == 'FORCE2':
            f = load.mag * load.xyz * scale
            node = model.Node(load.node_id)
            r = xyz[node.nid] - p
            m = cross(r, f)
            F += f
            M += m
        # --- concentrated moments (no force contribution) ---
        elif load.type == 'MOMENT':
            if load.Cid() != 0:
                cp = load.cid_ref
                #from pyNastran.bdf.bdf import CORD2R
                #cp = CORD2R()
                m = load.mag * cp.transform_vector_to_global(load.xyz) * scale
            else:
                m = load.mag * load.xyz * scale
            M += m
        elif load.type == 'MOMENT1':
            m = load.mag * load.xyz * scale
            M += m
        elif load.type == 'MOMENT2':
            m = load.mag * load.xyz * scale
            M += m
        # --- pressure loads ---
        elif load.type == 'PLOAD':
            nodes = load.node_ids
            nnodes = len(nodes)
            if nnodes == 3:
                n1, n2, n3 = xyz[nodes[0]], xyz[nodes[1]], xyz[nodes[2]]
                axb = cross(n1 - n2, n1 - n3)
                centroid = (n1 + n2 + n3) / 3.
            elif nnodes == 4:
                # quad normal from the diagonals
                n1, n2, n3, n4 = xyz[nodes[0]], xyz[nodes[1]], xyz[nodes[2]], xyz[nodes[3]]
                axb = cross(n1 - n3, n2 - n4)
                centroid = (n1 + n2 + n3 + n4) / 4.
            else:
                msg = 'invalid number of nodes on PLOAD card; nodes=%s' % str(nodes)
                raise RuntimeError(msg)

            area, normal = _get_area_normal(axb, nodes, xyz)
            r = centroid - p
            f = load.pressure * area * normal * scale
            m = cross(r, f)
            F += f
            M += m
        elif load.type == 'PLOAD1':
            _pload1_total(model, loadcase_id, load, scale, xyz, F, M, p)
        elif load.type == 'PLOAD2':
            pressure = load.pressure * scale
            for eid in load.element_ids:
                elem = model.elements[eid]
                if elem.type in ['CTRIA3', 'CQUAD4', 'CSHEAR', 'CQUADR', 'CTRIAR']:
                    n = elem.Normal()
                    area = elem.Area()
                    f = pressure * n * area
                    r = elem.Centroid() - p
                    m = cross(r, f)
                    F += f
                    M += m
                else:
                    model.log.warning('case=%s etype=%r loadtype=%r not supported' % (
                        loadcase_id, elem.type, load.type))
        elif load.type == 'PLOAD4':
            _pload4_total(loadcase_id, load, scale, xyz, F, M, p)
        elif load.type == 'GRAV':
            if include_grav:  # this will be super slow
                gravity = load.GravityVector() * scale
                for eid, elem in model.elements.items():
                    centroid = elem.Centroid()
                    mass = elem.Mass()
                    r = centroid - p
                    f = mass * gravity
                    m = cross(r, f)
                    F += f
                    M += m
        else:
            # we collect them so we only get one print
            unsupported_types.add(load.type)

    for load_type in unsupported_types:
        model.log.warning('case=%s loadtype=%r not supported' % (loadcase_id, load_type))
    #forces, moments = sum_forces_moments(self, p0, loadcase_id,
                                         #include_grav=include_grav, xyz_cid0=xyz_cid0)
    if cid == 0:
        return F, M
    # transform the basic-frame totals into the requested output frame
    cid0 = 0
    F2, M2 = transform_load(F, M, cid0, cid, model)
    return F2, M2
def _pload1_total(model, loadcase_id, load, scale, xyz, F, M, p):
"""helper method for ``sum_forces_moments``"""
elem = load.eid_ref
if elem.type in ['CBAR', 'CBEAM']:
_pload1_bar_beam(model, loadcase_id, load, elem, scale, xyz, F, M, p)
elif elem.type == 'CBEND':
model.log.warning('case=%s etype=%r loadtype=%r not supported' % (
loadcase_id, elem.type, load.type))
else:
raise RuntimeError('element.type=%r is not a CBAR, CBEAM, or CBEND' % elem.type)
def _pload1_elements(model, loadcase_id, load, scale, eids, xyz, F, M, p):
"""helper method for ``sum_forces_moments_elements``"""
#elem = model.elements[load.eid]
elem = load.eid_ref
if elem.eid not in eids:
return
_pload1_total(model, loadcase_id, load, scale, xyz, F, M, p)
def _pload1_bar_beam(model, unused_loadcase_id, load, elem, scale, xyz, F, M, p):
"""
helper method for ``sum_forces_moments`` and ``sum_forces_moments_elements``
"""
p1 = load.p1 * scale
p2 = load.p2 * scale
nodes = elem.node_ids
n1, n2 = xyz[nodes[0]], xyz[nodes[1]]
n1 += elem.wa
n2 += elem.wb
bar_vector = n2 - n1
L = norm(bar_vector)
try:
Ldir = bar_vector / L
except Exception:
msg = 'Length=0.0; nid1=%s nid2=%s\n' % (nodes[0], nodes[1])
msg += '%s%s' % (str(elem.nodes[0]), str(elem.nodes[1]))
raise FloatingPointError(msg)
if load.scale == 'FR': # x1, x2 are fractional lengths
x1 = load.x1
x2 = load.x2
#compute_fx = False
elif load.scale == 'LE': # x1, x2 are actual lengths
x1 = load.x1 / L
x2 = load.x2 / L
elif load.scale == 'LEPR':
model.log.warning('PLOAD1: LEPR continue')
return
#msg = 'scale=%r is not supported. Use "FR", "LE".' % load.scale
#raise NotImplementedError(msg)
elif load.scale == 'FRPR':
model.log.warning('PLOAD1: FRPR continue')
return
#msg = 'scale=%r is not supported. Use "FR", "LE".' % load.scale
#raise NotImplementedError(msg)
else:
msg = 'PLOAD1: scale=%r is not supported. Use "FR", "LE".' % load.scale
raise NotImplementedError(msg)
# FY - force in basic coordinate system
# FR - fractional;
assert x1 <= x2, 'x1=%s x2=%s' % (x1, x2)
if x1 != x2:
# continue
if not load.type in ['FX', 'FY', 'FZ']:
model.log.warning('PLOAD1 x1 != x2 continue; x1=%s x2=%s; scale=%r\n%s%s'% (
x1, x2, load.scale, str(elem), str(load)))
return
model.log.warning('check this...PLOAD1 x1 != x2; x1=%s x2=%s; scale=%r\n%s%s'% (
x1, x2, load.scale, str(elem), str(load)))
# y = (y2-y1)/(x2-x1)*(x-x1) + y1
# y = (y2-y1) * (x-x1)/(x2-x1) + y1
# y = y2*(x-x1)/(x2-x1) + y1*(1-(x-x1)/(x2-x1))
# y = y2 * r + y1 * (1-r)
# r = (x-x1)/(x2-x1)
#
# y = y2 * r + y1 - y1 * r
# yi = y2 * ri + y1 * x + y1 * ri
# yi = y2 * ri + y1 * (x2-x1) + y1 * ri
#
# ri = integral(r)
# ri = 1/(x2-x1) * (0.5) * (x1-x2)**2
#
# yi = integral(y)
# yi = y2 * ri + y1 * (x2-x1) + y1 * ri
# ri = 1./(x2-x1) * (0.5) * (x1-x2)**2
# y1 = p1
# y2 = p2
# yi = y2 * ri + y1 * (x2-x1) + y1 * ri
# F = yi
if allclose(p1, -p2):
Ftotal = p1
x = (x1 + x2) / 2.
else:
Ftotal = L * (x2-x1) * (p1 + p2)/2.
Mx = L * p1 * (x2-x1)/2. + L * (p2-p1) * (2./3. * x2 + 1./3. * x1)
x = Mx / Ftotal
model.log.info('L=%s x1=%s x2=%s p1/L=%s p2/L=%s Ftotal=%s Mtotal=%s x=%s' % (
L, x1, x2, p1, p2, Ftotal, Mx, x))
unused_i = Ldir
if load.Type in ['FX', 'FY', 'FZ']:
r = (1. - x) * n1 + x * n2
#print('r=%s n1=%s n2=%s' % (r, n1, n2))
if load.Type == 'FX':
force_dir = array([1., 0., 0.])
elif load.Type == 'FY':
force_dir = array([0., 1., 0.])
elif load.Type == 'FZ':
force_dir = array([0., 0., 1.])
else:
raise NotImplementedError('Type=%r is not supported. '
'Use "FX", "FY", "FZ".' % load.Type)
Fi = Ftotal * force_dir
Mi = cross(r - p, force_dir * Ftotal)
F += Fi
M += Mi
model.log.info('Fi=%s Mi=%s x=%s' % (Fi, Mi, x))
else:
_bar_eq_pload1(load, elem, xyz, Ldir,
n1, n2,
x1, x2,
p1, p2,
F, M, p)
return
def sum_forces_moments_elements(model: BDF, p0: int, loadcase_id: int,
                                eids: List[int], nids: List[int],
                                cid: int=0,
                                include_grav: bool=False,
                                xyz_cid0: Optional[Dict[int, NDArray3float]]=None,
                                ) -> Tuple[NDArray3float, NDArray3float]:
    """
    Sum the forces/moments based on a list of nodes and elements.
    Parameters
    ----------
    model : BDF()
        a BDF object
    eids : List[int]
        the list of elements to include (e.g. the loads due to a PLOAD4)
    nids : List[int]
        the list of nodes to include (e.g. the loads due to a FORCE card)
    p0 : int; (3,) ndarray
        the point to sum moments about
        type = int
            sum moments about the specified grid point
        type = (3, ) ndarray/list (e.g. [10., 20., 30]):
            the x, y, z location in the global frame
    loadcase_id : int
        the LOAD=ID to analyze
    include_grav : bool; default=False
        includes gravity in the summation (not supported)
    xyz_cid0 : None / Dict[int] = (3, ) ndarray
        the nodes in the global coordinate system
    Returns
    -------
    forces : NUMPY.NDARRAY shape=(3,)
        the forces
    moments : NUMPY.NDARRAY shape=(3,)
        the moments
    Nodal Types  : FORCE, FORCE1, FORCE2,
                   MOMENT, MOMENT1, MOMENT2,
                   PLOAD
    Element Types: PLOAD1, PLOAD2, PLOAD4, GRAV
    If you have a CQUAD4 (eid=3) with a PLOAD4 (sid=3) and a FORCE
    card (nid=5) acting on it, you can incldue the PLOAD4, but
    not the FORCE card by using:
    For just pressure:
    .. code-block:: python
       eids = [3]
       nids = []
    For just force:
    .. code-block:: python
       eids = []
       nids = [5]
    or both:
    .. code-block:: python
       eids = [3]
       nids = [5]
    .. note:: If you split the model into sections and sum the loads
              on each section, you may not get the same result as
              if you summed the loads on the total model.  This is
              due to the fact that nodal loads on the boundary are
              double/triple/etc. counted depending on how many breaks
              you have.
    .. todo:: not done...
    """
    if not isinstance(loadcase_id, integer_types):
        raise RuntimeError('loadcase_id must be an integer; loadcase_id=%r' % loadcase_id)
    # p is always resolved in the basic frame; a cid transform happens at the end
    p = _get_load_summation_point(model, p0, cid=0)
    # None -> include all elements/nodes
    if eids is None:
        eids = list(model.element_ids)
    if nids is None:
        nids = list(model.node_ids)
    loads, scale_factors, unused_is_grav = model.get_reduced_loads(
        loadcase_id, skip_scale_factor0=True)
    # running totals, mutated in place by the helpers
    F = array([0., 0., 0.])
    M = array([0., 0., 0.])
    xyz = get_xyz_cid0_dict(model, xyz_cid0)
    unsupported_types = set()
    shell_elements = {
        'CTRIA3', 'CQUAD4', 'CTRIAR', 'CQUADR',
        'CTRIA6', 'CQUAD8', 'CQUAD', 'CSHEAR'}
    # load types that are deliberately ignored (no static force resultant)
    skip_loads = {'QVOL'}
    for load, scale in zip(loads, scale_factors):
        loadtype = load.type
        if loadtype == 'FORCE':
            if load.node_id not in nids:
                continue
            if load.Cid() != 0:
                # the FORCE vector is defined in a non-basic frame; rotate it
                cp_ref = load.cid_ref
                f = load.mag * cp_ref.transform_vector_to_global(load.xyz) * scale
            else:
                f = load.mag * load.xyz * scale
            node = model.Node(load.node_id)
            r = xyz[node.nid] - p
            m = cross(r, f)
            F += f
            M += m
        elif load.type == 'FORCE1':
            # skip the load unless *all* of its nodes are in the subset
            not_found_nid = False
            for nid in load.node_ids:
                if nid not in nids:
                    not_found_nid = True
                    break
            if not_found_nid:
                continue
            f = load.mag * load.xyz * scale
            node = model.Node(load.node_id)
            r = xyz[node.nid] - p
            m = cross(r, f)
            F += f
            M += m
        elif load.type == 'FORCE2':
            not_found_nid = False
            for nid in load.node_ids:
                if nid not in nids:
                    not_found_nid = True
                    break
            if not_found_nid:
                continue
            f = load.mag * load.xyz * scale
            node = model.Node(load.node_id)
            r = xyz[node.nid] - p
            m = cross(r, f)
            F += f
            M += m
        elif load.type == 'MOMENT':
            not_found_nid = False
            for nid in load.node_ids:
                if nid not in nids:
                    not_found_nid = True
                    break
            if not_found_nid:
                continue
            if load.Cid() != 0:
                cp_ref = load.cid_ref
                m = cp_ref.transform_vector_to_global(load.xyz)
            else:
                m = load.xyz
            M += load.mag * m * scale
        elif load.type == 'MOMENT1':
            not_found_nid = False
            for nid in load.node_ids:
                if nid not in nids:
                    not_found_nid = True
                    break
            if not_found_nid:
                continue
            m = load.mag * load.xyz * scale
            M += m
        elif loadtype == 'MOMENT2':
            not_found_nid = False
            for nid in load.node_ids:
                if nid not in nids:
                    not_found_nid = True
                    break
            if not_found_nid:
                continue
            m = load.mag * load.xyz * scale
            M += m
        elif loadtype == 'PLOAD':
            # the pressure contribution is scaled by the fraction of the
            # PLOAD's nodes that are in the requested node subset
            nodes = load.node_ids
            nnodes = len(nodes)
            nodesi = 0
            if nnodes == 3:
                n1, n2, n3 = xyz[nodes[0]], xyz[nodes[1]], xyz[nodes[2]]
                axb = cross(n1 - n2, n1 - n3)
                centroid = (n1 + n2 + n3) / 3.
            elif nnodes == 4:
                n1, n2, n3, n4 = xyz[nodes[0]], xyz[nodes[1]], xyz[nodes[2]], xyz[nodes[3]]
                axb = cross(n1 - n3, n2 - n4)
                centroid = (n1 + n2 + n3 + n4) / 4.
                # NOTE(review): nodes[3] is only counted here (quad branch);
                # nodes[0..2] are counted below for both shapes
                if nodes[3] in nids:
                    nodesi += 1
            else:
                raise RuntimeError('invalid number of nodes on PLOAD card; '
                                   'nodes=%s' % str(nodes))
            if nodes[0] in nids:
                nodesi += 1
            if nodes[1] in nids:
                nodesi += 1
            if nodes[2] in nids:
                nodesi += 1
            area, normal = _get_area_normal(axb, nodes, xyz)
            r = centroid - p
            f = load.pressure * area * normal * scale
            m = cross(r, f)
            node_scale = nodesi / float(nnodes)
            F += f * node_scale
            M += m * node_scale
        elif loadtype == 'PLOAD1':
            _pload1_elements(model, loadcase_id, load, scale, eids, xyz, F, M, p)
        elif loadtype == 'PLOAD2':
            pressure = load.pressure * scale
            for eid in load.element_ids:
                if eid not in eids:
                    continue
                elem = model.elements[eid]
                if elem.type in shell_elements:
                    normal = elem.Normal()
                    area = elem.Area()
                    f = pressure * normal * area
                    r = elem.Centroid() - p
                    m = cross(r, f)
                    F += f
                    M += m
                else:
                    raise NotImplementedError('case=%s etype=%r loadtype=%r not supported' % (
                        loadcase_id, elem.type, loadtype))
        elif loadtype == 'PLOAD4':
            _pload4_elements(loadcase_id, load, scale, eids, xyz, F, M, p)
        elif loadtype == 'GRAV':
            if include_grav:  # this will be super slow
                g = load.GravityVector() * scale
                for eid, elem in model.elements.items():
                    if eid not in eids:
                        continue
                    centroid = elem.Centroid()
                    mass = elem.Mass()
                    r = centroid - p
                    f = mass * g
                    m = cross(r, f)
                    F += f
                    M += m
        elif loadtype in skip_loads:
            continue
        else:
            # we collect them so we only get one print
            unsupported_types.add(loadtype)
    for loadtype in unsupported_types:
        model.log.warning('case=%s loadtype=%r not supported' % (loadcase_id, loadtype))
    if cid == 0:
        return F, M
    # transform the basic-frame resultants into the requested output frame
    cid0 = 0
    F2, M2 = transform_load(F, M, cid0, cid, model)
    return F2, M2
def _bar_eq_pload1(load, elem, xyz, Ldir,
                   n1, n2,
                   x1, x2,
                   p1, unused_p2,
                   F, M, p):
    """helper for ``_elements_pload1`` and ``_elementi_pload1``

    Applies a concentrated PLOAD1 (x1 == x2) in either the basic frame
    (FX/FY/FZ, MX/MY/MZ) or the element frame (FXE/..., MXE/...),
    accumulating into F and M in place.

    NOTE(review): every branch below binds force_dir/moment_dir only when
    ``x1 == x2``; if this is ever reached with x1 != x2 the subsequent use
    raises NameError.  The caller appears to filter that case upstream —
    confirm before relying on it.
    """
    # build the element frame (i along the bar, k from i x v, j completes it)
    v = elem.get_orientation_vector(xyz)
    i = Ldir
    ki = cross(i, v)
    k = ki / norm(ki)
    j = cross(k, i)
    if load.Type in ['FX', 'FY', 'FZ']:
        # point of application along the bar
        r = (1 - x1) * n1 + x1 * n2
        if load.Type == 'FX' and x1 == x2:
            force_dir = array([1., 0., 0.])
        elif load.Type == 'FY' and x1 == x2:
            force_dir = array([0., 1., 0.])
        elif load.Type == 'FZ' and x1 == x2:
            force_dir = array([0., 0., 1.])
        F += p1 * force_dir
        M += cross(r - p, F)
    elif load.Type in ['MX', 'MY', 'MZ']:
        if load.Type == 'MX' and x1 == x2:
            moment_dir = array([1., 0., 0.])
        elif load.Type == 'MY' and x1 == x2:
            moment_dir = array([0., 1., 0.])
        elif load.Type == 'MZ' and x1 == x2:
            moment_dir = array([0., 0., 1.])
        M += p1 * moment_dir
    elif load.Type in ['FXE', 'FYE', 'FZE']:
        # element-frame force: direction comes from the (i, j, k) triad
        r = (1 - x1) * n1 + x1 * n2
        if load.Type == 'FXE' and x1 == x2:
            force_dir = i
        elif load.Type == 'FYE' and x1 == x2:
            force_dir = j
        elif load.Type == 'FZE' and x1 == x2:
            force_dir = k
        try:
            F += p1 * force_dir
        except FloatingPointError:
            msg = 'eid = %s\n' % elem.eid
            msg += 'i = %s\n' % Ldir
            msg += 'force_dir = %s\n' % force_dir
            msg += 'load = \n%s' % str(load)
            raise FloatingPointError(msg)
        # NOTE(review): the moment uses the *accumulated* F, not just this
        # load's contribution — matches the FX/FY/FZ branch above; confirm
        M += cross(r - p, F)
        del force_dir
    elif load.Type in ['MXE', 'MYE', 'MZE']:
        if load.Type == 'MXE' and x1 == x2:
            moment_dir = i
        elif load.Type == 'MYE' and x1 == x2:
            moment_dir = j
        elif load.Type == 'MZE' and x1 == x2:
            moment_dir = k
        try:
            M += p1 * moment_dir
        except FloatingPointError:
            msg = 'eid = %s\n' % elem.eid
            msg += 'moment_dir = %s\n' % moment_dir
            msg += 'load = \n%s' % str(load)
            raise FloatingPointError(msg)
        del moment_dir
    else:
        raise NotImplementedError(
            'Type=%r is not supported.\n'
            'Use [FX, FXE, FY, FYE, FZ, FZE,\n'
            '     MX, MXE, MY, MYE, MZ, MZE]' % load.Type)
    return F, M
def _pload4_total(loadcase_id, load, scale, xyz, F, M, p):
"""helper method for ``sum_forces_moments``"""
assert load.line_load_dir == 'NORM', 'line_load_dir = %s' % (load.line_load_dir)
for elem in load.eids_ref:
fi, mi = _pload4_helper(loadcase_id, load, scale, elem, xyz, p)
F += fi
M += mi
return F, M
def _pload4_elements(loadcase_id, load, scale, eids, xyz, F, M, p):
"""helper method for ``sum_forces_moments_elements``"""
assert load.line_load_dir == 'NORM', 'line_load_dir = %s' % (load.line_load_dir)
for elem in load.eids_ref:
eid = elem.eid
if eid not in eids:
continue
fi, mi = _pload4_helper(loadcase_id, load, scale, elem, xyz, p)
F += fi
M += mi
return F, M
def _get_pload4_area_centroid_normal_nface(loadcase_id: int, load: PLOAD4, elem, xyz):
    """gets the nodes, area, face_centroid, normal, and nface

    For shells the whole element is the loaded face; for solids the
    loaded face is located from the PLOAD4 G1/G34 corner nodes.
    ``nodes`` is only populated for shell elements (None for solids).
    """
    etype = elem.type
    if etype in ['CTRIA3', 'CTRIA6', 'CTRIAR',]:
        # triangles
        nodes = elem.node_ids
        n1, n2, n3 = xyz[nodes[0]], xyz[nodes[1]], xyz[nodes[2]]
        axb = cross(n1 - n2, n1 - n3)
        area, normal = _get_area_normal(axb, nodes, xyz)
        face_centroid = (n1 + n2 + n3) / 3.
        nface = 3
    elif etype in ['CQUAD4', 'CQUAD8', 'CQUAD', 'CQUADR', 'CSHEAR']:
        # quads; the diagonal cross product gives the mean-plane normal
        nodes = elem.node_ids
        n1, n2, n3, n4 = xyz[nodes[0]], xyz[nodes[1]], xyz[nodes[2]], xyz[nodes[3]]
        axb = cross(n1 - n3, n2 - n4)
        area, normal = _get_area_normal(axb, nodes, xyz)
        face_centroid = (n1 + n2 + n3 + n4) / 4.
        nface = 4
    elif etype == 'CTETRA':
        nodes = None
        face_acn = elem.get_face_area_centroid_normal(load.g1_ref.nid, load.g34_ref.nid)
        unused_face, area, face_centroid, normal = face_acn
        nface = 3
    elif etype == 'CHEXA':
        nodes = None
        # NOTE(review): the (g34, g1) argument order is flipped vs. CTETRA;
        # the original TODO questions whether it's backwards — confirm
        face_acn = elem.get_face_area_centroid_normal(load.g34_ref.nid, load.g1_ref.nid)
        # TODO: backwards?
        #face_acn = elem.get_face_area_centroid_normal(load.g1_ref.nid, load.g34_ref.nid)
        unused_face, area, face_centroid, normal = face_acn
        nface = 4
    elif etype == 'CPENTA':
        nodes = None
        g1 = load.g1_ref.nid
        if load.g34 is None:
            # blank G34 -> the triangular face touching G1
            face_acn = elem.get_face_area_centroid_normal(g1)
            nface = 3
        else:
            face_acn = elem.get_face_area_centroid_normal(g1, load.g34_ref.nid)
            nface = 4
        unused_face, area, face_centroid, normal = face_acn
    elif etype == 'CPYRAM':
        #C:\Program Files\Siemens\NX 12.0\NXNASTRAN\nxn12\nast\demo\sslv09c.dat
        nodes = None
        g1 = load.g1_ref.nid
        g3 = load.g34_ref.nid
        # the first 4 node ids form the quad base; index 4 is the apex
        nids = elem.node_ids[:5]
        in1 = nids.index(g1)
        in3 = nids.index(g3)
        in13 = [in1, in3]
        in13.sort()
        in13 = tuple(in13)
        xyzs = elem.get_node_positions()[:5]
        if in13 in [(0, 2), (1, 3)]:
            # opposite corners of the base -> the quad (bottom) face is loaded
            # G1 Identification number of a grid point connected to a corner of
            #    the face. Required data for solid elements only.
            #    (Integer > 0 or blank)
            # G3 For CHEXA, CPYRAM, or CPENTA quadrilateral faces, G3 is
            #    the identification number of a grid point connected to a corner
            #    diagonally opposite to G1. Required for quadrilateral faces of
            #    CHEXA, CPYRAM and CPENTA elements only.
            p1, p2, p3, p4, unused_p5 = xyzs
            v31 = p3 - p1
            v42 = p4 - p2
            normal = np.cross(v31, v42)
            face_centroid = (p1 + p2 + p3 + p4) / 4.
            nface = 4
        elif in13 in [(0, 1), (1, 2), (2, 3), (0, 3)]:
            # adjacent corners of the base -> the triangular side face
            # containing those corners and the apex is loaded
            # For CPYRAM element triangle faces, G1 and G3 are adjacent
            # corner nodes on the quadrilateral face, and the load is applied
            # on the triangular face which includes those grids.
            #
            #    2
            #   / \
            #  1-----3
            p1 = xyzs[in13[0]]
            p3 = xyzs[in13[1]]
            p2 = xyzs[4]  # top node
            v21 = p2 - p1  # towards the top
            v31 = p3 - p1  # towards the base
            normal = np.cross(v21, v31)
            face_centroid = (p1 + p2 + p3) / 3.
            nface = 3
        else:
            msg = (
                'Invalid CPYRAM faces nodes.  Pick either:\n'
                ' - two opposite nodes on the quad face for pressure on the bottom face\n'
                ' - two adjacent nodes on the quad face for pressure on the side faces\n\n'
                'Do not pick a bottom and the top node for:\n%s' % str(load))
            raise RuntimeError(msg)
        # normalize; |a x b| is twice the (triangle) face area
        ni = np.linalg.norm(normal)
        normal /= ni
        area = 0.5 * ni
    else:
        eid = elem.eid
        msg = 'PLOAD4: case=%s eid=%s etype=%r loadtype=%r not supported\n%s%s' % (
            loadcase_id, eid, etype, load.type, str(load), str(elem))
        raise NotImplementedError(msg)
    return nodes, area, face_centroid, normal, nface
def _pload4_helper(loadcase_id, load, scale, elem, xyz, p):
    """gets the contribution for a single PLOAD4 element

    Returns
    -------
    fi : (3,) ndarray
        force contribution
    mi : (3,) ndarray
        moment contribution about ``p``
    """
    nodes, area, face_centroid, normal, nface = _get_pload4_area_centroid_normal_nface(
        loadcase_id, load, elem, xyz)
    # PLOAD4 carries up to 4 corner pressures; trim to the loaded face size
    pressures = load.pressures[:nface]
    assert len(pressures) == nface
    cid = load.Cid()
    if load.surf_or_line == 'SURF':
        # uniform equivalent pressure applied at the face centroid
        pressure = _mean_pressure_on_pload4(pressures, load, elem)
        load_dir = update_pload4_vector(load, normal, cid)
        r = face_centroid - p
        fi = pressure * area * load_dir * scale
        mi = cross(r, fi)
    elif load.surf_or_line == 'LINE':
        # load per unit edge length (QUADR/TRIAR only per the QRG)
        load_dir = update_pload4_vector(load, normal, cid)
        fi, mi = _pload4_helper_line(load, load_dir, elem, scale, pressures, nodes, xyz, p)
    else:  # pragma: no cover
        msg = 'surf_or_line=%r on PLOAD4 is not supported\n%s' % (
            load.surf_or_line, str(load))
        raise NotImplementedError(msg)
    return fi, mi
def _get_area_normal(axb, nodes, xyz):
"""gets the area/normal vector"""
nunit = norm(axb)
area = 0.5 * nunit
try:
normal = axb / nunit
except FloatingPointError:
msg = ''
for i, nid in enumerate(nodes):
msg += 'nid%i=%i node=%s\n' % (i+1, nid, xyz[nodes[i]])
msg += 'a x b = %s\n' % axb
msg += 'nunit = %s\n' % nunit
raise FloatingPointError(msg)
return area, normal
def _mean_pressure_on_pload4(pressures, load, unused_elem):
"""gets the mean pressure"""
if min(pressures) != max(pressures):
pressure = mean(pressures)
#msg = ('%s%s\npressure.min=%s != pressure.max=%s using average of %%s; '
#'load=%s eid=%%s' % (str(load), str(elem), min(pressures),
#max(pressures), load.sid))
#print(msg % (pressure, eid))
else:
pressure = load.pressures[0]
return pressure
def _get_load_summation_point(model, p0, cid=0):
    """
    helper method
    Parameters
    ----------
    model : BDF()
        a BDF object
    p0 : NUMPY.NDARRAY shape=(3,) or integer (node ID)
        the reference point
    cid : int; default=0
        the coordinate frame the node position is resolved in
        (ignored when p0 is already an xyz triplet)
    """
    if isinstance(p0, integer_types):
        # p0 is a grid id; look up its position
        if cid == 0:
            p = model.nodes[p0].get_position()
        else:
            p = model.nodes[p0].get_position_wrt(model, cid)
    else:
        # p0 is already an xyz location; taken as-is (no cid transform)
        p = array(p0)
    return p
def _pload4_helper_line(load, load_dir, elem, scale, pressures, nodes, xyz, p):
# this is pressure per unit length?
# edge_length * thickness I assume?
fi = np.zeros(3)
mi = np.zeros(3)
edges = []
if len(pressures) == 4:
p1, p2, p3, p4 = pressures
if p1 or p2:
edges.append((0, 1))
if p2 or p3:
edges.append((1, 2))
if p3 or p4:
edges.append((2, 3))
if p4 or p1:
edges.append((3, 1))
elif len(pressures) == 3:
p1, p2, p3 = pressures
if p1 or p2:
edges.append((0, 1))
if p2 or p3:
edges.append((1, 2))
if p3 or p1:
edges.append((2, 1))
else: # pragma: no cover
raise NotImplementedError(pressures)
thickness = elem.Thickness()
for edge in edges:
inode1, inode2 = edge
ixyz1 = nodes[inode1]
ixyz2 = nodes[inode2]
xyz1 = xyz[ixyz1]
xyz2 = xyz[ixyz2]
p1 = pressures[inode1]
p2 = pressures[inode2]
area_edge = thickness * np.linalg.norm(xyz2 - xyz1)
# TODO: fails on case where p1 and p2 are nan
#The SORL field is ignored by all elements except QUADR and TRIAR. For QUADR or TRIAR
#only, if SORL=LINE, the consistent edge loads are defined by the PLOAD4 entry. P1, P2, P3 and
#P4 are load per unit length at the corner of the element.
#If all four Ps are given, then the line loads along all four edges of the element are defined.
#If any P is blank, then the line loads for only two edges are defined.
#For example, if P1 is blank, the line loads of the two edges connecting to G1 are zero.
#If two Ps are given, then the line load of the edge connecting to the two grid points is defined.
#If only one P is given, the second P value default to the first P value. For example, P1 denotes
#that the line load along edge G1 and G2 has the constant value of P1.
#The direction of the line load (SORL=LINE) is defined by either (CID, N1, N2, N3) or LDIR.
#Fatal error will be issued if both methods are given. TANG denotes that the line load is in
#tangential direction of the edge, pointing from G1 to G2 if the edge is connecting G1 and G2.
#NORM denotes that the line load is in the mean plan, normal to the edge, and pointing outward
#from the element. X, Y, or Z denotes the line load is in the X, Y, or Z direction of the element
#coordinate system.
if isnan(p1):
p1 = p2
if isnan(p2):
p2 = p1
assert abs(p1) >= 0.0, f'pressures={pressures}\n{str(load)}{str(elem)}'
assert abs(p2) >= 0.0, f'pressures={pressures}\n{str(load)}{str(elem)}'
centroid1 = (xyz2 + xyz1) / 2.
if p1 > p2:
dp = p1 - p2
pnominal = p2
centroid2 = (2*xyz1 + xyz2) / 3.
else:
dp = p2 - p1
centroid2 = (2*xyz2 + xyz1) / 3.
pnominal = p1
r1 = centroid1 - p
r2 = centroid2 - p
f1 = pnominal * area_edge * load_dir * scale
f2 = dp * area_edge * load_dir * scale
m1 = cross(r1, f1)
m2 = cross(r2, f2)
fi += f1 + f2
mi += m1 + m2
#assert abs(dp) >= 0.0, dp
#assert f1.max() >= 0.0, f1
#assert f2.max() >= 0.0, f2
#assert m1.max() >= 0.0, m1
#assert m2.max() >= 0.0, m2
#assert fi.max() >= 0, fi
#assert mi.max() >= 0, mi
return fi, mi
def get_static_force_vector_from_subcase_id(model: BDF, subcase_id: int):
    """
    solves for F in:
       [K]{x} = {F}

    Builds the global static load vector (g-set) for the subcase's LOAD id.
    """
    load_id, ndof_per_grid, ndof = _get_loadid_ndof(model, subcase_id)
    if load_id in model.load_combinations:
        # LOAD combination: sum the scaled referenced load sets
        loads = model.load_combinations[load_id]
        # NOTE(review): F is re-initialized inside this loop, so only the
        # *last* LOAD card's contribution survives when several share the
        # id; the inner load_id also shadows the outer one — confirm intent
        for load in loads:
            scale = load.scale
            F = np.zeros([ndof], dtype='float64')
            for load_id, loads_ref, scalei in zip(load.load_ids, load.load_ids_ref, load.scale_factors):
                Fi = _Fg_vector_from_loads(
                    model, loads_ref, ndof_per_grid, ndof)
                F += Fi * (scale * scalei)
    else:
        loads = model.loads[load_id]
        F = _Fg_vector_from_loads(model, loads, ndof_per_grid, ndof)
    return F
def get_ndof(model: BDF, subcase: Subcase) -> Tuple[int, int, int]:
    """gets the size of the DOFs

    Heat-transfer subcases carry 1 DOF per GRID; structural ones carry 6.
    SPOINTs/EPOINTs contribute 1 DOF each.
    """
    ndof_per_grid = 1 if 'HEAT' in subcase else 6
    ngrid = model.card_count.get('GRID', 0)
    nspoint = len(model.spoints)
    nepoint = len(model.epoints)
    ndof = ngrid * ndof_per_grid + nspoint + nepoint
    assert ndof > 0, model.card_count
    return ngrid, ndof_per_grid, ndof
def _get_loadid_ndof(model: BDF, subcase_id) -> Tuple[int, int, int]:
    """helper method for ``get_static_force_vector_from_subcase_id``

    Returns (load_id, ndof_per_grid, ndof) for the subcase.
    NOTE(review): load_id stays None when the subcase has no LOAD entry;
    callers then index model.loads[None] — confirm that is intended.
    """
    subcase = model.subcases[subcase_id]
    load_id = None
    if 'LOAD' in subcase:
        load_id, unused_options = subcase['LOAD']
    unused_ngrid, ndof_per_grid, ndof = get_ndof(model, subcase)
    return load_id, ndof_per_grid, ndof
def _get_dof_map(model: BDF) -> Dict[Tuple[int, int], int]:
"""helper method for ``get_static_force_vector_from_subcase_id``"""
i = 0
dof_map = {}
spoints = []
ps = []
for nid, node_ref in model.nodes.items():
if node_ref.type == 'GRID':
for dof in range(1, 7):
dof_map[(nid, dof)] = i
i += 1
for psi in node_ref.ps:
nid_dof = (nid, int(psi))
j = dof_map[nid_dof]
ps.append(j)
elif node_ref.type == 'SPOINT':
spoints.append(node_ref)
#dof_map[(nid, 0)] = i
#i += 1
else:
raise NotImplementedError(node_ref)
# we want the GRID points to be first
assert len(spoints) == 0, spoints
for nid in sorted(model.spoints.keys()):
key = (nid, 0)
if key not in dof_map:
dof_map[key] = i
i += 1
assert len(dof_map) > 0
return dof_map, ps
def _Fg_vector_from_loads(model: BDF, loads, ndof_per_grid: int, ndof: int,
                          fdtype: str='float64'):
    """helper method for ``get_static_force_vector_from_subcase_id``

    Assembles the g-set load vector from FORCE/MOMENT(1/2) and SLOAD
    cards; anything else is collected and reported once as skipped.
    """
    dof_map, unused_ps = _get_dof_map(model)
    Fg = np.zeros([ndof], dtype=fdtype)
    skipped_load_types = set([])
    # placeholder: dynamic/thermal load types to silently skip (none yet)
    not_static_loads = []
    show_force_warning = True
    for load in loads:
        loadtype = load.type
        if load.type in ['FORCE', 'MOMENT']:
            # offset 1 -> translational rows (T1-T3); 4 -> rotational (R1-R3)
            offset = 1 if load.type[0] == 'F' else 4
            show_force_warning = _add_force(Fg, dof_map, model, load, offset, ndof_per_grid, cid=load.cid, show_warning=show_force_warning)
        elif load.type in ['FORCE1', 'MOMENT1',
                           'FORCE2', 'MOMENT2']:
            offset = 1 if load.type[0] == 'F' else 4
            show_force_warning = _add_force(Fg, dof_map, model, load, offset, ndof_per_grid, cid=0, show_warning=show_force_warning)
        elif loadtype == 'SLOAD':
            # scalar loads map to SPOINT rows, keyed (nid, 0)
            for nid, mag in zip(load.nodes, load.mags):
                try:
                    irow = dof_map[(nid, 0)]
                except KeyError:
                    print('spoints =', model.spoints)
                    print('dof_map =', dof_map)
                    raise
                Fg[irow] += mag
        elif loadtype in not_static_loads:
            continue
        else:
            skipped_load_types.add(load.type)
    if skipped_load_types:
        skipped_load_types = list(skipped_load_types)
        skipped_load_types.sort()
        model.log.warning(f'skipping {skipped_load_types} in Fg')
    return Fg
def _force_to_local(cd_ref, vector):
#if cd_ref.type[-1] in ['C', 'S']:
return cd_ref.transform_vector_to_local(vector)
#else:
#print(cd_ref)
#asdf
def _add_force(Fg: np.ndarray, dof_map: Dict[Tuple[int, int], int], model: BDF,
               load, offset: int, ndof_per_grid: int, cid: int=0, show_warning: bool=True):
    """adds the FORCE/MOMENT loads to Fg

    Parameters
    ----------
    Fg : (ndof,) ndarray
        the load vector, mutated in place
    offset : int
        DOF-component offset: 1 for forces (T1-T3), 4 for moments (R1-R3)
    cid : int; default=0
        the load's coordinate system id
    show_warning : bool
        one-shot flag; returned False after the differing-cid warning fires

    Returns
    -------
    show_warning : bool
        the (possibly cleared) warning flag
    """
    #cid = load.cid
    nid = load.node
    node_ref = load.node_ref
    ndofi = ndof_per_grid if node_ref.type == 'GRID' else 1
    # NOTE(review): this assert makes the non-GRID (ndofi == 1) branch above
    # unreachable — SPOINT-attached loads always fail here; confirm intent
    assert ndofi == 6, f'GRID must have 6 DOF for structural analysis\n{node_ref}'
    if node_ref.cd == cid:
        # load frame matches the node's displacement frame; use xyz directly
        fglobal = load.mag * load.xyz
    elif node_ref.cd != cid:
        # rotate through the basic frame into the node's CD frame
        fbasic = load.to_global()
        if show_warning:
            model.log.warning(f'differing cid & cd is not supported; cid={cid} cd={node_ref.cd}')
            show_warning = False
        cd_ref = node_ref.cd_ref
        Tbg = cd_ref.beta()
        # NOTE(review): despite the name, _force_to_local returns the vector
        # in the CD frame, which is what the dof_map rows are in — confirm
        fglobal = _force_to_local(cd_ref, fbasic)
        # dead experimental code for curvilinear (cylindrical/spherical)
        # CD frames; kept for reference, never executed
        if 0:  # pragma: no cover
            if cd_ref.type[-1] in ['C', 'S']:
                ex = Tbg[0, :]
                ey = Tbg[1, :]
                #ez = Tbg[2, :]
                xyz_local = node_ref.get_position_wrt(model, node_ref.cd)
                if cd_ref.type[-1] == 'C':
                    theta = radians(xyz_local[1])
                    ct = cos(theta)
                    st = sin(theta)
                    T = np.array([
                        [ct, -st, 0.],
                        [st, ct, 0.],
                        [0., 0., 1.],
                    ])
                    Tbg = Tbg @ T
                else:
                    from pyNastran.bdf.cards.coordinate_systems import CORD2S
                    rho, thetad, phid = xyz_local
                    coord = CORD2S.add_ijk(-1, origin=cd_ref.origin, i=ex, j=ey, k=None, rid=0, comment='')
                    beta = coord.beta()
                    Tbg = Tbg @ beta
                    coord.transform_vector_to_local([rho, thetad, phid])
                    #theta = radians(xyz_local[1])
                    #phi = radians(xyz_local[2])
                    #ct = cos(theta)
                    #st = sin(theta)
                    #cp = cos(phi)
                    #sp = sin(phi)
                    str(xyz_local)
            else:
                # rectangular
                pass
            Tgb = Tbg.T
            fglobal = Tgb @ fbasic
    else:
        raise NotImplementedError(f'node_ref.cd={node_ref.cd} cid={cid} load:\n{str(load)}')

    # scatter the 3 components into the translational or rotational rows
    for dof in range(3):
        irow = dof_map[(nid, dof+offset)]
        Fg[irow] += fglobal[dof]
    return show_warning
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,644
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/gui/dev/gui2/load_actions.py
|
from __future__ import annotations
import os
from typing import Optional, TYPE_CHECKING
import time as time_module
import traceback
from qtpy.compat import getopenfilename
from pyNastran.utils import print_bad_path
if TYPE_CHECKING: # pragma: no cover
from pyNastran.gui.gui2 import MainWindow2
IS_TESTING = False
class LoadActions:
"""performance mode should be handled in the main gui to minimize flipping"""
    def __init__(self, gui: MainWindow2):
        # back-reference to the main window so actions can reach its
        # logger, state, and rendering objects
        self.gui = gui
    @property
    def log(self):
        """links to the GUI's log"""
        return self.gui.log
def on_load_geometry(self,
infile_name: Optional[str]=None,
geometry_format: Optional[str]=None,
name: str='main',
plot: bool=True,
raise_error: bool=False):
"""
Loads a baseline geometry
Parameters
----------
infile_name : str; default=None -> popup
path to the filename
geometry_format : str; default=None
the geometry format for programmatic loading
name : str; default='main'
the name of the actor; don't use this
plot : bool; default=True
Should the baseline geometry have results created and plotted/rendered?
If you're calling the on_load_results method immediately after, set it to False
raise_error : bool; default=True
stop the code if True
"""
assert isinstance(name, str), 'name=%r type=%s' % (name, type(name))
is_failed, out = self._load_geometry_filename(
geometry_format, infile_name)
print("is_failed =", is_failed)
if is_failed:
return
has_results = False
log = self.log
infile_name, load_function, filter_index, formats, geometry_format2 = out
if load_function is not None:
self.gui.last_dir = os.path.split(infile_name)[0]
if self.gui.name == '':
name = 'main'
else:
print('name = %r' % name)
active_name = self.gui.name
alt_grids = self.gui.alt_grids
grid_mappers = self.gui.grid_mappers
if name != active_name and active_name in grid_mappers:
#scalar_range = self.grid_selected.GetScalarRange()
#self.grid_mapper.SetScalarRange(scalar_range)
grid_mappers[active_name].ScalarVisibilityOff()
#self.grid_mapper.SetLookupTable(self.color_function)
self.gui.name = name
self.gui._reset_model(name)
# reset alt grids
names = self.gui.alt_grids.keys()
for name in names:
alt_grid = alt_grids[name]
alt_grid.Reset()
alt_grid.Modified()
if not os.path.exists(infile_name) and geometry_format:
msg = 'input file=%r does not exist' % infile_name
log.error(msg)
log.error(print_bad_path(infile_name))
return
# clear out old data
if self.gui.model_type is not None:
clear_name = 'clear_' + self.gui.model_type
try:
dy_method = getattr(self, clear_name) # 'self.clear_nastran()'
dy_method()
except Exception:
self.gui.log.error("method %r does not exist" % clear_name)
log.info("reading %s file %r" % (geometry_format, infile_name))
try:
time0 = time_module.time()
if geometry_format2 in self.gui.format_class_map:
# intialize the class
#print('geometry_format=%r geometry_format2=%s' % (geometry_format, geometry_format2))
# TODO: was geometry_format going into this...
cls = self.gui.format_class_map[geometry_format2](self.gui)
function_name2 = 'load_%s_geometry' % geometry_format2
load_function2 = getattr(cls, function_name2)
has_results = load_function2(infile_name, name=name, plot=plot)
else:
has_results = load_function(infile_name, name=name, plot=plot) # self.last_dir,
dt = time_module.time() - time0
print('dt_load = %.2f sec = %.2f min' % (dt, dt / 60.))
#else:
#name = load_function.__name__
#self.log_error(str(args))
#self.log_error("'plot' needs to be added to %r; "
#"args[-1]=%r" % (name, args[-1]))
#has_results = load_function(infile_name) # , self.last_dir
#form, cases = load_function(infile_name) # , self.last_dir
except Exception as error:
#raise
msg = traceback.format_exc()
log.error(msg)
if raise_error or self.gui.dev:
raise
#return
#self.vtk_panel.Update()
self.gui.rend.ResetCamera()
# the model has been loaded, so we enable load_results
if filter_index >= 0:
self.gui.format = formats[filter_index].lower()
unused_enable = has_results
#self.load_results.Enable(enable)
else: # no file specified
return
#print("on_load_geometry(infile_name=%r, geometry_format=None)" % infile_name)
self.gui.infile_name = infile_name
self.gui.out_filename = None
#if self.out_filename is not None:
#msg = '%s - %s - %s' % (self.format, self.infile_name, self.out_filename)
if name == 'main':
msg = '%s - %s' % (self.gui.format, self.gui.infile_name)
self.gui.window_title = msg
self.gui.update_menu_bar()
main_str = ''
else:
main_str = ', name=%r' % name
self.gui.log_command("on_load_geometry(infile_name=%r, geometry_format=%r%s)" % (
infile_name, self.gui.format, main_str))
def _load_geometry_filename(self, geometry_format: str, infile_name: str) -> Tuple[bool, Any]:
"""gets the filename and format"""
wildcard = ''
is_failed = False
if geometry_format and geometry_format.lower() not in self.gui.supported_formats:
is_failed = True
msg = f'The import for the {geometry_format!r} module failed.\n'
self.gui.log_error(msg)
if IS_TESTING: # pragma: no cover
raise RuntimeError(msg)
return is_failed, None
if infile_name:
if geometry_format is None:
is_failed = True
msg = 'infile_name=%r and geometry_format=%r; both must be specified\n' % (
infile_name, geometry_format)
self.gui.log_error(msg)
return is_failed, None
geometry_format = geometry_format.lower()
for fmt in self.gui.fmts:
fmt_name, _major_name, _geom_wildcard, geom_func, res_wildcard, _resfunc = fmt
if geometry_format == fmt_name:
load_function = geom_func
if res_wildcard is None:
unused_has_results = False
else:
unused_has_results = True
break
else:
self.gui.log_error('---invalid format=%r' % geometry_format)
is_failed = True
return is_failed, None
formats = [geometry_format]
filter_index = 0
else:
# load a pyqt window
formats = []
load_functions = []
has_results_list = []
wildcard_list = []
# setup the selectable formats
for fmt in self.gui.fmts:
fmt_name, _major_name, geom_wildcard, geom_func, res_wildcard, _res_func = fmt
formats.append(_major_name)
wildcard_list.append(geom_wildcard)
load_functions.append(geom_func)
if res_wildcard is None:
has_results_list.append(False)
else:
has_results_list.append(True)
assert len(load_functions) > 0, load_functions
# the list of formats that will be selectable in some odd syntax
# that pyqt uses
wildcard = ';;'.join(wildcard_list)
# get the filter index and filename
if infile_name is not None and geometry_format is not None:
filter_index = formats.index(geometry_format)
else:
title = 'Choose a Geometry File to Load'
wildcard_index, infile_name = self.create_load_file_dialog(wildcard, title)
if not infile_name:
# user clicked cancel
is_failed = True
return is_failed, None
filter_index = wildcard_list.index(wildcard_index)
geometry_format = formats[filter_index]
load_function = load_functions[filter_index]
unused_has_results = has_results_list[filter_index]
return is_failed, (infile_name, load_function, filter_index, formats, geometry_format)
def create_load_file_dialog(self, qt_wildcard: str, title: str,
default_filename: Optional[str]=None) -> Tuple[str, str]:
#options = QFileDialog.Options()
#options |= QFileDialog.DontUseNativeDialog
#fname, flt = QFileDialog.getOpenFileName(
#self, title, default_filename, file_types, options=options)
#flt = str(filt).strip()
#return fname, flt
if default_filename is None:
default_filename = self.gui.last_dir
fname, wildcard_level = getopenfilename(
parent=self.gui, caption=title,
basedir=default_filename, filters=qt_wildcard,
selectedfilter='', options=None)
return wildcard_level, fname
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,645
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/bdf_interface/hdf5_exporter.py
|
"""Defines various helper functions for exporting a HDF5 BDF file"""
from __future__ import annotations
from collections import defaultdict
from typing import List, Any, TYPE_CHECKING
from io import StringIO
import numpy as np
from pyNastran.utils.dict_to_h5py import (
add_list_tuple, integer_types, float_types)
from pyNastran.bdf.bdf_interface.add_card import CARD_MAP
from pyNastran.utils import object_attributes
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
# ---------------------------------------------------------------------------
# The lists below drive the generic export loops in ``export_bdf_to_hdf5_file``:
# each entry is the name of a BDF attribute, and the list it appears in
# selects which ``_export_*`` helper writes it to HDF5.
# ---------------------------------------------------------------------------

# exported by _export_dict_int_list_obj_attrs
# dict[key] : [value1, value2, ...]
dict_int_list_obj_attrs = [
    'spcs', 'spcadds',
    'mpcs', 'mpcadds',
    'loads', 'load_combinations',
    'dloads', 'dload_entries',
    #'usets', # has string keys
    'nsms', 'nsmadds',
    'frequencies',
    'bcs', 'transfer_functions',
    'dvgrids',

    # parametric
    'pval',
]

# exported by _export_dict_int_obj_attrs
# dict[key] : value
dict_int_obj_attrs = [
    # are handled explictly----
    #'elements',
    #'nodes',
    #'coords',
    #'materials',
    #'properties',
    #'masses',
    #'tables',
    #'methods',
    #'creep_materials', 'csschds',
    #'flutters',
    #'gusts',
    #'trims',
    #'plotels',
    'MATS1', 'MATS3', 'MATS8', 'MATT1', 'MATT2', 'MATT3', 'MATT4', 'MATT5', 'MATT8', 'MATT9',

    # TODO: don't work
    #'reject_count',
    'dresps',

    # optimization
    'dconadds', 'ddvals', 'dequations', 'dscreen',
    'dvcrels', 'dvmrels', 'dvprels',

    # aero
    'aecomps', 'aefacts', 'aelists', 'aeparams',
    'aestats', 'aesurf', 'aesurfs',
    'divergs', 'dlinks',
    'flfacts', 'paeros',

    # contact
    'bconp', 'bcrparas', 'bctadds',
    'bctparas', 'bctsets', 'blseg', 'bsurf', 'bsurfs',
    'bgadds', 'bgsets', 'bctparms',
    'bfric',

    # other
    'ao_element_flags', 'cMethods',
    'convection_properties',
    'dareas',
    'dmig', 'dmiji', 'dmij', 'dmik', 'dmi', 'dmiax',
    'dti',
    'dphases', 'delays',
    'epoints', 'gridb',
    'nlparms', 'nlpcis',
    'normals',
    'nxstrats',
    'pbusht', 'pdampt', 'pelast', 'phbdys', 'points',
    'properties_mass',
    'radcavs', 'radmtx', 'random_tables',
    'rotors',
    'sets',
    'spcoffs',
    'spoints',
    'suport1',
    'tables_d', 'tables_m', 'tables_sdamping', 'tempds',
    'tics',
    'tstepnls', 'tsteps',
    'view3ds', 'views',

    # superelements
    'csuper', 'csupext',
    'se_sets', 'se_usets', 'sebndry', 'sebulk', 'seconct', 'seelt',
    'seexcld', 'selabel', 'seload', 'seloc', 'sempln', 'senqset', 'setree',
    'release',

    # axisymmetric
    'ringaxs', 'ringfl',

    # parametric
    'pset', 'gmcurv', 'feedge', 'feface', 'gmsurf',

    # cyclic
    'cyjoin',
]

# single card objects; exported one-by-one in export_bdf_to_hdf5_file
scalar_obj_keys = [
    # required----
    'aero', 'aeros', 'axic', 'axif', 'cyax', 'baror', 'beamor',
    'acmodl', 'modtrak',
    'doptprm',
    'dtable', 'grdset', 'radset', 'seqgp',
    'case_control_deck',
    #'zona',
]

# plain int/float/str/bytes attributes; exported by _export_minor_attributes
scalar_keys = [
    # handled separately----
    #'cards_to_read',

    # basic types----
    'bdf_filename',
    '_auto_reject', '_encoding', '_iparse_errors', '_is_axis_symmetric', '_is_cards_dict',
    '_is_dynamic_syntax', '_is_long_ids', '_ixref_errors', '_nastran_format', '_nparse_errors',
    '_nxref_errors', '_sol', '_stop_on_duplicate_error', '_stop_on_parsing_error',
    '_stop_on_xref_error',
    '_xref', 'active_filename',
    'dumplines', 'echo', 'force_echo_off', 'include_dir',
    'is_msc', 'is_nx', 'punch',
    'read_includes', 'save_file_structure', 'sol', 'sol_iline', 'nastran_format',
    'is_superelements', 'is_zona', 'sol_method', 'debug',
    #'_unique_bulk_data_cards',
    #'is_bdf_vectorized',
    #'is_long_ids',

    # not sure----
    #'nid_cp_cd', 'xyz_cid0',

    # removed-----
    #'ncaeros', 'ncoords', 'nnodes', 'nelements', 'nproperties', 'nmaterials',
    #'point_ids', 'wtmass', 'log',
]

# flat lists of scalars; exported by _export_list_keys
LIST_KEYS = [
    # handled in minor_attributes----
    #'active_filenames', 'executive_control_lines', 'case_control_lines',
    #'system_command_lines',
    #'reject_cards',

    # required------------------
    #'initial_superelement_models',

    # maybe...
    #'_duplicate_coords', '_duplicate_elements', '_duplicate_masses', '_duplicate_materials',
    '_duplicate_nodes', '_duplicate_properties',
    '_duplicate_thermal_materials', '_stored_parse_errors',
    #'_stored_xref_errors',
    #'units', 'xyz_limits',

    # removed
    #'coord_ids', 'element_ids', 'material_ids', 'node_ids', 'property_ids', 'caero_ids',
    #'special_cards',
]

# lists of card objects; exported by _export_list_obj_keys
LIST_OBJ_KEYS = [ ## TODO: not done
    # TODO: required
    'asets', 'bsets', 'csets', 'omits', 'qsets',
    'mkaeros',
    'monitor_points',
    'suport',
    'se_bsets', 'se_csets', 'se_qsets', 'se_suport',
]
def export_bdf_to_hdf5_file(hdf5_file, model: BDF, exporter=None):
    """
    Converts the BDF objects into hdf5 object

    Parameters
    ----------
    hdf5_file : H5File()
        an h5py object
    model : BDF
        the fully-populated model to export
    exporter : HDF5Exporter; default=None
        unused
    """
    # NOTE(review): the result is unused; this call looks like leftover
    # debugging/introspection
    unused_attrs = object_attributes(model, mode='both', keys_to_skip=None,
                                     filter_properties=True)
    encoding = model.get_encoding()

    # nodes get their own explicit group layout: nodes/GRID
    if 'GRID' in model.card_count:
        model.log.debug('exporting nodes')
        node_group = hdf5_file.create_group('nodes')
        grid_group = node_group.create_group('GRID')
        nids = model._type_to_id_map['GRID']
        if len(nids) == 0:
            assert len(model.nodes) == 0, len(model.nodes)
        CARD_MAP['GRID'].export_to_hdf5(grid_group, model, nids)

    _hdf5_export_group(hdf5_file, model, 'coords', encoding, debug=False)
    _hdf5_export_elements(hdf5_file, model, encoding)

    # explicit groups
    #
    # these are broken down by card type
    # they came from dict_int_obj_attrs
    groups_to_export = [
        'properties', 'masses', 'rigid_elements', 'plotels',

        # materials
        'materials', 'thermal_materials', 'creep_materials', 'hyperelastic_materials',
        #'MATS1',
        #'MATT1', 'MATT2', 'MATT3', 'MATT4', 'MATT5', 'MATT8', 'MATT9',

        # aero
        'caeros', 'splines', 'flutters', 'trims', 'csschds', 'gusts',

        # other
        'methods', 'tables', 'desvars', 'topvar',
    ]
    for group_name in groups_to_export:
        _hdf5_export_group(hdf5_file, model, group_name, encoding)

    unused_dict_int_attrs = [  # TODO: not used...
        # required
        '_dmig_temp',
        'include_filenames',
        'superelement_models',
        'values_to_skip',

        # removed
        #'rsolmap_to_str',
        #'nid_map',
        #'subcases',
    ]

    # generic exports driven by the module-level attribute lists
    _export_dict_int_obj_attrs(model, hdf5_file, encoding)
    _export_dict_int_list_obj_attrs(model, hdf5_file, encoding)
    _export_dconstrs(hdf5_file, model, encoding)

    #for key in scalar_obj_keys:
        #value = getattr(model, key)
        #hdf5_file.create_dataset(key, value)
    if model.params:
        model.log.debug('exporting params')
        skip_attrs = ['comment', '_field_map']
        group = hdf5_file.create_group('params')
        for key, param in model.params.items():
            _h5_export_class(group, model, key, param, skip_attrs, encoding, debug=False)

    if model.mdlprm:
        # NOTE(review): the log message says 'params' (copy/paste), but this
        # exports the MDLPRM card
        model.log.debug('exporting params')
        skip_attrs = ['comment', '_field_map']
        group = hdf5_file.create_group('mdlprm')
        model.mdlprm.export_to_hdf5(group, model, encoding)

    if model.aelinks:
        model.log.debug('exporting aelinks')
        skip_attrs = ['comment', '_field_map']
        group = hdf5_file.create_group('aelinks')
        # aelinks/<aelink_id>/<j> per AELINK card
        for aelink_id, aelinks in model.aelinks.items():
            groupi = group.create_group(str(aelink_id))
            for j, aelinki in enumerate(aelinks):
                key = str(j)
                _h5_export_class(groupi, model, key, aelinki, skip_attrs, encoding, debug=False)

    if model.usets:
        model.log.debug('exporting usets')
        skip_attrs = ['comment', '_field_map']
        group = hdf5_file.create_group('usets')
        # usets has string keys, so it cannot go through dict_int_list_obj_attrs
        for name, usets in model.usets.items():
            groupi = group.create_group(name)
            #print(usets)
            for i, uset in enumerate(usets):
                #print(uset.get_stats())
                key = str(i)
                _h5_export_class(groupi, model, key, uset, skip_attrs, encoding, debug=False)

    _export_scalar_group(hdf5_file, model, encoding)

    # one-off card objects (AERO, DOPTPRM, the case control deck, ...)
    skip_attrs = ['comment', '_field_map']
    for key in scalar_obj_keys:
        value = getattr(model, key)
        if value is None:
            #print('None: %s %s' % (key, value))
            pass
        else:
            model.log.debug('exporting %s' % key)
            _h5_export_class(hdf5_file, model, key, value, skip_attrs, encoding, debug=False)

    _export_list_keys(model, hdf5_file, LIST_KEYS)
    _export_list_obj_keys(model, hdf5_file, LIST_OBJ_KEYS, encoding)

    # store the enabled card names, sorted, as bytes
    cards_to_read = [key.encode(encoding) for key in list(model.cards_to_read)]
    cards_to_read = list(cards_to_read)
    cards_to_read.sort()
    hdf5_file.create_dataset('cards_to_read', data=cards_to_read)
    #dict_keys2 = []
    #list_keys2 = []
    #other_keys2 = []
    #for key in attrs:
        #value = getattr(model, key)
        #if isinstance(value, dict):
            #dict_keys2.append(key)
        #elif isinstance(value, list):
            #list_keys2.append(key)
        #else:
            #other_keys2.append(key)
    #print('dict_keys2 = %s' % (set(dict_keys) - set(dict_keys2)))
    #print('list_keys2 = %s' % (set(list_keys) - set(list_keys2)))
    #print('other_keys2 = %s' % (set(other_keys) - set(other_keys2)))
    #asd
def _export_dconstrs(hdf5_file, model: BDF, encoding):
"""exports the dconstrs, which includes DCONSTRs and DCONADDs"""
if model.dconstrs:
dconstrs_group = hdf5_file.create_group('dconstrs')
unused_keys = list(model.dconstrs.keys())
dconstrsi = []
dconadds = []
for unused_key, dconstrs in model.dconstrs.items():
#group = dconstrs_group.create_group(str(key))
for dconstr in dconstrs:
Type = dconstr.type
if Type == 'DCONSTR':
dconstrsi.append(dconstr)
elif Type == 'DCONADD':
dconadds.append(dconstr)
ndconstrs = len(dconstrsi)
if ndconstrs:
dconstr_group = dconstrs_group.create_group('DCONSTR')
unused_keys = np.arange(ndconstrs, dtype='int32')
dconstr0 = dconstrsi[0]
dconstr0.export_to_hdf5(dconstr_group, dconstrsi, encoding)
ndconstrs = len(dconstrsi)
ndconadds = len(dconadds)
if ndconadds:
dconadd_group = dconstrs_group.create_group('DCONADD')
unused_keys = np.arange(ndconadds, dtype='int32')
dconadds0 = dconadds[0]
dconadds0.export_to_hdf5(dconadd_group, dconadds, encoding)
def _export_scalar_group(hdf5_file, model: BDF, encoding):
    """Writes the minor attributes plus the raw deck/reject lines."""
    group = _export_minor_attributes(hdf5_file, model, encoding)

    # raw deck lines are stored (encoded) in the minor_attributes group
    line_attrs = ('case_control_lines', 'executive_control_lines',
                  'system_command_lines', 'active_filenames')
    for attr in line_attrs:
        lines = getattr(model, attr)
        if len(lines):
            group.create_dataset(attr, data=[line.encode(encoding) for line in lines])

    if len(model.reject_lines):
        rejects = group.create_group('reject_lines')
        for i, lines in enumerate(model.reject_lines):
            rejects.create_dataset(str(i), data=[line.encode(encoding) for line in lines])

    if len(model.reject_cards):
        rejects = group.create_group('reject_cards')
        for i, card in enumerate(model.reject_cards):
            # None fields become empty byte strings
            encoded = [field.encode(encoding) if field is not None else b''
                       for field in card.fields()]
            rejects.create_dataset(str(i), data=encoded)
def _export_minor_attributes(hdf5_file, model: BDF, encoding):
    """
    Writes the simple scalar attributes listed in ``scalar_keys``
    (encoding, include_dir, sol, punch, the is_msc/is_nx flags, the
    filenames, ...) into a 'minor_attributes' group, plus an
    'is_enddata' flag.  Returns the group.
    """
    scalar_group = None
    keys_to_write = [key for key in scalar_keys if hasattr(model, key)]
    if keys_to_write:
        scalar_group = hdf5_file.create_group('minor_attributes')
        scalar_group.create_dataset('encoding', data=encoding)
        for key in sorted(keys_to_write):
            value = getattr(model, key)
            if value is None:
                continue
            if isinstance(value, (integer_types, float_types, str, np.ndarray)):
                try:
                    scalar_group.create_dataset(key, data=value)
                except TypeError:  # pragma: no cover
                    print('key=%r value=%s type=%s' % (key, str(value), type(value)))
                    raise
            elif isinstance(value, StringIO):
                # in-memory decks carry a StringIO "filename"; nothing to store
                pass
            elif isinstance(value, bytes):
                # raw bytes must be wrapped so h5py stores them opaquely
                assert len(value) > 0, key
                scalar_group.create_dataset(key, data=np.void(value))
            else:
                scalar_group.create_dataset(key, data=value)

    # NOTE(review): if no scalar key matched, scalar_group is still None and
    # this line would raise AttributeError; in practice scalar_keys always
    # matches at least one BDF attribute
    scalar_group.create_dataset('is_enddata', data='ENDDATA' in model.card_count)
    return scalar_group
def _export_dict_int_obj_attrs(model: BDF, hdf5_file, encoding):
    """Exports every non-empty ``{id: card}`` dict named in ``dict_int_obj_attrs``."""
    unused_cards = set(list(CARD_MAP.keys()))
    for attr in dict_int_obj_attrs:
        cards = getattr(model, attr)
        if not len(cards):
            continue
        try:
            group = hdf5_file.create_group(attr)  # e.g. 'gusts'
        except ValueError:  # pragma: no cover
            model.log.error('cant create %r' % attr)
            raise
        _hdf5_export_object_dict(group, model, attr, cards, cards.keys(), encoding)
def _export_dict_int_list_obj_attrs(model: BDF, hdf5_file, encoding):
    """Exports every ``{id: [card, ...]}`` dict named in ``dict_int_list_obj_attrs``
    (spcs, mpcs, loads, ...); each id gets a sub-group split by card type."""
    for attr in dict_int_list_obj_attrs:
        dict_obj = getattr(model, attr)  # spcs
        if not len(dict_obj):
            continue
        model.log.debug('exporting %s' % attr)
        try:
            group = hdf5_file.create_group(attr) # 'spcs'
        except ValueError: # pragma: no cover
            model.log.error('cant create %r' % attr)
            raise
        keys = list(dict_obj.keys())
        keys.sort()
        #model.log.debug('keys = %s' % keys)
        # NOTE(review): none of the names in dict_int_list_obj_attrs is a dmig-style
        # attr, so this branch looks unreachable; if it ever fired, the ``return``
        # below would abort the export of the REMAINING attrs, not just this one
        if attr in ['dmig', 'dmij', 'dmi', 'dmik', 'dmiji', 'dmiax']:
            #print('keys =', keys)
            key0 = keys[0]
            value = dict_obj[key0]
            group.attrs['type'] = value.type
            #print('setting type', group, value.type)
            model.log.debug('type = %s' % value.type)
            model.log.debug('export 364')
            value.export_to_hdf5(group, model, encoding)
            return
        group.create_dataset('keys', data=keys)
        for spc_id, spcs_obj in sorted(dict_obj.items()):
            id_group = group.create_group(str(spc_id))
            # group the cards under this id by their card type
            card_types = defaultdict(list)
            for spc in spcs_obj:
                card_types[spc.type].append(spc)
            for card_type, spcs in card_types.items():
                card_group = id_group.create_group(card_type)
                class_obj = spcs[0]
                # prefer the class's bulk exporter; fall back to per-object export
                if hasattr(class_obj, 'export_to_hdf5'):
                    class_obj.export_to_hdf5(card_group, model, spcs)
                else:
                    indices = list(range(len(spcs)))
                    _hdf5_export_object_dict(card_group, model,
                                             '%s/id=%s/%s' % (attr, spc_id, card_type),
                                             spcs, indices, encoding)
def _export_list_keys(model: BDF, hdf5_file, list_keys):
for attr in list_keys:
#print('list_key: %s' % attr)
list_obj = getattr(model, attr) # active_filenames
if not len(list_obj):
continue
#model.log.info(attr)
#try:
#group = hdf5_file.create_group(attr) # 'active_filenames'
#except ValueError:
#model.log.error('cant create %r' % attr)
#raise
if isinstance(list_obj, list):
Type = 'list'
elif isinstance(list_obj, tuple):
Type = 'tuple'
else:
raise NotImplementedError(type(list_obj))
#indices = list(range(len(list_keys)))
#group.create_dataset('keys', data=keys)
if isinstance(list_obj[0], bytes):
raise RuntimeError(list_obj[0])
if isinstance(list_obj[0], (int, float, str)):
try:
add_list_tuple(hdf5_file, attr, list_obj, Type, model.log)
except TypeError: # pragma: no cover
print(list_obj)
raise
#elif isinstance(list_obj[0], list):
#group = hdf5_file.create_group(attr)
#group.attrs['type'] = Type
#for keyi, valuei in enumerate(list_obj):
##sub_group = hdf5_file.create_group(str(keyi))
##group
#add_list_tuple(group, str(keyi), valuei, Type, model.log)
else:
raise NotImplementedError(type(list_obj[0]))
#_hdf5_export_object_dict(group, model, attr, list_obj, indices, encoding)
def _export_list_obj_keys(model: BDF, hdf5_file, list_obj_keys, encoding):
for attr in list_obj_keys:
list_obj = getattr(model, attr) # active_filenames
if not len(list_obj):
#model.log.debug('skipping list_key: %s' % attr)
continue
#model.log.debug('exporting %s' % attr)
try:
group = hdf5_file.create_group(attr) # 'active_filenames'
except ValueError: # pragma: no cover
model.log.error('cant create %r' % attr)
raise
#if isinstance(list_obj, list):
#Type = 'list'
#elif isinstance(list_obj, tuple):
#Type = 'tuple'
#else:
#raise NotImplementedError(type(list_obj))
indices = list(range(len(list_obj)))
#group.create_dataset('keys', data=indices)
#try:
#add_list_tuple(hdf5_file, attr, list_obj, Type, model.log)
#except TypeError:
#print(list_obj)
#raise
_hdf5_export_object_dict(group, model, attr, list_obj, indices, encoding)
def _h5_export_class(sub_group: Any, model: BDF, key: str, value: Any,
                     skip_attrs: List[str], encoding: str, debug: bool=True) -> None:
    """Exports a single card object *value* under ``sub_group/<key>``.

    If the class defines ``export_to_hdf5``, that is used; otherwise each
    non-None, non-skipped attribute is written as a dataset (or as a numbered
    sub-group when the value is a dict or contains mixed types/Nones).
    """
    #model.log.debug('exporting %s to hdf5' % key)
    #sub_groupi = sub_group.create_group('values')
    class_group = sub_group.create_group(str(key))
    try:
        class_group.attrs['type'] = value.type
    except Exception:  # pragma: no cover
        print('key = %r' % key)
        print('value', value)
        model.log.error('ERROR: key=%s value=%s' % (key, value))
        raise

    #if hasattr(value, 'get_h5attrs'):
        #getattrs
    if hasattr(value, 'export_to_hdf5'):
        # the card class knows how to serialize itself; done
        #print('value =', value, type(value))
        #print('class_group', class_group)
        #model.log.debug('  export 477 - %s' % class_group)
        value.export_to_hdf5(class_group, model, encoding)
        return
    elif hasattr(value, 'object_attributes'):
        # generic path: reflect over the object's public attributes
        keys_to_skip = []
        if hasattr(value, '_properties'):
            keys_to_skip = value._properties
        h5attrs = value.object_attributes(mode='both', keys_to_skip=keys_to_skip,
                                          filter_properties=True)
        if hasattr(value, '_properties'):
            h5attrs.remove('_properties')
        #sub_group = hdf5_file.create_group(key)
    else:
        raise NotImplementedError(value)

    #if hasattr(value, '_properties'):
        #print(value.type, value._properties)
        #if debug:
            #print(h5attrs)
        #for prop in value._properties:
            #try:
                #h5attrs.remove(prop)
            #except Exception:
                #print('cant remove %s' % prop)
                #print(value)
                #raise
        #h5attrs.remove('_properties')
    if debug:
        model.log.debug(value)

    for h5attr in h5attrs:
        # cross-reference attributes and explicitly skipped ones are not stored
        if '_ref' in h5attr or h5attr in skip_attrs:
            continue
        class_value = getattr(value, h5attr)
        if class_value is None:
            continue

        #model.log.info('%s %s %s' % (key, h5attr, class_value))
        if debug:
            model.log.info('%s %s %s' % (key, h5attr, class_value))

        if isinstance(class_value, dict):
            # dicts are stored as parallel 'keys'/'values' lists
            class_group.attrs['type'] = 'dict'
            param_group = class_group.create_group(h5attr)
            keysi = []
            valuesi = []
            for i, (keyi, valuei) in enumerate(class_value.items()):
                keysi.append(keyi)
                valuesi.append(valuei)
                #if isinstance(valuei, str):
                    #param_group.create_dataset(str(i), data=valuei.encode('ascii'))
                #elif valuei is None:
                    #param_group.create_dataset(str(i), data=np.nan)
                #else:
                    #param_group.create_dataset(str(i), data=valuei)
            model.log.debug(' exporting dict as keys/values for %s (%s)' % (h5attr, value.type))
            _export_list(param_group, '%s/%s/%s' % (value.type, key, h5attr),
                         'keys', keysi, encoding)
            _export_list(param_group, '%s/%s/%s' % (value.type, key, h5attr),
                         'values', valuesi, encoding)
            continue
        elif isinstance(class_value, (list, np.ndarray)):
            if len(class_value) == 0: # empty list
                continue
            # flag which entries are None so the storage layout can be chosen
            is_nones = []
            for class_valuei in class_value:
                is_none = False
                if class_valuei is None: # PAERO2 : lth
                    is_none = True
                    #break
                is_nones.append(is_none)
        elif isinstance(class_value, (integer_types, float_types, str, bool)):
            is_nones = [False]
        #elif isinstance(class_value, dict) and len(class_value) == 0:
            #pass
        else:
            raise NotImplementedError('%s %s; class_value=%s type=%s' % (
                getattr(value, 'type'), key, class_value, type(class_value)))

        # NOTE(review): is_none is assigned but never read below
        is_none = any(is_nones)
        if all(is_nones):
            # every entry is None; nothing useful to store
            model.log.warning('skipping %s attribute: %s %s %s' % (
                value.type, key, h5attr, class_value))
        elif all([not is_nonei for is_nonei in is_nones]): # no Nones
            # no Nones
            try:
                class_group.create_dataset(h5attr, data=class_value)
            except ValueError: # pragma: no cover
                print(h5attr, class_group)
                raise
            except TypeError:
                # contains unicode
                class_group.attrs['type'] = 'list'
                param_group = class_group.create_group(h5attr)
                for i, valuei in enumerate(class_value):
                    if isinstance(valuei, str):
                        param_group.create_dataset(str(i), data=valuei.encode('ascii'))
                    else:
                        param_group.create_dataset(str(i), data=valuei)
                #if isinstance(class_value, list):
                    #print('type(value[0] =', class_value, type(class_value[0]))
                    #raise
        else:
            # mixed Nones and values
            class_group.attrs['type'] = 'list'
            param_group = class_group.create_group(h5attr)
            for i, valuei in enumerate(class_value):
                if isinstance(valuei, str):
                    param_group.create_dataset(str(i), data=valuei.encode('ascii'))
                elif valuei is None:
                    # None entries are stored as NaN placeholders
                    param_group.create_dataset(str(i), data=np.nan)
                else:
                    param_group.create_dataset(str(i), data=valuei)
            #raise RuntimeError('mixed %s attribute: %s %s %s' % (
                #value.type, key, h5attr, class_value))

    #assert isinstance(key, int), 'key=%s value=%s' % (key, value)
    if isinstance(value, list):
        raise NotImplementedError('list: %s' % value)

    #for valuei in value:
        #if valuei.type not in cards:
            #msg = 'key=%s type=%s value=%s=' % (key, valuei.type, value)
            #print(msg)
            #continue
    #if attr in ['elements']:
        #continue
    #if value.type not in cards:
        #msg = 'key=%s type=%s value=%s=' % (key, value.type, value)
        #print(msg)
#def _export_lists(h5_group, attr, name, values, encoding):
#print(name, attr, values)
def _export_list(h5_group, attr, name, values, encoding):
"""
exports a list of:
- constant type to a dataset
- variable type to a numbered list
"""
values2 = [value.encode(encoding) if isinstance(value, str) else value
for value in values]
types = {type(value) for value in values}
if len(types) == 1:
#print('types =', types)
#if isinstance(values[0], list):
#return _export_lists(h5_group, attr, name, values, encoding)
if not isinstance(values[0], (integer_types, float_types, str)):
raise TypeError('not a base type; %s; %s' % (attr, values2))
try:
h5_group.create_dataset(name, data=values2)
except TypeError: # pragma: no cover
print(attr, name, values2)
raise
else:
sub_group = h5_group.create_group(name)
sub_group.attrs['type'] = 'list'
for i, value in enumerate(values2):
if value is None:
sub_group.create_dataset(str(i), data=np.nan)
else:
try:
sub_group.create_dataset(str(i), data=value)
except TypeError: # pragma: no cover
print(attr, name, values2, i)
raise
#print('%s2' % name, values2)
#h5_group.create_dataset(name, data=values2)
#raise
def _hdf5_export_elements(hdf5_file, model: BDF, encoding):
    """
    exports the elements to an hdf5_file

    TODO: not done
    """
    etypes_actual = []
    if len(model.elements) == 0:
        return
    etypes = model._slot_to_type_map['elements']
    # collect the element card types actually present in the deck
    for card_name in model.card_count:
        #CTRIA3, CQUAD4
        #CONROD
        #CBUSH
        #CBEAM
        #CPENTA, CHEXA
        if card_name in etypes:
            #model.log.debug(card_name)
            etypes_actual.append(card_name)
            continue
    if etypes_actual:
        elements_group = hdf5_file.create_group('elements')
        def save_solids(etype, slot_name):
            # solids with variable node counts export through their
            # max-node slot class (e.g. CHEXA via CHEXA20)
            element_group = elements_group.create_group(etype)
            eids = model._type_to_id_map[etype]
            CARD_MAP[slot_name].export_to_hdf5(element_group, model, eids)
        solids = [
            ('CTETRA', 'CTETRA4'),
            ('CPENTA', 'CPENTA6'),
            ('CPYRAM', 'CPYRAM5'),
            ('CHEXA', 'CHEXA20'),
        ]
        # handle the solids first, removing them from the generic pass below
        for card_name, slot_name in solids:
            if card_name in model.card_count:
                save_solids(card_name, slot_name)
                etypes_actual.remove(card_name)
        for card_type in sorted(etypes_actual):
            element_group = elements_group.create_group(card_type)
            eids = model._type_to_id_map[card_type]
            #print(card_type, eids)
            if len(eids) == 0:
                continue
            #for eid in eids:
                #elem = model.elements[eid]
                #print(elem)
            # prefer the class's bulk exporter; fall back to per-object export
            class_obj = CARD_MAP[card_type]
            if hasattr(class_obj, 'export_to_hdf5'):
                class_obj.export_to_hdf5(element_group, model, eids)
            else:
                _hdf5_export_object_dict(element_group, model, card_type,
                                         model.elements, eids, encoding)
def _hdf5_export_group(hdf5_file, model: BDF, group_name, encoding, debug=False):
    """
    exports the properties to an hdf5_file

    ``group_name`` is both the model attribute to read (e.g. 'properties')
    and the name of the h5 group that is created.
    """
    data_dict = getattr(model, group_name)  # e.g. model.properties
    if debug:
        model.log.debug('%s %s' % (group_name, data_dict))

    card_types = model._slot_to_type_map[group_name]
    if debug:
        model.log.debug('card_count = %s' % model.card_count)
        model.log.debug('types = %s' % card_types)

    # keep only the card types that actually appear in the model
    present_types = [card_name for card_name in card_types
                     if card_name in model.card_count]
    if not present_types:
        # nothing of this group in the model -> no h5 group at all
        return

    model.log.debug('exporting %s' % group_name)
    if debug:  # pragma: no cover
        print('types_actual =', present_types)

    group = hdf5_file.create_group(group_name)
    for card_type in present_types:
        sub_group = group.create_group(card_type)
        ids = model._type_to_id_map[card_type]
        if debug:  # pragma: no cover
            print(ids)
        assert len(ids) > 0, '%s : %s' % (card_type, ids)

        class_obj = CARD_MAP[card_type]
        if hasattr(class_obj, 'export_to_hdf5'):
            class_obj.export_to_hdf5(sub_group, model, ids)
        else:
            _hdf5_export_object_dict(sub_group, model, card_type, data_dict, ids, encoding)
def _hdf5_export_object_dict(group, model: BDF, name, obj_dict, keys, encoding):
    """
    Exports a ``{key : card-object}`` dict into the h5 ``group``.

    Matrix-like cards (DMIG family, DMI, DRESPs) export the whole dict
    through a single class-level ``export_to_hdf5`` call; every other card
    gets one sub-group per key written by ``_h5_export_class``, plus a
    'keys' dataset listing the dict keys.
    """
    #i = 0
    skip_attrs = ['comment', '_field_map']

    keys_write = list(keys)
    if name in ['dmig', 'dmij', 'dmik', 'dmiji', 'dmiax', 'dmi', 'dresps']:
        keys = list(keys)
        #print(group)
        key0 = keys_write[0]
        value = obj_dict[key0]
        group.attrs['type'] = value.type
        #print('group...', group)
        model.log.debug('exporting %s' % name)
        # one exporter call writes the whole dict; nothing else to do
        value.export_to_hdf5(group, model, encoding)
        return

    # h5py cannot store unicode keys; byte-encode them for the 'keys' dataset
    if isinstance(keys_write[0], str):
        keys_write = list([key.encode(encoding) if isinstance(key, str) else key
                           for key in list(keys_write)])

    sub_group = group.create_group('values')
    assert isinstance(name, str), 'name=%s; type=%s' % (name, type(name))

    for key in keys:
        value = obj_dict[key]
        #if isinstance(value, str):
            #value = value.encode(encoding)

        #try:
        _h5_export_class(sub_group, model, key, value, skip_attrs, encoding, debug=False)
        #except Exception:  # pragma: no cover
            #raise
            # for debugging
            #sub_group2 = group.create_group('values2')
            #_h5_export_class(sub_group2, model, key, value, skip_attrs, encoding, debug=True)
        #i += 1
    #group.attrs['type'] = class_name

    #print('%s keys = %s' % (name, keys))
    try:
        group.create_dataset('keys', data=keys_write)
    except TypeError:  # pragma: no cover
        print('name =', name)
        print('encoding =', encoding)
        print('keys =', keys)
        raise
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,646
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/gui/gui_common.py
|
# coding: utf-8
# pylint: disable=W0201,C0301
import os.path
from math import ceil
from collections import OrderedDict
from typing import Tuple, List, Dict, Optional, Callable, Any
import numpy as np
from cpylog import SimpleLogger
from pyNastran.gui.qt_version import qt_version
from qtpy import QtCore, QtGui #, API
from qtpy.QtWidgets import (
QMessageBox, QWidget,
QMainWindow, QDockWidget, QFrame, QHBoxLayout, QAction, QToolBar, QMenu, QToolButton)
import vtk
import pyNastran
#print('qt_version = %r' % qt_version)
# vtk makes poor choices regarding the selection of a backend and has no way
# to work around it
#from vtk.qt5.QVTKRenderWindowInteractor import QVTKRenderWindowInteractor
from .qt_files.QVTKRenderWindowInteractor import QVTKRenderWindowInteractor
from pyNastran.utils import check_path
from pyNastran.utils.numpy_utils import integer_types
#from .qt_files.gui_attributes import IS_MATPLOTLIB, IS_CUTTING_PLANE
from .qt_files.gui_vtk_common import GuiVTKCommon
from .qt_files.scalar_bar import ScalarBar
from .gui_objects.alt_geometry_storage import AltGeometry
from .menus.menus import (
on_set_modify_groups, Group,
Sidebar,
ApplicationLogWidget,
PythonConsoleWidget)
from .menus.legend.write_gif import (
setup_animation, update_animation_inputs, write_gif, make_two_sided)
from .utils.vtk.animation_callback import AnimationCallback
from .utils.vtk.base_utils import numpy_to_vtk_idtype
try:
from cpylog.html_utils import str_to_html
except ImportError:
import warnings
warnings.warn('upgrade your cpylog to v1.4')
from .utils.html_utils import str_to_html
#from pyNastran.gui.menus.multidialog import MultiFileDialog
from pyNastran.gui.formats import CLASS_MAP
# http://pyqt.sourceforge.net/Docs/PyQt5/multiinheritance.html
class GuiCommon(QMainWindow, GuiVTKCommon):
"""this class adds in interactive/menu capability into the GUI"""
def __init__(self, **kwds):
    """
    Builds the main GUI window.

    kwds: fmt_order, html_logging, inputs, parent=None
    """
    # this will reset the background color/label color if things break
    #super(QMainWindow, self).__init__(self)
    if qt_version == 'pyqt5':
        super(GuiCommon, self).__init__(**kwds)
    elif qt_version == 'pyside2':
        # pyside2 doesn't forward **kwds cleanly through the MRO,
        # so call both bases explicitly
        QMainWindow.__init__(self)
        GuiVTKCommon.__init__(self, **kwds)
    else:  #: pragma: no cover
        raise NotImplementedError(qt_version)
    self.format_class_map = CLASS_MAP

    fmt_order = kwds['fmt_order']
    inputs = kwds['inputs']
    #self.app = inputs['app']
    #del inputs['app']
    if inputs['log'] is not None:
        # an external logger was supplied; don't log to the HTML widget
        html_logging = False
    else:
        html_logging = kwds['html_logging']
    del kwds['html_logging']

    #-----------------------------------------------------------------------
    self._active_background_image = None
    self.reset_settings = False
    self.fmts = fmt_order
    self.base_window_title = f'pyNastran v{pyNastran.__version__}'

    #defaults
    self.wildcard_delimited = 'Delimited Text (*.txt; *.dat; *.csv)'

    # initializes tools/checkables
    self.set_tools()

    self.html_logging = html_logging
    self.execute_python = True

    self.scalar_bar = ScalarBar(self.legend_obj.is_horizontal_scalar_bar)

    # in,lb,s
    self.input_units = ['', '', '']  # '' means not set
    self.display_units = ['', '', '']
    #self.recent_files = []
#def dragEnterEvent(self, e):
#print(e)
#print('drag event')
#if e.mimeData().hasFormat('text/plain'):
#e.accept()
#else:
#e.ignore()
#def dropEvent(self, e):
#print(e)
#print('drop event')
def init_ui(self):
    """
    Initialize user interface

    +--------------+
    | Window Title |
    +--------------+----------------+
    | Menubar                       |
    +-------------------------------+
    | Toolbar                       |
    +---------------------+---------+
    |                     |         |
    |                     |         |
    |                     | Results |
    |       VTK Frame     |  Dock   |
    |                     |         |
    |                     |         |
    +---------------------+---------+
    |                               |
    |       HTML Logging Dock       |
    |                               |
    +-------------------------------+
    """
    #self.resize(1100, 700)
    self.statusBar().showMessage('Ready')

    # windows title and aplication icon
    self.setWindowTitle('Statusbar')
    if self._logo is not None:
        self.setWindowIcon(QtGui.QIcon(self._logo))
    self.window_title = self.base_window_title

    #=========== Results widget ===================

    self.res_dock = QDockWidget('Results', self)
    self.res_dock.setObjectName('results_obj')
    #self.res_widget = QtGui.QTextEdit()
    #self.res_widget.setReadOnly(True)
    #self.res_dock.setWidget(self.res_widget)

    self.res_widget = Sidebar(
        self,
        include_case_spinner=True,
        include_deflection_scale=False,
        include_vector_scale=False,
    )
    #self.res_widget.update_results(data)
    #self.res_widget.setWidget(sidebar)

    self.res_dock.setWidget(self.res_widget)

    self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.res_dock)
    self.create_log_python_docks()
    #===============================================

    self.run_vtk = True
    if self.run_vtk:
        self._create_vtk_objects()
    self._build_menubar()
    #self._hide_menubar()

    if self.run_vtk:
        self.build_vtk_frame()

    #compassRepresentation = vtk.vtkCompassRepresentation()
    #compassWidget = vtk.vtkCompassWidget()
    #compassWidget.SetInteractor(self.vtk_interactor)
    #compassWidget.SetRepresentation(compassRepresentation)
    #compassWidget.EnabledOn()
def create_log_python_docks(self):
    """
    Creates the
     - HTML Log dock
     - Python Console dock
    """
    #=========== Logging widget ===================

    if self.html_logging is True:
        self.log_dock_widget = ApplicationLogWidget(self)
        self.log_widget = self.log_dock_widget.log_widget
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.log_dock_widget)
    else:
        # no HTML dock; log straight to the plain logger
        self.log_widget = self.log

    if self.execute_python:
        self.python_dock_widget = PythonConsoleWidget(self)
        self.python_dock_widget.setObjectName('python_console')
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.python_dock_widget)
def _on_execute_python_button(self, clear=False):
    """Runs the text currently typed into the docked python console.

    If *clear* is True and the code executed successfully, the console
    input box is emptied afterwards.
    """
    try:
        enter_data = self.python_dock_widget.enter_data
    except Exception as error:
        self.log_error(str(error))
        self.log_error('problem getting enter_data from python console')
        return

    code_text = str(enter_data.toPlainText()).rstrip()
    if self._execute_python_code(code_text) and clear:
        enter_data.clear()
def set_tools(self,
              tools: List[Tuple[str, str, str, Optional[str], str, Callable]]=None,
              checkables: Optional[Dict[str, bool]]=None):
    """Creates the GUI tools.

    Parameters
    ----------
    tools : List[(name, txt, icon, shortcut, tip, func)]; default=None
        the menu/toolbar actions; None -> the standard tool set
    checkables : Dict[str, bool]; default=None
        maps an action name to its initial checked state;
        None -> the standard checkable set

    Fills ``self.tools`` / ``self.checkables``.

    Fixes over the previous revision (tooltip string bugs only):
     - 'show_warning'/'show_error' tooltips said 'Show "COMMAND" messages'
     - 'Magnfication' -> 'Magnification' (magnify/shrink)
     - 'Subanel' -> 'Subpanel' (caero_subpanels)
    """
    if checkables is None:
        checkables = {
            # name, is_checked
            'show_info' : True,
            'show_debug' : True,
            'show_command' : True,
            'show_warning' : True,
            'show_error' : True,
            'anti_alias_0' : True,
            'anti_alias_1' : False,
            'anti_alias_2' : False,
            'anti_alias_4' : False,
            'anti_alias_8' : False,

            'rotation_center' : False,
            'measure_distance' : False,
            'probe_result' : False,
            'highlight_cell' : False,
            'highlight_node' : False,
            'area_pick' : False,
            'highlight' : False,
            'zoom' : False,
        }

    if tools is None:
        file_tools = [
            ('exit', '&Exit', 'texit.png', 'Ctrl+Q', 'Exit application', self.closeEvent),
            ('reload', 'Reload Model...', 'treload.png', '', 'Remove the model and reload the same geometry file', self.on_reload),
            ('load_geometry', 'Load &Geometry...', 'load_geometry.png', 'Ctrl+O', 'Loads a geometry input file', self.on_load_geometry),
            ('load_results', 'Load &Results...', 'load_results.png', 'Ctrl+R', 'Loads a results file', self.on_load_results),
            ('load_csv_user_geom', 'Load CSV User Geometry...', '', None, 'Loads custom geometry file', self.on_load_user_geom),
            ('load_csv_user_points', 'Load CSV User Points...', 'user_points.png', None, 'Loads CSV points', self.on_load_csv_points),
            ('load_custom_result', 'Load Custom Results...', '', None, 'Loads a custom results file', self.on_load_custom_results),
            ('script', 'Run Python Script...', 'python48.png', None, 'Runs pyNastranGUI in batch mode', self.on_run_script),
        ]

        tools = file_tools + [
            # labels
            ('label_clear', 'Clear Current Labels', '', 'CTRL+W', 'Clear current labels', self.clear_labels),
            ('label_reset', 'Clear All Labels', '', None, 'Clear all labels', self.reset_labels),

            # view
            ('wireframe', 'Wireframe Model', 'twireframe.png', 'w', 'Show Model as a Wireframe Model', self.on_wireframe),
            ('surface', 'Surface Model', 'tsolid.png', 's', 'Show Model as a Surface Model', self.on_surface),
            ('screenshot', 'Take a Screenshot...', 'tcamera.png', 'CTRL+I', 'Take a Screenshot of current view', self.tool_actions.on_take_screenshot),

            # geometry
            # Geometry:
            #  - Create
            #  - Modify
            ('geometry', 'Geometry', 'geometry.png', None, 'Geometry', self.geometry_obj.show),

            #
            # core menus
            ('legend', 'Modify Legend...', 'legend.png', 'CTRL+L', 'Set Legend', self.legend_obj.set_legend_menu),
            ('animation', 'Create Animation...', 'animation.png', 'CTRL+A', 'Create Animation', self.legend_obj.set_animation_menu),
            ('clipping', 'Set Clipping...', '', None, 'Set Clipping', self.clipping_obj.set_clipping_menu),
            ('set_preferences', 'Preferences...', 'preferences.png', 'CTRL+P', 'Set GUI Preferences', self.preferences_obj.set_preferences_menu),
            ('geo_properties', 'Edit Geometry Properties...', '', 'CTRL+E', 'Change Model Color/Opacity/Line Width', self.edit_geometry_properties_obj.edit_geometry_properties),
            ('map_element_fringe', 'Map Element Fringe', '', 'CTRL+F', 'Map Elemental Centroidal Fringe Result to Nodes', self.map_element_centroid_to_node_fringe_result),
            #('axis', 'Show/Hide Axis', 'axis.png', None, 'Show/Hide Global Axis', self.on_show_hide_axes),

            # groups
            ('modify_groups', 'Modify Groups...', '', None, 'Create/Edit/Delete Groups', self.on_set_modify_groups),
            ('create_groups_by_visible_result', 'Create Groups By Visible Result', '', None, 'Create Groups', self.create_groups_by_visible_result),
            ('create_groups_by_property_id', 'Create Groups By Property ID', '', None, 'Create Groups', self.create_groups_by_property_id),
            #('create_list', 'Create Lists through Booleans', '', None, 'Create List', self.create_list),

            # logging
            ('show_info', 'Show INFO', 'show_info.png', None, 'Show "INFO" messages', self.on_show_info),
            ('show_debug', 'Show DEBUG', 'show_debug.png', None, 'Show "DEBUG" messages', self.on_show_debug),
            ('show_command', 'Show COMMAND', 'show_command.png', None, 'Show "COMMAND" messages', self.on_show_command),
            # bugfix: these tooltips previously said 'Show "COMMAND" messages'
            ('show_warning', 'Show WARNING', 'show_warning.png', None, 'Show "WARNING" messages', self.on_show_warning),
            ('show_error', 'Show ERROR', 'show_error.png', None, 'Show "ERROR" messages', self.on_show_error),

            # zoom
            ('magnify', 'Magnify', 'plus_zoom.png', 'm', 'Increase Magnification', self.on_increase_magnification),
            ('shrink', 'Shrink', 'minus_zoom.png', 'Shift+M', 'Decrease Magnification', self.on_decrease_magnification),

            # rotation
            ('rotate_clockwise', 'Rotate Clockwise', 'tclock.png', 'o', 'Rotate Clockwise', self.on_rotate_clockwise),
            ('rotate_cclockwise', 'Rotate Counter-Clockwise', 'tcclock.png', 'Shift+O', 'Rotate Counter-Clockwise', self.on_rotate_cclockwise),

            #('cell_pick', 'Cell Pick', '', 'c', 'Centroidal Picking', self.on_cell_picker),
            #('node_pick', 'Node Pick', '', 'n', 'Nodal Picking', self.on_node_picker),

            # help
            ('website', 'Open pyNastran Website...', '', None, 'Open the pyNastran website', self.open_website),
            ('docs', 'Open pyNastran Docs Website...', '', None, 'Open the pyNastran documentation website', self.open_docs),
            ('report_issue', 'Report a Bug/Feature Request...', '', None, 'Open the pyNastran issue tracker', self.open_issue),
            ('discussion_forum', 'Discussion Forum Website...', '', None, 'Open the discussion forum to ask questions', self.open_discussion_forum),
            ('about', 'About pyNastran GUI...', 'tabout.png', 'CTRL+H', 'About pyNastran GUI and help on shortcuts', self.about_dialog),

            # camera
            ('view', 'Camera View', 'view.png', None, 'Load the camera menu', self.camera_obj.set_camera_menu),
            ('camera_reset', 'Reset Camera View', 'trefresh.png', 'r', 'Reset the camera view to default', self.on_reset_camera),

            # results
            ('cycle_results', 'Cycle Results', 'cycle_results.png', 'L', 'Changes the result case', self.on_cycle_results),
            ('rcycle_results', 'Cycle Results', 'rcycle_results.png', 'K', 'Changes the result case', self.on_rcycle_results),

            # view actions
            ('back_view', 'Back View', 'back.png', 'x', 'Flips to +X Axis', lambda: self.view_actions.update_camera('+x')),
            ('right_view', 'Right View', 'right.png', 'y', 'Flips to +Y Axis', lambda: self.view_actions.update_camera('+y')),
            ('top_view', 'Top View', 'top.png', 'z', 'Flips to +Z Axis', lambda: self.view_actions.update_camera('+z')),
            ('front_view', 'Front View', 'front.png', 'Shift+X', 'Flips to -X Axis', lambda: self.view_actions.update_camera('-x')),
            ('left_view', 'Left View', 'left.png', 'Shift+Y', 'Flips to -Y Axis', lambda: self.view_actions.update_camera('-y')),
            ('bottom_view', 'Bottom View', 'bottom.png', 'Shift+Z', 'Flips to -Z Axis', lambda: self.view_actions.update_camera('-z')),

            ('edges', 'Show/Hide Edges', 'tedges.png', 'e', 'Show/Hide Model Edges', self.on_flip_edges),
            ('edges_black', 'Color Edges', '', 'b', 'Set Edge Color to Color/Black', self.on_set_edge_visibility),
            ('anti_alias_0', 'Off', '', None, 'Disable Anti-Aliasing', lambda: self.on_set_anti_aliasing(0)),
            ('anti_alias_1', '1x', '', None, 'Set Anti-Aliasing to 1x', lambda: self.on_set_anti_aliasing(1)),
            ('anti_alias_2', '2x', '', None, 'Set Anti-Aliasing to 2x', lambda: self.on_set_anti_aliasing(2)),
            ('anti_alias_4', '4x', '', None, 'Set Anti-Aliasing to 4x', lambda: self.on_set_anti_aliasing(4)),
            ('anti_alias_8', '8x', '', None, 'Set Anti-Aliasing to 8x', lambda: self.on_set_anti_aliasing(8)),

            # mouse buttons
            ('rotation_center', 'Set the rotation center', 'trotation_center.png', 'f', 'Pick a node for the rotation center/focal point', self.mouse_actions.on_rotation_center),
            ('measure_distance', 'Measure Distance', 'measure_distance.png', None, 'Measure the distance between two nodes', self.mouse_actions.on_measure_distance),
            ('highlight_cell', 'Highlight Cell', '', None, 'Highlight a single cell', self.mouse_actions.on_highlight_cell),
            ('highlight_node', 'Highlight Node', '', None, 'Highlight a single node', self.mouse_actions.on_highlight_node),
            ('probe_result', 'Probe', 'tprobe.png', None, 'Probe the displayed result', self.mouse_actions.on_probe_result),
            ('quick_probe_result', 'Quick Probe', '', 'p', 'Probe the displayed result', self.mouse_actions.on_quick_probe_result),
            ('zoom', 'Zoom', 'zoom.png', None, 'Zoom In', self.mouse_actions.on_zoom),

            # font size
            ('font_size_increase', 'Increase Font Size', 'text_up.png', 'Ctrl+Plus', 'Increase Font Size', self.on_increase_font_size),
            ('font_size_decrease', 'Decrease Font Size', 'text_down.png', 'Ctrl+Minus', 'Decrease Font Size', self.on_decrease_font_size),

            # picking
            ('area_pick', 'Area Pick', 'tarea_pick.png', None, 'Get a list of nodes/elements', self.mouse_actions.on_area_pick),
            ('highlight', 'Highlight', 'thighlight.png', None, 'Highlight a list of nodes/elements', self.mouse_actions.on_highlight),
            ('highlight_nodes_elements', 'Highlight', 'thighlight.png', None, 'Highlight a list of nodes/elements', self.highlight_obj.set_menu),
            ('mark_nodes_elements', 'Mark', 'tmark.png', None, 'Mark a list of nodes/elements', self.mark_obj.set_menu),
        ]

    if hasattr(self, 'cutting_plane_obj'):
        tools.append(('cutting_plane', 'Cutting Plane...', 'cutting_plane.png', None, 'Create Cutting Plane', self.cutting_plane_obj.set_cutting_plane_menu))

    if 'nastran' in self.fmts:
        tools += [
            ('caero', 'Show/Hide CAERO Panels', '', None, 'Show/Hide CAERO Panel Outlines', self.toggle_caero_panels),
            ('caero_subpanels', 'Toggle CAERO Subpanels', '', None, 'Show/Hide CAERO Subpanel Outlines', self.toggle_caero_sub_panels),
            ('conm2', 'Toggle CONM2s', '', None, 'Show/Hide CONM2s', self.toggle_conms),
            ('min', 'Min', '', None, 'Show/Hide Min Label', self.show_hide_min_actor),
            ('max', 'Max', '', None, 'Show/Hide Max Label', self.show_hide_max_actor),
        ]
    self.tools = tools
    self.checkables = checkables
def keyPressEvent(self, qkey_event):
    """forwards key presses to Qt; shortcuts are handled by the QActions"""
    #print('qkey_event =', qkey_event.key())
    super(GuiCommon, self).keyPressEvent(qkey_event)
def _create_menu_bar(self, menu_bar_order: Optional[List[str]]=None):
    """
    Creates the top-level menus in the requested order.

    Entries in ``menu_bar_order`` are either one of the known string keys
    ('menu_file', 'menu_view', 'menu_window', 'menu_help') or an
    ``(attr_name, display_name)`` tuple for a custom menu that is stored
    on ``self`` as ``attr_name``.
    """
    # NOTE(review): 'menu_bar_oder' looks like a typo of 'menu_bar_order',
    # but it is kept unchanged in case something reads this attribute
    self.menu_bar_oder = menu_bar_order
    if menu_bar_order is None:
        menu_bar_order = ['menu_file', 'menu_view', 'menu_window', 'menu_help']

    for key in menu_bar_order:
        if key == 'menu_file':
            self.menu_file = self.menubar.addMenu('&File')
        elif key == 'menu_view':
            self.menu_view = self.menubar.addMenu('&View')
        elif key == 'menu_window':
            self.menu_window = self.menubar.addMenu('&Window')
        elif key == 'menu_help':
            self.menu_help = self.menubar.addMenu('&Help')
        elif isinstance(key, tuple):
            attr_name, name = key
            submenu = self.menubar.addMenu(name)
            setattr(self, attr_name, submenu)
        else:
            raise NotImplementedError(key)

    # always last; holds shortcut-only actions that shouldn't be shown
    self.menu_hidden = self.menubar.addMenu('&Hidden')
    self.menu_hidden.menuAction().setVisible(False)
def _create_menu_items(self, actions=None, create_menu_bar=True, menu_bar_order=None):
    """
    Builds the {menu_key : (menu, item_names)} mapping used by
    ``_populate_menu``; optionally creates the menu bar first.
    """
    if actions is None:
        actions = self.actions

    if create_menu_bar:
        self._create_menu_bar(menu_bar_order=menu_bar_order)

    scripts = []
    if self._script_path is not None and os.path.exists(self._script_path):
        scripts = [script for script in os.listdir(self._script_path) if '.py' in script]
    scripts = tuple(scripts)

    #if 0:
        #print('script_path =', script_path)
        #print('scripts =', scripts)
        #self.menu_scripts = self.menubar.addMenu('&Scripts')
        #for script in scripts:
            #fname = os.path.join(script_path, script)
            #tool = (script, script, 'python48.png', None, '',
                    #lambda: self.on_run_script(fname) )
            #tools.append(tool)
    #else:
    self.menu_scripts = None

    menu_window = ['toolbar', 'reswidget']
    menu_view = [
        'screenshot', '', 'wireframe', 'surface', 'camera_reset', '',
        'set_preferences', #'cutting_plane',
        '',
        'label_clear', 'label_reset', '',
        'legend', 'animation', 'geo_properties',
        #['Anti-Aliasing', 'anti_alias_0', 'anti_alias_1', 'anti_alias_2',
        #'anti_alias_4', 'anti_alias_8',],
    ]
    if self.is_groups:
        menu_view += ['modify_groups', 'create_groups_by_property_id',
                      'create_groups_by_visible_result']
    menu_view += [
        '', 'clipping', #'axis',
        'edges', 'edges_black',]
    if self.html_logging:
        self.actions['log_dock_widget'] = self.log_dock_widget.toggleViewAction()
        self.actions['log_dock_widget'].setStatusTip("Show/Hide application log")
        menu_view += ['', 'show_info', 'show_debug', 'show_command', 'show_warning', 'show_error']
        menu_window += ['log_dock_widget']
    if self.execute_python:
        self.actions['python_dock_widget'] = self.python_dock_widget.toggleViewAction()
        self.actions['python_dock_widget'].setStatusTip("Show/Hide Python Console")
        menu_window += ['python_dock_widget']

    menu_file = [
        'load_geometry', 'load_results', '',
        'load_custom_result', '',
        'load_csv_user_points', 'load_csv_user_geom', 'script', '', 'exit']
    toolbar_tools = [
        'reload', 'load_geometry', 'load_results',
        'front_view', 'back_view', 'top_view', 'bottom_view',
        'left_view', 'right_view',
        'magnify', 'shrink', 'zoom',
        'rotate_clockwise', 'rotate_cclockwise',
        'rotation_center', 'measure_distance', 'probe_result',
        #'highlight_cell', 'highlight_node',
        'area_pick', 'highlight_nodes_elements', 'mark_nodes_elements',
        'wireframe', 'surface', 'edges']
    toolbar_tools += [
        'camera_reset', 'view', 'screenshot', 'min', 'max', 'map_element_fringe',
        '', # 'exit'
    ]
    # actions that exist only for their keyboard shortcuts
    hidden_tools = ('cycle_results', 'rcycle_results',
                    'font_size_increase', 'font_size_decrease', 'highlight')

    menu_items = OrderedDict()
    if create_menu_bar:
        menu_items['file'] = (self.menu_file, menu_file)
        menu_items['view'] = (self.menu_view, menu_view)
        menu_items['main'] = (self.menu_window, menu_window)
        menu_items['help'] = (self.menu_help, ('website', 'docs', 'report_issue', 'discussion_forum', 'about',))

        menu_items['scripts'] = (self.menu_scripts, scripts)
        menu_items['toolbar'] = (self.toolbar, toolbar_tools)
        menu_items['hidden'] = (self.menu_hidden, hidden_tools)
    return menu_items
def _hide_menubar(self) -> None:
    """hides the toolbar (the menubar line is deliberately disabled)"""
    self.toolbar.setVisible(False)
    #self.menuBar.setVisible(False)
def _build_menubar(self) -> None:
    """Creates the toolbar, the menubar, and all their QActions."""
    ## toolbar
    self.toolbar = self.addToolBar('Show toolbar')
    self.toolbar.setObjectName('main_toolbar')

    # the dummy toolbar stores actions but doesn't get shown
    # in other words, it can set shortcuts
    #self._dummy_toolbar = self.addToolBar('Dummy toolbar')
    #self._dummy_toolbar.setObjectName('dummy_toolbar')
    self.menubar = self.menuBar()

    actions = self._prepare_actions(self._icon_path, self.tools, self.checkables)
    action_names = list(self.actions.keys())
    action_names.sort()
    #print("self.actions =", action_names)
    #for plugin in self.plugins:

    menu_items = self._create_menu_items(actions)
    self._populate_menu(menu_items)

    # restore the logging checkboxes from the saved settings
    self.actions['show_info'].setChecked(self.settings.show_info)
    self.actions['show_debug'].setChecked(self.settings.show_debug)
    self.actions['show_command'].setChecked(self.settings.show_command)
    self.actions['show_warning'].setChecked(self.settings.show_warning)
    self.actions['show_error'].setChecked(self.settings.show_error)
def _populate_menu(self, menu_items: Dict[str, Tuple[Any, Any]],
                   actions=None) -> None:
    """populate menus and toolbar

    Each item is either '' (separator), an action-name string, or a list
    ([sub_menu_name, *action_names]) that becomes a sub-menu/sub-toolbar.
    """
    assert isinstance(menu_items, dict), menu_items
    if actions is None:
        actions = self.actions
    for unused_menu_name, (menu, items) in menu_items.items():
        if menu is None:
            continue
        for item in items:
            if not item:
                menu.addSeparator()
            else:
                if isinstance(item, list):
                    unused_sub_menu_name = item[0]
                    if isinstance(menu, QToolBar):
                        populate_sub_qtoolbar(menu, item, actions)
                    elif isinstance(menu, QMenu):
                        populate_sub_qmenu(menu, item, actions)
                    else:
                        raise TypeError(menu)
                    continue
                elif not isinstance(item, str):
                    raise RuntimeError('what is this...action item() = %r' % item())
                try:
                    action = self.actions[item] #if isinstance(item, str) else item()
                except Exception:
                    # unknown action name; log the valid keys before failing
                    keysi = list(self.actions.keys())
                    self.log.error(str(keysi))
                    raise
                menu.addAction(action)
    #self._create_plane_from_points(None)
def _update_menu(self, menu_items):
    """Clears every menu, then rebuilds them all from *menu_items*."""
    assert isinstance(menu_items, dict), menu_items
    for menu, _unused_items in menu_items.values():
        menu.clear()
    self._populate_menu(menu_items)
#def _create_plane_from_points(self, points):
#origin, vx, vy, vz, x_limits, y_limits = self._fit_plane(points)
## We create a 100 by 100 point plane to sample
#splane = vtk.vtkPlaneSource()
#plane = splane.GetOutput()
#dx = max(x_limits) - min(x_limits)
#dy = max(y_limits) - min(y_limits)
#dx = 1.
#dy = 3.
## we need to offset the origin of the plane because the "origin"
## is at the lower left corner of the plane and not the centroid
#offset = (dx * vx + dy * vy) / 2.
#origin -= offset
#splane.SetCenter(origin)
#splane.SetNormal(vz)
## Point 1 defines the x-axis and the x-size
## Point 2 defines the y-axis and the y-size
#splane.SetPoint1(origin + dx * vx)
#splane.SetPoint2(origin + dy * vy)
#actor = vtk.vtkLODActor()
#mapper = vtk.vtkPolyDataMapper()
##mapper.InterpolateScalarsBeforeMappingOn()
##mapper.UseLookupTableScalarRangeOn()
#mapper.SetInput(plane)
#actor.GetProperty().SetColor(1., 0., 0.)
#actor.SetMapper(mapper)
#self.rend.AddActor(actor)
#splane.Update()
#def _fit_plane(self, points):
#origin = np.array([34.60272856552356, 16.92028913186242, 37.805958003209184])
#vx = np.array([1., 0., 0.])
#vy = np.array([0., 1., 0.])
#vz = np.array([0., 0., 1.])
#x_limits = [-1., 2.]
#y_limits = [0., 1.]
#return origin, vx, vy, vz, x_limits, y_limits
def _prepare_actions(self, icon_path: str, tools, checkables=None):
    """
    Prepare actions that will be used in application in a way
    that's independent of the menus & toolbar

    Returns ``self.actions`` after adding the toolbar/results-dock
    toggle actions on top of the tool actions.
    """
    self._prepare_actions_helper(icon_path, tools, self.actions,
                                 checkables=checkables)

    self.actions['toolbar'] = self.toolbar.toggleViewAction()
    self.actions['toolbar'].setStatusTip('Show/Hide application toolbar')

    self.actions['reswidget'] = self.res_dock.toggleViewAction()
    self.actions['reswidget'].setStatusTip('Show/Hide results selection')
    return self.actions
def _prepare_actions_helper(self, icon_path: str, tools, actions, checkables=None):
    """
    Prepare actions that will be used in application in a way
    that's independent of the menus & toolbar

    Parameters
    ----------
    icon_path : str
        the directory holding the icon files
    tools : List[(name, txt, icon, shortcut, tip, func)]
        the action definitions
    actions : Dict[str, QAction]
        filled in-place; duplicate names are logged and skipped
    checkables : Dict[str, bool]; default=None -> {}
        maps an action name to its initial checked state
    """
    if checkables is None:
        # bugfix: was [] -- checkables is used as a dict (checkables[name]),
        # so the empty default must be a dict as well
        checkables = {}

    for tool in tools:
        (name, txt, icon, shortcut, tip, func) = tool
        if name in actions:
            self.log_error('trying to create a duplicate action %r' % name)
            continue

        if icon is None:
            print(f'missing_icon = {name!r}!!!')
            ico = None
        else:
            ico = QtGui.QIcon()
            pth = os.path.join(icon_path, icon)
            ico.addPixmap(QtGui.QPixmap(pth), QtGui.QIcon.Normal, QtGui.QIcon.Off)

        if name in checkables:
            is_checked = checkables[name]
            actions[name] = QAction(ico, txt, self, checkable=True)
            actions[name].setChecked(is_checked)
        else:
            actions[name] = QAction(ico, txt, self)

        if shortcut:
            actions[name].setShortcut(shortcut)
            #actions[name].setShortcutContext(QtCore.Qt.WidgetShortcut)
        if tip:
            actions[name].setStatusTip(tip)
        if func:
            actions[name].triggered.connect(func)
def _logg_msg(self, log_type: str, filename: str, lineno: int, msg: str) -> None:
    """
    Add message to log widget trying to choose right color for it.

    Parameters
    ----------
    log_type : str
        {DEBUG, INFO, ERROR, COMMAND, WARNING} or prepend 'GUI '
    filename : str
        the active file
    lineno : int
        line number
    msg : str
        message to be displayed
    """
    if not self.html_logging:
        # standard logger
        name = '%-8s' % (log_type + ':')
        filename_n = '%s:%s' % (filename, lineno)
        msg2 = ' %-28s %s\n' % (filename_n, msg)
        print(name, msg2)
        return

    # filter the message per the user's show_* settings
    if 'DEBUG' in log_type and not self.settings.show_debug:
        return
    elif 'INFO' in log_type and not self.settings.show_info:
        return
    elif 'COMMAND' in log_type and not self.settings.show_command:
        return
    elif 'WARNING' in log_type and not self.settings.show_warning:
        return
    elif 'ERROR' in log_type and not self.settings.show_error:
        return

    if log_type in ['GUI ERROR', 'GUI COMMAND', 'GUI DEBUG', 'GUI INFO', 'GUI WARNING']:
        log_type = log_type[4:] # drop the GUI

    html_msg = str_to_html(log_type, filename, lineno, msg)

    # in performance mode, buffer messages instead of touching the widget
    if self.performance_mode or self.log_widget is None:
        self._log_messages.append(html_msg)
    else:
        self._log_msg(html_msg)
def _log_msg(self, msg: str) -> None:
    """prints an HTML log message (thread-safe via the log mutex)"""
    self.log_mutex.lockForWrite()
    text_cursor = self.log_widget.textCursor()
    end = text_cursor.End
    text_cursor.movePosition(end)
    text_cursor.insertHtml(msg)
    self.log_widget.ensureCursorVisible() # new message will be visible
    self.log_mutex.unlock()
def log_info(self, msg: str) -> None:
    """Log *msg* with an 'INFO:' prefix; a None msg is reported as a GUI error."""
    if msg is None:
        return self.log.simple_msg('msg is None; must be a string', 'GUI ERROR')
    return self.log.simple_msg(msg, 'GUI INFO')
def log_debug(self, msg: str) -> None:
    """Helper function: log a message with a 'DEBUG:' prefix."""
    if msg is None:
        return self.log.simple_msg('msg is None; must be a string', 'GUI ERROR')
    return self.log.simple_msg(msg, 'GUI DEBUG')
def log_command(self, msg: str) -> None:
    """Helper function: log a message with a 'COMMAND:' prefix."""
    if msg is None:
        return self.log.simple_msg('msg is None; must be a string', 'GUI ERROR')
    return self.log.simple_msg(msg, 'GUI COMMAND')
def log_error(self, msg: str) -> None:
    """Helper function: log a message with a 'GUI ERROR:' prefix."""
    if msg is None:
        # a None message is itself an error worth reporting
        msg = 'msg is None; must be a string'
    return self.log.simple_msg(msg, 'GUI ERROR')
def log_warning(self, msg: str) -> None:
    """Helper function: log a message with a 'WARNING:' prefix."""
    if msg is None:
        return self.log.simple_msg('msg is None; must be a string', 'GUI ERROR')
    return self.log.simple_msg(msg, 'GUI WARNING')
def on_escape_null(self) -> None:
    """No-op handler: the default state for the Escape key is nothing."""
    pass
def on_escape(self) -> None:
    """
    Escape key should cancel:
     - on_rotation_center

    TODO: not done...
    """
    pass
#def remove_picker(self):
#self.vtk_interactor.
def set_node_picker(self) -> None:
    """makes the node picker the active VTK picker"""
    self.vtk_interactor.SetPicker(self.node_picker)
def set_cell_picker(self) -> None:
    """makes the cell picker the active VTK picker"""
    self.vtk_interactor.SetPicker(self.cell_picker)
def set_background_image(self, image_filename: str='GeologicalExfoliationOfGraniteRock.jpg'):
    """adds a background image

    Parameters
    ----------
    image_filename : str; default='GeologicalExfoliationOfGraniteRock.jpg'
        path to the image; silently returns if the file does not exist
    """
    if not os.path.exists(image_filename):
        return

    #image_reader = vtk.vtkJPEGReader()
    #image_reader = vtk.vtkPNGReader()
    #image_reader = vtk.vtkTIFFReader()
    #image_reader = vtk.vtkBMPReader()
    #image_reader = vtk.vtkPostScriptReader()  # doesn't exist?

    has_background_image = self._active_background_image is not None
    self._active_background_image = image_filename
    #if has_background_image:
        #self.image_reader.Delete()

    image_reader = get_image_reader(image_filename)
    if not image_reader.CanReadFile(image_filename):
        print(f'Error reading file {image_filename}')
        return

    image_reader.SetFileName(image_filename)
    image_reader.Update()
    image_data = image_reader.GetOutput()
    self.image_reader = image_reader

    if has_background_image:
        # the layered renderer already exists; just swap the image data
        self.image_actor.SetInputData(image_data)
        self.Render()
        return

    # Create an image actor to display the image
    self.image_actor = vtk.vtkImageActor()
    self.image_actor.SetInputData(image_data)

    # the image gets its own non-interactive renderer on layer 0,
    # behind the model renderer (layer 1)
    self.background_rend = vtk.vtkRenderer()
    self.background_rend.SetLayer(0)
    self.background_rend.InteractiveOff()
    self.background_rend.AddActor(self.image_actor)

    self.rend.SetLayer(1)
    render_window = self.vtk_interactor.GetRenderWindow()
    render_window.SetNumberOfLayers(2)

    render_window.AddRenderer(self.background_rend)

    # Set up the background camera to fill the renderer with the image
    origin = image_data.GetOrigin()
    spacing = image_data.GetSpacing()
    extent = image_data.GetExtent()

    camera = self.background_rend.GetActiveCamera()
    camera.ParallelProjectionOn()

    xcentroid = origin[0] + 0.5 * (extent[0] + extent[1]) * spacing[0]
    ycentroid = origin[1] + 0.5 * (extent[2] + extent[3]) * spacing[1]
    #xd = (extent[1] - extent[0] + 1) * spacing[0]
    yd = (extent[3] - extent[2] + 1) * spacing[1]
    distance = camera.GetDistance()

    # fit the image height to the viewport
    camera.SetParallelScale(0.5 * yd)
    camera.SetFocalPoint(xcentroid, ycentroid, 0.0)
    camera.SetPosition(xcentroid, ycentroid, distance)
def _create_vtk_objects(self):
    """creates some of the vtk objects"""
    # the Qt frame that VTK will render on
    self.vtk_frame = QFrame()

    # can't build an interactor without a GUI (for testing)
    self.vtk_interactor = QVTKRenderWindowInteractor(parent=self.vtk_frame)
    #self.set_anti_aliasing(2)
    self.mouse_actions.setup_mouse_buttons(mode='default')
def build_vtk_frame(self):
    """uses the vtk objects to set up the window (frame)"""
    vtk_hbox = QHBoxLayout()
    vtk_hbox.setContentsMargins(2, 2, 2, 2)

    vtk_hbox.addWidget(self.vtk_interactor)
    self.vtk_frame.setLayout(vtk_hbox)
    self.vtk_frame.setFrameStyle(QFrame.NoFrame | QFrame.Plain)
    # this is our main, 'central' widget
    self.setCentralWidget(self.vtk_frame)

    #=============================================================
    # +-----+-----+
    # |     |     |
    # |  A  |  B  |
    # |     |     |
    # +-----+-----+
    # xmin, xmax, ymin, ymax
    #
    # nframes=2 sets up a side-by-side two-viewport layout (disabled)
    nframes = 1
    #nframes = 2
    if nframes == 2:
        # xmin, ymin, xmax, ymax
        frame1 = [0., 0., 0.5, 1.0]
        frame2 = [0.5, 0., 1., 1.0]
        #frames = [frame1, frame2]
        self.rend.SetViewport(*frame1)
    self.vtk_interactor.GetRenderWindow().AddRenderer(self.rend)

    if nframes == 2:
        rend = vtk.vtkRenderer()
        rend.SetViewport(*frame2)
        self.vtk_interactor.GetRenderWindow().AddRenderer(rend)

    self.set_background_image()
    self.vtk_interactor.GetRenderWindow().Render()
    #self.load_nastran_geometry(None, None)

    #for cid, axes in self.axes.items():
        #self.rend.AddActor(axes)
    self.add_geometry()
    if nframes == 2:
        rend.AddActor(self.geom_actor)

    # initialize geometry_actors
    self.geometry_actors['main'] = self.geom_actor

    # bar scale set so you can't edit the bar scale
    white = (255, 255, 255)
    geom_props = AltGeometry(
        self, 'main', color=white, line_width=1, opacity=1.0, point_size=1,
        bar_scale=0.0, representation='main', is_visible=True)
    self.geometry_properties['main'] = geom_props

    #self.addAltGeometry()
    self.rend.GetActiveCamera().ParallelProjectionOn()
    self.rend.SetBackground(*self.settings.background_color)
    #self.rend.SetBackground2(*self.background_color2)

    self.rend.ResetCamera()
    self.mouse_actions.set_style_as_trackball()
    self._build_vtk_frame_post()
def on_reset_camera(self):
    """resets the camera (equivalent to pressing the 'r' key)"""
    self.log_command('on_reset_camera()')
    self._simulate_key_press('r')
    self.vtk_interactor.Render()
def on_flip_edges(self):
    """toggles the edge actor visibility on/off"""
    is_edges = not self.is_edges
    self.is_edges = is_edges

    self.edge_actor.SetVisibility(is_edges)
    # cart3d edge color isn't black...
    #self.edge_actor.GetProperty().SetColor(0, 0, 0)
    self.edge_actor.Modified()
    #self.widget.Update()
    self.Render()
    self.log_command('on_flip_edges()')
def on_set_edge_visibility(self):
    """toggles between black-edge display and colored edges"""
    self.is_edges_black = not self.is_edges_black
    prop = self.edge_actor.GetProperty()
    if self.is_edges_black:
        prop.EdgeVisibilityOn()
        self.edge_mapper.SetLookupTable(self.color_function_black)
    else:
        prop.EdgeVisibilityOff()
        self.edge_mapper.SetLookupTable(self.color_function)
    self.edge_actor.Modified()
    prop.Modified()
    self.vtk_interactor.Render()
    self.log_command('on_set_edge_visibility()')
#---------------------------------------------------------------------
# groups
def get_all_eids(self):
    """get the list of all the element IDs"""
    #name, result = self.get_name_result_data(0)
    #if name != 'ElementID':
        #name, result = self.get_name_result_data(1)
        #assert name == 'ElementID', name
    #return result
    return self.element_ids
def show_eids(self, eids):
    """shows the specified element IDs"""
    all_eids = self.get_all_eids()

    # drop requested ids that aren't in the model,
    # then map the surviving ids to 0-based indices
    valid_eids = np.intersect1d(all_eids, eids)
    ishow = np.searchsorted(all_eids, valid_eids)
    self.show_ids_mask(ishow)
def hide_eids(self, eids):
    """hides the specified element IDs"""
    all_eids = self.get_all_eids()

    # keep only valid ids, then invert the selection (A - B)
    # and map the ids that remain visible to 0-based indices
    valid_eids = np.intersect1d(all_eids, eids)
    eids_to_keep = np.setdiff1d(all_eids, valid_eids)
    self.show_ids_mask(np.searchsorted(all_eids, eids_to_keep))
def show_ids_mask(self, ids_to_show):
    """
    Masks the model so only the given 0-based element ids are shown.

    Parameters
    ----------
    ids_to_show : (n, ) int ndarray
        the 0-based element indices to keep visible

    Notes
    -----
    The old ``if 0 / elif 1 / elif 1 / else`` experiment chain was dead
    code; only the first ``elif 1`` branch ever ran, and its ``flip_flag``
    local was unused.  This is that live path.
    """
    prop = self.geom_actor.GetProperty()
    if len(ids_to_show) == self.nelements:
        # everything visible: leave culling alone
        #prop.BackfaceCullingOn()
        pass
    else:
        prop.BackfaceCullingOff()

    # doesn't work for the bwb_saero.bdf
    assert self._show_flag is True, self._show_flag
    self._update_ids_mask_show(ids_to_show)
    self._show_flag = True
def hide_ids_mask(self, ids_to_hide):
    """
    Masks the specific 0-based element ids so they are hidden.

    Parameters
    ----------
    ids_to_hide : (n, ) int ndarray
        the 0-based element indices to hide

    Notes
    -----
    bug fix: the culling check previously read ``self.ids_to_hide``, an
    attribute that is never set, instead of the ``ids_to_hide`` argument,
    which raised AttributeError.
    """
    #print('hide_ids_mask = ', ids_to_hide)
    prop = self.geom_actor.GetProperty()
    if len(ids_to_hide) == 0:
        prop.BackfaceCullingOn()
    else:
        prop.BackfaceCullingOff()

    # old; works; slow
    flip_flag = self._show_flag is False
    self._update_ids_mask(ids_to_hide, flip_flag, show_flag=False, render=False)
    self._update_ids_mask(ids_to_hide, False, show_flag=False, render=True)
    self._show_flag = False
def _show_ids_mask(self, ids_to_show):
"""
helper method for ``show_ids_mask``
.. todo:: doesn't work
"""
all_i = np.arange(self.nelements, dtype='int32')
ids_to_hide = np.setdiff1d(all_i, ids_to_show)
self._hide_ids_mask(ids_to_hide)
def _hide_ids_mask(self, ids_to_hide):
    """
    helper method for ``hide_ids_mask``

    .. todo:: doesn't work

    Notes
    -----
    The old ``if 1 / else`` chain only ever ran the first branch; the
    dead alternative (rebuilding the selection node from scratch) has
    been removed.
    """
    #print('_hide_ids_mask = ', ids_to_hide)
    ids = numpy_to_vtk_idtype(ids_to_hide)

    #self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)

    # sane; doesn't work
    self.selection_node.SetSelectionList(ids)
    ids.Modified()
    self.selection_node.Modified()
    self.selection.Modified()
    self.grid_selected.Modified()
    self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
    self.update_all(render=True)
def _update_ids_mask_show_false(self, ids_to_show, flip_flag=True, render=True):
    """selection helper: selects ids with the INVERSE property set"""
    ids = numpy_to_vtk_idtype(ids_to_show)
    ids.Modified()

    if flip_flag:
        # rebuild the selection node from scratch
        self.selection.RemoveAllNodes()
        self.selection_node = vtk.vtkSelectionNode()
        self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
        self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
        self.selection_node.SetSelectionList(ids)

        # INVERSE flips the selection semantics (select-to-hide)
        self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)
        self.selection.AddNode(self.selection_node)
    else:
        self.selection_node.SetSelectionList(ids)

    # dumb; works
    self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
    self.update_all(render=render)
def _update_ids_mask_show(self, ids_to_show):
    """helper method for ``show_ids_mask``"""
    ids = numpy_to_vtk_idtype(ids_to_show)
    ids.Modified()

    # always rebuild the selection node (no flip_flag here)
    self.selection.RemoveAllNodes()
    self.selection_node = vtk.vtkSelectionNode()
    self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
    self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
    self.selection_node.SetSelectionList(ids)
    self.selection_node.Modified()
    self.selection.Modified()
    self.selection.AddNode(self.selection_node)

    # seems to also work
    self.extract_selection.Update()

    self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
    self.update_all(render=True)
    #if 0:
        #self.grid_selected.Modified()
        #self.vtk_interactor.Render()
        #render_window = self.vtk_interactor.GetRenderWindow()
        #render_window.Render()
def _update_ids_mask_show_true(self, ids_to_show,
                               flip_flag=True, render=True):  # pragma: no cover
    """selection helper: selects ids directly (show semantics, no INVERSE)"""
    ids = numpy_to_vtk_idtype(ids_to_show)
    ids.Modified()

    if flip_flag:
        # rebuild the selection node from scratch
        self.selection.RemoveAllNodes()
        self.selection_node = vtk.vtkSelectionNode()
        self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
        self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
        self.selection_node.SetSelectionList(ids)
        self.selection.AddNode(self.selection_node)
    else:
        self.selection_node.SetSelectionList(ids)

    # dumb; works
    self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
    self.update_all(render=render)
def _update_ids_mask(self, ids_to_show, flip_flag=True, show_flag=True, render=True):
    """
    core masking helper: pushes ``ids_to_show`` into the vtk selection node

    Parameters
    ----------
    ids_to_show : (n, ) int ndarray
        0-based cell ids to select
    flip_flag : bool; default=True
        rebuild the selection node (needed when toggling show/hide mode)
    show_flag : bool; default=True
        show (True) or hide (False) the selected ids; hide uses INVERSE
    render : bool; default=True
        re-render the scene

    Notes
    -----
    An ``if 0`` dead branch (re-wiring the extract-selection inputs) was
    removed; only the live ``else`` path remains.
    """
    #print('flip_flag=%s show_flag=%s' % (flip_flag, show_flag))
    ids = numpy_to_vtk_idtype(ids_to_show)
    ids.Modified()

    if flip_flag:
        self.selection.RemoveAllNodes()
        self.selection_node = vtk.vtkSelectionNode()
        self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
        self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
        self.selection_node.SetSelectionList(ids)

        if not show_flag:
            # INVERSE makes the selection hide (rather than show) the ids
            self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)
        self.selection.AddNode(self.selection_node)
    else:
        self.selection_node.SetSelectionList(ids)

    #self.grid_selected.Update() # not in vtk 6
    #ids.Update()
    #self.shown_ids.Modified()

    # dumb; works
    self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())
    self.update_all(render=render)
def update_all_2(self, render=True):  # pragma: no cover
    """alternate version of ``update_all``; marks the pipeline dirty and re-renders"""
    self.grid_selected.Modified()

    self.selection_node.Modified()
    self.selection.Modified()
    self.extract_selection.Update()
    self.extract_selection.Modified()

    self.grid_selected.Modified()
    self.grid_mapper.Update()
    self.grid_mapper.Modified()

    self.vtk_interactor.Modified()
    self.rend.Render()
    self.rend.Modified()

    self.geom_actor.Modified()

    if render:
        self.vtk_interactor.Render()
        render_window = self.vtk_interactor.GetRenderWindow()
        render_window.Render()
def update_all(self, render=True):
    """
    marks the whole selection/extraction pipeline dirty and re-renders

    Parameters
    ----------
    render : bool; default=True
        also trigger an interactor/window render at the end
    """
    self.grid_selected.Modified()

    #selection_node.Update()
    self.selection_node.Modified()
    #selection.Update()
    self.selection.Modified()
    self.extract_selection.Update()
    self.extract_selection.Modified()

    #grid_selected.Update()
    self.grid_selected.Modified()
    self.grid_mapper.Update()
    self.grid_mapper.Modified()

    #selected_actor.Update()
    #selected_actor.Modified()

    #right_renderer.Modified()
    #right_renderer.Update()

    self.vtk_interactor.Modified()
    #interactor.Update()
    #-----------------
    self.rend.Render()
    #interactor.Start()

    self.rend.Modified()
    self.geom_actor.Modified()

    if render:
        self.vtk_interactor.Render()
        render_window = self.vtk_interactor.GetRenderWindow()
        render_window.Render()
def _setup_element_mask(self, create_grid_selected=True):
    """
    starts the masking

    self.grid feeds in the geometry

    Parameters
    ----------
    create_grid_selected : bool; default=True
        also create ``self.grid_selected`` seeded from the extraction output
    """
    ids = vtk.vtkIdTypeArray()
    ids.SetNumberOfComponents(1)

    # the "selection_node" is really a "selection_element_ids"
    # furthermore, it's an inverse model, so adding elements
    # hides more elements
    self.selection_node = vtk.vtkSelectionNode()
    self.selection_node.SetFieldType(vtk.vtkSelectionNode.CELL)
    self.selection_node.SetContentType(vtk.vtkSelectionNode.INDICES)
    self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)  # added
    self.selection_node.SetSelectionList(ids)

    self.selection = vtk.vtkSelection()
    self.selection.AddNode(self.selection_node)

    self.extract_selection = vtk.vtkExtractSelection()
    self.extract_selection.SetInputData(0, self.grid)
    self.extract_selection.SetInputData(1, self.selection)
    self.extract_selection.Update()

    # In selection
    if create_grid_selected:
        self.grid_selected = vtk.vtkUnstructuredGrid()
        self.grid_selected.ShallowCopy(self.extract_selection.GetOutput())

    #if 0:
    self.selection_node.GetProperties().Set(vtk.vtkSelectionNode.INVERSE(), 1)
    self.extract_selection.Update()
def start_logging(self):
    """creates the logger (html or console flavor) if one doesn't already exist"""
    if self.log is not None:
        return
    if self.html_logging is True:
        # route messages through the gui widget; messages may arrive from
        # multiple threads, so the widget access is synchronized so the
        # messages would not be interleaved
        log = SimpleLogger(
            level='debug', encoding='utf-8',
            log_func=lambda w, x, y, z: self._logg_msg(w, x, y, z))
        self.log_mutex = QtCore.QReadWriteLock()
    else:
        log = SimpleLogger(
            level='debug', encoding='utf-8',
            #log_func=lambda x, y: print(x, y)  # no colorama
        )
    self.log = log
def on_load_geometry_button(self, infile_name=None, geometry_format=None, name='main',
                            raise_error=False):
    """action version of ``on_load_geometry``"""
    self.on_load_geometry(
        infile_name=infile_name, geometry_format=geometry_format,
        name=name, plot=True, raise_error=raise_error)
def _update_menu_bar_to_format(self, fmt, method):
"""customizes the gui to be nastran/cart3d-focused"""
self.menu_bar_format = fmt
tools, menu_items = getattr(self, method)()
unused_actions = self._prepare_actions(self._icon_path, tools, self.checkables)
self._update_menu(menu_items)
def update_menu_bar(self):
    """rebuilds the format-specific tools/menus when the format changes"""
    # the format we're switching to
    method_new = f'_create_{self.format}_tools_and_menu_items'
    method_cleanup = f'_cleanup_{self.menu_bar_format}_tools_and_menu_items'

    # the current state of the format
    #method_new = '_create_%s_tools_and_menu_items' % self.menu_bar_format

    # TODO: what is cwo?
    # NOTE(review): hard-coding 'cwo' here forces the else-branch below,
    # so the ``is None`` path is effectively dead -- confirm this is intended
    self.menu_bar_format = 'cwo'
    if self.menu_bar_format is None:
        self._update_menu_bar_to_format(self.format, method_new)
    else:
        if not pyNastran.is_pynastrangui_exe:  # pragma: no cover
            print(f'need to add {method_new!r}')
        if self.menu_bar_format != self.format:
            if hasattr(self, method_cleanup):
            #if hasattr(self, method_old):
                self.menu_bar_format = None
                getattr(self, method_cleanup)()

        if hasattr(self, method_new):
            self._update_menu_bar_to_format(self.format, method_new)
            #self._update_menu_bar_to_format(self.format)
            #actions = self._prepare_actions(self._icon_path, self.tools, self.checkables)
            #menu_items = self._create_menu_items(actions)
            #menu_items = self._create_menu_items()
            #self._populate_menu(menu_items)
#def _load_force(self, out_filename):
#"""loads a deflection file"""
#self._load_deflection_force(out_filename, is_deflection=True, is_force=False)
def setup_gui(self, is_gui=True):
    """
    Setup the gui

    1.  starts the logging
    2.  reapplies the settings
    3.  create pickers
    4.  create main vtk actors
    5.  shows the Qt window

    Parameters
    ----------
    is_gui : bool; default=True
        show the Qt window at the end (False for testing)
    """
    assert self.fmts != [], f'supported_formats={self.supported_formats}'
    self.start_logging()
    settings = QtCore.QSettings()
    self.create_vtk_actors()

    # build GUI and restore saved application state
    #nice_blue = (0.1, 0.2, 0.4)
    qpos_default = self.pos()
    unused_pos_default = qpos_default.x(), qpos_default.y()

    self.reset_settings = False
    #if self.reset_settings or qt_version in [5, 'pyside']:
        #self.settings.reset_settings()
    #else:
    self.settings.load(settings)

    self.init_ui()
    if self.reset_settings:
        self.res_dock.toggleViewAction()
    self.init_cell_picker()

    unused_main_window_state = settings.value('mainWindowState')
    self.create_corner_axis()
    #-------------
    # loading
    if is_gui:
        self.show()
def setup_post(self, inputs):
    """interface for user defined post-scripts"""
    self.load_batch_inputs(inputs)

    user_points = inputs['user_points']
    if user_points is not None:
        for fname in user_points:
            self.on_load_user_points(fname)

    user_geom = inputs['user_geom']
    if user_geom is not None:
        for fname in user_geom:
            self.on_load_user_geom(fname)
    #self.set_anti_aliasing(16)
def init_cell_picker(self):
    """initializes the cell/node pickers and their mouse bindings"""
    self.is_pick = False
    # nothing to wire up when vtk isn't running (testing)
    if not self.run_vtk:
        return
    self.vtk_interactor.SetPicker(self.node_picker)
    self.vtk_interactor.SetPicker(self.cell_picker)
    # NOTE(review): both picker and mouse modes are set back-to-back, so
    # only the last call of each pair wins; this looks like a deliberate
    # prime-then-reset-to-default sequence -- confirm
    self.mouse_actions.setup_mouse_buttons(mode='probe_result')
    self.mouse_actions.setup_mouse_buttons(mode='default')
def convert_units(self, unused_result_name, result_value, xyz):
    """passthrough unit-conversion hook; returns the inputs unchanged"""
    #self.input_units
    #self.display_units
    return result_value, xyz
def _on_multi_pick(self, unused_a):
"""
vtkFrustumExtractor
vtkAreaPicker
"""
pass
def _on_cell_picker(self, unused_a):
self.vtk_interactor.SetPicker(self.cell_picker)
picker = self.cell_picker
world_position = picker.GetPickPosition()
cell_id = picker.GetCellId()
select_point = picker.GetSelectionPoint() # get x,y pixel coordinate
self.log_info('world_position = %s' % str(world_position))
self.log_info(f'cell_id = {cell_id}')
self.log_info('select_point = %s' % str(select_point))
def _on_node_picker(self, unused_a):
self.vtk_interactor.SetPicker(self.node_picker)
picker = self.node_picker
world_position = picker.GetPickPosition()
node_id = picker.GetPointId()
select_point = picker.GetSelectionPoint() # get x,y pixel coordinate
self.log_info("world_position = %s" % str(world_position))
self.log_info(f'node_id = {node_id}')
self.log_info("select_point = %s" % str(select_point))
#def on_cell_picker(self):
#self.log_command("on_cell_picker()")
#picker = self.cell_picker
#world_position = picker.GetPickPosition()
#cell_id = picker.GetCellId()
##ds = picker.GetDataSet()
#select_point = picker.GetSelectionPoint() # get x,y pixel coordinate
#self.log_info("world_position = %s" % str(world_position))
#self.log_info("cell_id = %s" % cell_id)
#self.log_info("select_point = %s" % str(select_point))
#self.log_info("data_set = %s" % ds)
#def get_2d_point(self, point3d, view_matrix,
#projection_matrix,
#width, height):
#view_projection_matrix = projection_matrix * view_matrix
## transform world to clipping coordinates
#point3d = view_projection_matrix.multiply(point3d)
#win_x = math.round(((point3d.getX() + 1) / 2.0) * width)
## we calculate -point3D.getY() because the screen Y axis is
## oriented top->down
#win_y = math.round(((1 - point3d.getY()) / 2.0) * height)
#return Point2D(win_x, win_y)
#def get_3d_point(self, point2D, width, height, view_matrix, projection_matrix):
#x = 2.0 * win_x / client_width - 1
#y = -2.0 * win_y / client_height + 1
#view_projection_inverse = inverse(projection_matrix * view_vatrix)
#point3d = Point3D(x, y, 0)
#return view_projection_inverse.multiply(point3d)
def make_gif(self, gif_filename, scale, istep=None,
             min_value=None, max_value=None,
             animate_scale=True, animate_phase=False, animate_time=False,
             icase_fringe=None, icase_disp=None, icase_vector=None,
             animate_fringe=False, animate_disp=True, animate_vector=False,
             icase_fringe_start=None, icase_fringe_end=None, icase_fringe_delta=None,
             icase_disp_start=None, icase_disp_end=None, icase_disp_delta=None,
             icase_vector_start=None, icase_vector_end=None, icase_vector_delta=None,
             time=2.0, animation_profile='0 to scale',
             nrepeat=0, fps=30, magnify=1,
             make_images=True, delete_images=False, make_gif=True, stop_animation=False,
             animate_in_gui=True):
    """
    Makes an animated gif

    Parameters
    ----------
    gif_filename : str
        path to the output gif & png folder
    scale : float
        the deflection scale factor; true scale
    istep : int; default=None
        the png file number (let's you pick a subset of images)
        useful for when you press ``Step``
    stop_animation : bool; default=False
        stops the animation; don't make any images/gif
    animate_in_gui : bool; default=True
        animates the model; don't make any images/gif
        stop_animation overrides animate_in_gui
        animate_in_gui overrides make_gif

    Pick One
    --------
    animate_scale : bool; default=True
        does a deflection plot (single subcase)
    animate_phase : bool; default=False
        does a complex deflection plot (single subcase)
    animate_time : bool; default=False
        does a deflection plot (multiple subcases)

    Other
    -----
    istep : int
        the png file number (let's you pick a subset of images)
        useful for when you press ``Step``
    time : float; default=2.0
        the runtime of the gif (seconds)
    fps : int; default=30
        the frames/second

    Case Selection
    --------------
    icase_fringe/disp/vector : int; default=None
        None : unused
        int : the result case to plot the deflection for
              active if animate_scale=True or animate_phase=True
    icase_start : int; default=None
        starting case id
        None : unused
        int : active if animate_time=True
    icase_end : int; default=None
        starting case id
        None : unused
        int : active if animate_time=True
    icase_delta : int; default=None
        step size
        None : unused
        int : active if animate_time=True

    Time Plot Options
    -----------------
    max_value : float; default=None
        the max value on the plot
    min_value : float; default=None
        the min value on the plot

    Options
    -------
    animation_profile : str; default='0 to scale'
        animation profile to follow
            '0 to Scale',
            '0 to Scale to 0',
            #'0 to Scale to -Scale to 0',
            '-Scale to Scale',
            '-scale to scale to -scale',
    nrepeat : int; default=0
        0 : loop infinitely
        1 : loop 1 time
        2 : loop 2 times

    Final Control Options
    ---------------------
    make_images : bool; default=True
        make the images
    delete_images : bool; default=False
        cleanup the png files at the end
    make_gif : bool; default=True
        actually make the gif at the end

    Other local variables
    ---------------------
    duration : float
        frame time (seconds)

    For one sided data
    ------------------
     - scales/phases should be one-sided
     - time should be one-sided
     - analysis_time should be one-sided
     - set onesided=True

    For two-sided data
    ------------------
     - scales/phases should be one-sided
     - time should be two-sided
     - analysis_time should be one-sided
     - set onesided=False
    """
    if stop_animation:
        self.stop_animation()
        return False

    is_failed = True
    try:
        # at least one thing must be animated
        if not(animate_fringe or animate_disp or animate_vector):
            msg = 'Either animate_fringe, animate_disp or animate_vector must be True'
            raise ValueError(msg)

        out = setup_animation(
            scale, istep=istep,
            animate_scale=animate_scale, animate_phase=animate_phase, animate_time=animate_time,
            icase_fringe=icase_fringe, icase_disp=icase_disp, icase_vector=icase_vector,
            icase_fringe_start=icase_fringe_start, icase_fringe_end=icase_fringe_end, icase_fringe_delta=icase_fringe_delta,
            icase_disp_start=icase_disp_start, icase_disp_end=icase_disp_end, icase_disp_delta=icase_disp_delta,
            icase_vector_start=icase_vector_start, icase_vector_end=icase_vector_end, icase_vector_delta=icase_vector_delta,
            time=time, animation_profile=animation_profile,
            fps=fps, animate_in_gui=animate_in_gui)
    except (AssertionError, ValueError, RuntimeError, NotImplementedError) as error:
        self.log_error(str(error))
        self.stop_animation()
        return is_failed

    (phases, icases_fringe, icases_disp, icases_vector,
     isteps, scales,
     analysis_time, onesided, unused_endpoint) = out

    # the case portion of the log_command message differs between
    # time animation (start/end/delta) and single-case animation
    if animate_time:
        icase_msg = (
            '     icase_fringe_start=%s, icase_fringe_end=%s, icase_fringe_delta=%s,\n'
            '     icase_disp_start=%s, icase_disp_end=%s, icase_disp_delta=%s,\n'
            '     icase_vector_start=%s, icase_vector_end=%s, icase_vector_delta=%s,\n' % (
                icase_fringe_start, icase_fringe_end, icase_fringe_delta,
                icase_disp_start, icase_disp_end, icase_disp_delta,
                icase_vector_start, icase_vector_end, icase_vector_delta))
    else:
        icase_msg = (
            '     icase_fringe=%s, icase_disp=%s, icase_vector=%s, \n'
            '     animate_fringe=%s, animate_disp=%s, animate_vector=%s, \n' % (
                icase_fringe, icase_disp, icase_vector,
                animate_fringe, animate_disp, animate_vector,
            ))

    #animate_in_gui = True
    self.stop_animation()
    if len(icases_disp) == 1:
        pass
    elif animate_in_gui:
        msg = (
            'make_gif(%r, %s, istep=%s,\n'
            '    min_value=%s, max_value=%s,\n'
            '    animate_scale=%s, animate_phase=%s, animate_time=%s,\n%s'
            #'    icase_fringe=%s, icase_disp=%s, icase_vector=%s, \n'
            #'    icase_start=%s, icase_end=%s, icase_delta=%s,\n'
            "    time=%s, animation_profile=%r,\n"
            '    fps=%s, stop_animation=%s, animate_in_gui=%s)\n' % (
                gif_filename, scale, istep, min_value, max_value,
                animate_scale, animate_phase, animate_time,
                icase_msg,
                #icase_fringe, icase_disp, icase_vector,
                #icase_start, icase_end, icase_delta,
                time, animation_profile,
                fps, stop_animation, animate_in_gui)
        )
        self.log_command(msg)
        # onesided has no advantages for in-gui animations and creates confusion
        scales, phases, icases_fringe, icases_disp, icases_vector, isteps = make_two_sided(
            scales, phases, icases_fringe, icases_disp, icases_vector, isteps, onesided)
        self._animate_in_gui(
            min_value, max_value,
            scales, phases,
            icases_fringe, icases_disp, icases_vector,
            animate_fringe, animate_vector,
            fps)
        is_failed = False
        return is_failed

    try:
        is_failed = self.make_gif_helper(
            gif_filename, icases_fringe, icases_disp, icases_vector, scales,
            phases=phases, isteps=isteps,
            animate_fringe=animate_fringe, animate_vector=animate_vector,
            max_value=max_value, min_value=min_value,
            time=time, analysis_time=analysis_time, fps=fps, magnify=magnify,
            onesided=onesided, nrepeat=nrepeat,
            make_images=make_images, delete_images=delete_images, make_gif=make_gif)
    except Exception as error:
        self.log_error(str(error))
        raise
        #self.log_error(traceback.print_stack(f))
        #self.log_error('\n' + ''.join(traceback.format_stack()))
        #traceback.print_exc(file=self.log_error)

    if not is_failed:
        msg = (
            'make_gif(%r, %s, istep=%s,\n'
            '    min_value=%s, max_value=%s,\n'
            '    animate_scale=%s, animate_phase=%s, animate_time=%s,\n%s'
            "    time=%s, animation_profile=%r,\n"
            '    nrepeat=%s, fps=%s, magnify=%s,\n'
            '    make_images=%s, delete_images=%s, make_gif=%s, stop_animation=%s,\n'
            '    animate_in_gui=%s)\n' % (
                gif_filename, scale, istep, min_value, max_value,
                animate_scale, animate_phase, animate_time,
                icase_msg,
                time, animation_profile,
                nrepeat, fps, magnify, make_images, delete_images, make_gif, stop_animation,
                animate_in_gui)
        )
        self.log_command(msg)

    return is_failed
def _animate_in_gui(self, min_value, max_value,
                    scales, phases,
                    icases_fringe, icases_disp, icases_vector,
                    animate_fringe, animate_vector,
                    fps):
    """helper method for ``make_gif``; drives the animation off a vtk timer"""
    callback = AnimationCallback(self, scales, phases,
                                 icases_fringe, icases_disp, icases_vector,
                                 animate_fringe, animate_vector,
                                 min_value, max_value)

    # Sign up to receive TimerEvent
    observer_name = self.vtk_interactor.AddObserver('TimerEvent', callback.execute)
    self.observers['TimerEvent'] = observer_name

    # frame period in milliseconds: 1/fps seconds per frame
    delay_ms = int(1. / fps * 1000)
    self.vtk_interactor.CreateRepeatingTimer(delay_ms)
def stop_animation(self):
    """removes the animation timer (no-op when no animation is running)"""
    is_failed = False
    if 'TimerEvent' in self.observers:
        self.vtk_interactor.RemoveObserver(self.observers['TimerEvent'])
        del self.observers['TimerEvent']
        self.mouse_actions.setup_mouse_buttons(mode='default', force=True)
    return is_failed
def animation_update(self, icase_fringe0, icase_disp0, icase_vector0,
                     icase_fringe, icase_disp, icase_vector, scale, phase,
                     animate_fringe, unused_animate_vector,
                     normalized_frings_scale,
                     min_value, max_value):
    """
    applies the animation update callback

    the ``*0`` arguments are the previously-applied case ids; work is
    only done when the new case id differs from the old one.
    Returns True when the frame updated cleanly, False on an invalid case.
    """
    #print('icase_fringe=%r icase_fringe0=%r' % (icase_fringe, icase_fringe0))
    arrow_scale = None  # self.glyph_scale_factor * scale
    #icase_vector = None
    is_legend_shown = self.scalar_bar.is_shown
    if icase_disp != icase_disp0:
        # apply the fringe
        #
        # min/max value is used only for the time plot
        # it's assumed to be a displacement result, so the fringe=displacement
        self.cycle_results_explicit(icase_disp, explicit=True,
                                    min_value=min_value, max_value=max_value)
    if icase_fringe is not None and icase_fringe != icase_fringe0:
        is_valid = self.on_fringe(icase_fringe,
                                  update_legend_window=False, show_msg=False)
        if is_legend_shown:
            # TODO: sort of a hack for the animation
            # the fringe always shows the legend, but we may not want that
            # just use whatever is active
            self.show_legend()
        if not is_valid:
            self.log_error(f'Invalid Fringe Case {icase_fringe:d}')
            return False

    is_valid = self.animation_update_fringe(
        icase_fringe, animate_fringe, normalized_frings_scale)
    if not is_valid:
        return is_valid

    if icase_disp is not None:
        try:
            # apply the deflection
            self.update_grid_by_icase_scale_phase(icase_disp, scale, phase=phase)
        except(AttributeError, KeyError) as error:
            self.log_error(f'Invalid Displacement Case {icase_disp:d}{str(error)}')
            return False
    if icase_vector is not None and icase_vector != icase_vector0:
        try:
            # apply the nodal forces
            self.update_forces_by_icase_scale_phase(icase_vector, arrow_scale, phase=phase)
        except(AttributeError, KeyError) as error:
            self.log_error(f'Invalid Vector Case {icase_vector:d}{str(error)}')
            return False
    is_valid = True
    return is_valid
    def animation_update_fringe(self, icase_fringe, animate_fringe, normalized_frings_scale):
        """
        Helper method for ``animation_update``; re-applies the fringe
        result for one animation frame and syncs the scalar bar with it.

        Parameters
        ----------
        icase_fringe : int / None
            the fringe case to update
        animate_fringe : bool
            if False this is a no-op that returns True
        normalized_frings_scale : float
            the frame scale normalized by the max scale
            (note: parameter name keeps the caller's existing spelling)

        Returns
        -------
        is_valid : bool
            False only if the underlying VTK fringe update failed
        """
        if animate_fringe:
            # e^(i*(theta + phase)) = sin(theta + phase) + i*cos(theta + phase)
            is_valid, data = self._update_vtk_fringe(icase_fringe, normalized_frings_scale)
            if not is_valid:
                return is_valid

            # unpack only the fields needed to refresh the scalar bar
            #icase = data.icase
            result_type = data.result_type
            #location = data.location
            min_value = data.min_value
            max_value = data.max_value
            #norm_value = data.norm_value
            #imin = data.imin
            #imax = data.imax
            data_format = data.data_format
            nlabels = data.nlabels
            labelsize = data.labelsize
            ncolors = data.ncolors
            colormap = data.colormap
            #subcase_id = data.subcase_id
            #subtitle = data.subtitle
            #label = data.label

            # keep the legend visibility unchanged while updating its contents
            is_legend_shown = self.scalar_bar.is_shown
            self.update_scalar_bar(result_type, min_value, max_value,
                                   data_format,
                                   nlabels=nlabels, labelsize=labelsize,
                                   ncolors=ncolors, colormap=colormap,
                                   is_shown=is_legend_shown)
        #obj.get_vector_array_by_phase(i, name, )
        is_valid = True
        return is_valid
    def make_gif_helper(self, gif_filename, icases_fringe, icases_disp, icases_vector,
                        scales, phases=None, isteps=None,
                        animate_fringe=False, animate_vector=False,
                        max_value=None, min_value=None,
                        time=2.0, analysis_time=2.0, fps=30, magnify=1,
                        onesided=True, nrepeat=0,
                        make_images=True, delete_images=False, make_gif=True):
        """
        Makes an animated gif

        Parameters
        ----------
        gif_filename : str / None
            path to the output gif & png folder; None animates on screen
            without writing any files
        icases_fringe/disp/vector : int / List[int]
            the result case to plot the deflection for
        scales : List[float]
            List[float] : the deflection scale factors; true scale
        phases : List[float]; default=None
            List[float] : the phase angles (degrees)
            None -> animate scale
        max_value : float; default=None
            the max value on the plot
        min_value : float; default=None
            the min value on the plot
        isteps : List[int]
            the png file numbers (let's you pick a subset of images)
            useful for when you press ``Step``
        time : float; default=2.0
            the runtime of the gif (seconds)
        analysis_time : float; default=2.0
            The time we actually need to simulate (seconds).
            We don't need to take extra pictures if they're just copies.
        fps : int; default=30
            the frames/second

        Options
        -------
        onesided : bool; default=True
            should the animation go up and back down
            True : the video will use images [0...N]
            False : the video will use images [0...N...0]
        nrepeat : int; default=0
            0 : loop infinitely
            1 : loop 1 time
            2 : loop 2 times

        Final Control Options
        ---------------------
        make_images : bool; default=True
            make the images
        delete_images : bool; default=False
            cleanup the png files at the end
        make_gif : bool; default=True
            actually make the gif at the end

        Other local variables
        ---------------------
        duration : float
            frame time (seconds)

        For one sided data
        ------------------
        - scales/phases should be one-sided
        - time should be one-sided
        - analysis_time should be one-sided
        - set onesided=True

        For two-sided data
        ------------------
        - scales/phases should be one-sided
        - time should be two-sided
        - analysis_time should be one-sided
        - set onesided=False

        Returns
        -------
        is_failed : bool
            True on an invalid animation case or a failed gif write
        """
        assert fps >= 1, fps
        nframes = ceil(analysis_time * fps)
        assert nframes >= 2, nframes
        unused_duration = time / nframes
        nframes = int(nframes)

        # normalize all the per-frame inputs to equal-length sequences
        phases, icases_fringe, icases_disp, icases_vector, isteps, scales = update_animation_inputs(
            phases, icases_fringe, icases_disp, icases_vector,
            isteps, scales, analysis_time, fps)

        if gif_filename is not None:
            png_dirname = os.path.dirname(os.path.abspath(gif_filename))
            if not os.path.exists(png_dirname):
                os.makedirs(png_dirname)

            png_filenames = []
            # e.g. 'out.gif' -> 'out_%03i.png' when nframes has 3 digits
            fmt = gif_filename[:-4] + '_%%0%ii.png' % (len(str(nframes)))

        # force the first frame to apply every case (nothing matches -1)
        icase_fringe0 = -1
        icase_disp0 = -1
        icase_vector0 = -1
        is_failed = True
        if make_images:
            scale_max = max(abs(scales.max()), abs(scales.min()))
            for istep, icase_fringe, icase_disp, icase_vector, scale, phase in zip(
                    isteps, icases_fringe, icases_disp, icases_vector, scales, phases):
                normalized_frings_scale = scale / scale_max
                is_valid = self.animation_update(
                    icase_fringe0, icase_disp0, icase_vector0,
                    icase_fringe, icase_disp, icase_vector,
                    scale, phase,
                    animate_fringe, animate_vector,
                    normalized_frings_scale,
                    min_value, max_value)
                if not is_valid:
                    return is_failed
                if gif_filename is not None:
                    png_filename = fmt % istep
                    self.on_take_screenshot(fname=png_filename, magnify=magnify)
                    png_filenames.append(png_filename)
        else:
            # reuse previously written pngs
            # NOTE(review): this branch uses `fmt`, which is only defined when
            # gif_filename is not None — presumably make_images=False is never
            # combined with gif_filename=None; verify against callers
            for istep in isteps:
                png_filename = fmt % istep
                png_filenames.append(png_filename)
                check_path(png_filename, 'png_filename')

        if gif_filename is not None and png_filenames:
            is_failed = write_gif(
                gif_filename, png_filenames, time=time,
                onesided=onesided,
                nrepeat=nrepeat, delete_images=delete_images,
                make_gif=make_gif)
        return is_failed
def ResetCamera(self):
self.GetCamera().ResetCamera()
def GetCamera(self):
return self.rend.GetActiveCamera()
    def _simulate_key_press(self, key):
        """
        A little hack method that simulates pressing the key for the VTK
        interactor. There is no easy way to instruct VTK to e.g. change mouse
        style to 'trackball' (as by pressing 't' key),
        (see http://public.kitware.com/pipermail/vtkusers/2011-November/119996.html)
        therefore we trick VTK to think that a key has been pressed.

        Parameters
        ----------
        key : str
            a key that VTK should be informed about, e.g. 't'
        """
        #print("key_press = ", key)
        if key == 'f':  # change focal point
            # 'f' would move the camera focal point; we deliberately swallow it
            #print('focal_point!')
            return

        # inject the key event directly into the underlying interactor
        self.vtk_interactor._Iren.SetEventInformation(0, 0, 0, 0, key, 0, None)
        self.vtk_interactor._Iren.KeyPressEvent()
        self.vtk_interactor._Iren.CharEvent()

        #if key in ['y', 'z', 'X', 'Y', 'Z']:
            #self.update_camera(key)
    def _finish_results_io2(self, model_name, form, cases, reset_labels=True):
        """
        Adds results to the Sidebar

        Parameters
        ----------
        model_name : str
            the name of the model; unused
        form : List[pairs]
            There are two types of pairs
            header_pair : (str, None, List[pair])
                defines a heading
                str : the sidebar label
                None : flag that there are sub-results
                List[pair] : more header/result pairs
            result_pair : (str, int, List[])
                str : the sidebar label
                int : the case id
                List[] : flag that there are no sub-results
        cases : dict[case_id] = result
            case_id : int
                the case id
            result : GuiResult
                the class that stores the result
        reset_labels : bool; default=True
            should the label actors be reset

        Examples
        --------
        form = [
            'Model', None, [
                ['NodeID', 0, []],
                ['ElementID', 1, []]
                ['PropertyID', 2, []]
            ],
            'time=0.0', None, [
                ['Stress', 3, []],
                ['Displacement', 4, []]
            ],
            'time=1.0', None, [
                ['Stress', 5, []],
                ['Displacement', 6, []]
            ],
        ]
        cases = {
            0 : GuiResult(...),  # NodeID
            1 : GuiResult(...),  # ElementID
            2 : GuiResult(...),  # PropertyID
            3 : GuiResult(...),  # Stress; t=0.0
            4 : GuiResult(...),  # Displacement; t=0.0
            5 : GuiResult(...),  # Stress; t=1.0
            6 : GuiResult(...),  # Displacement; t=1.0
        }
        case_keys = [0, 1, 2, 3, 4, 5, 6]
        """
        self.turn_text_on()
        self._set_results(form, cases)
        # assert len(cases) > 0, cases
        # if isinstance(cases, OrderedDict):
        #     self.case_keys = cases.keys()
        # else:
        #     self.case_keys = sorted(cases.keys())
        # assert isinstance(cases, dict), type(cases)

        self.on_update_geometry_properties(self.geometry_properties, write_log=False)
        # self.result_cases = cases

        #print("cases =", cases)
        #print("case_keys =", self.case_keys)

        self.reset_labels(reset_minus1=reset_labels)
        self.cycle_results_explicit()  # start at nCase=0

        if self.ncases:
            self.scalar_bar_actor.VisibilityOn()
            self.scalar_bar_actor.Modified()

        #data = [
        #    ('A', []),
        #    ('B', []),
        #    ('C', []),
        #]

        # NOTE(review): `data` is built below but never used afterwards;
        # presumably leftover from an older sidebar API — confirm before removing
        data = []
        for key in self.case_keys:
            assert isinstance(key, integer_types), key
            unused_obj, (i, unused_name) = self.result_cases[key]
            tuple_data = (i, [])
            data.append(tuple_data)

        self.res_widget.set_case_keys(self.case_keys)
        self.res_widget.update_results(form, self.name)

        key = self.case_keys[0]
        location = self.get_case_location(key)
        # NOTE(review): any non-empty location string selects 'centroid' —
        # verify that get_case_location returns ''/None for nodal results
        method = 'centroid' if location else 'nodal'

        data2 = [(method, None, [])]
        self.res_widget.update_methods(data2)

        if self.node_ids is None:  # pragma: no cover
            raise RuntimeError('implement self.node_ids for this format')
        #if self.element_ids is None:  # pragma: no cover
            #raise RuntimeError('implement self.element_ids for this format')

        if self.is_groups:
            # create the default 'main' group containing every element
            #eids = np.arange(172)
            #eids = []
            #self.hide_elements_mask(eids)
            elements_pound = self.element_ids[-1]
            main_group = Group(
                'main', '', elements_pound,
                editable=False)
            main_group.element_ids = self.element_ids
            self.groups['main'] = main_group
            self.post_group(main_group)
            #self.show_elements_mask(np.arange(self.nelements))

        # let plugins react to the freshly loaded geometry/results
        for unused_module_name, module in self.modules.items():
            module.post_load_geometry()
def clear_application_log(self, force=False):
"""
Clears the application log
Parameters
----------
force : bool; default=False
clears the dialog without asking
"""
# popup menu
if force:
self.log_widget.clear()
self.log_command('clear_application_log(force=%s)' % force)
else:
widget = QWidget()
title = 'Clear Application Log'
msg = 'Are you sure you want to clear the Application Log?'
result = QMessageBox.question(widget, title, msg,
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if result == QMessageBox.Yes:
self.log_widget.clear()
self.log_command(f'clear_application_log(force={force})')
#---------------------------------------------------------------------------------------
# PICKER
    @property
    def node_picker_size(self) -> float:
        """Gets the node picker size (a VTK picker tolerance fraction)"""
        return self.node_picker.GetTolerance()

    @node_picker_size.setter
    def node_picker_size(self, size: float) -> None:
        """Sets the node picker size; must be non-negative"""
        assert size >= 0., size
        self.node_picker.SetTolerance(size)
    @property
    def element_picker_size(self) -> float:
        """Gets the element picker size (a VTK cell-picker tolerance fraction)"""
        return self.cell_picker.GetTolerance()

    @element_picker_size.setter
    def element_picker_size(self, size: float) -> None:
        """Sets the element picker size; must be non-negative"""
        assert size >= 0., size
        self.cell_picker.SetTolerance(size)
#---------------------------------------------------------------------------------------
    def on_set_anti_aliasing(self, scale: int=0) -> None:
        """
        Enables line/polygon/point smoothing and sets the multisample
        (MSAA) count on the render window, then re-renders.

        Parameters
        ----------
        scale : int; default=0
            the number of multisamples; 0 disables multisampling
        """
        assert isinstance(scale, int), f'scale={scale!r}; type={type(scale)}'
        renwin = self.render_window
        renwin.LineSmoothingOn()
        renwin.PolygonSmoothingOn()
        renwin.PointSmoothingOn()
        renwin.SetMultiSamples(scale)
        self.vtk_interactor.Render()
        self.log_command(f'on_set_anti_aliasing({scale!r})')
#---------------------------------------------------------------------------------------
    def on_set_modify_groups(self) -> None:
        """
        Opens a dialog box to set:

        +--------+----------+
        |  Name  |  String  |
        +--------+----------+
        |  Min   |  Float   |
        +--------+----------+
        |  Max   |  Float   |
        +--------+----------+
        | Format | pyString |
        +--------+----------+

        Delegates to the module-level ``on_set_modify_groups`` helper.
        """
        on_set_modify_groups(self)
    def _apply_modify_groups(self, data) -> None:
        """called by on_set_modify_groups when apply is clicked"""
        # commit the edited groups, then re-post whichever group the
        # dialog currently has selected
        self.on_update_modify_groups(data)
        imain = self._modify_groups_window.imain
        name = self._modify_groups_window.keys[imain]
        self.post_group_by_name(name)
def on_update_modify_groups(self, out_data):
"""
Applies the changed groups to the different groups if
something changed.
"""
#self.groups = out_data
data = {}
groups_dict = {group_id: group for group_id, group in out_data.items()
if isinstance(group_id, integer_types)}
for unused_group_id, group in sorted(groups_dict.items()):
if not isinstance(group, Group):
continue
data[group.name] = group
self.groups = data
def populate_sub_qmenu(menu, items, actions):
    """
    Adds a named submenu to *menu* and fills it with the actions named
    by items[1:]; every action after the first starts unchecked.
    """
    submenu_title, *action_names = items
    sub_menu = menu.addMenu(submenu_title)
    for position, action_name in enumerate(action_names):
        if not isinstance(action_name, str):
            raise RuntimeError('what is this...action ii() = %r' % action_name())
        action = actions[action_name]
        if position > 0:
            action.setChecked(False)
        sub_menu.addAction(action)
def populate_sub_qtoolbar(toolbar, items, actions):
    """
    Adds a drop-down tool button to *toolbar*; items[0] names the default
    action and items[1:] name the actions in the drop-down menu.

    refs
    https://www.walletfox.com/course/customqtoolbutton.php
    https://stackoverflow.com/questions/9076332/qt-pyqt-how-do-i-create-a-drop-down-widget-such-as-a-qlabel-qtextbrowser-etc
    """
    sub_menu_name = items[0]
    action0 = actions[sub_menu_name]

    # a QToolButton with an instant-popup menu acts as the drop-down
    drop_down_menu = QMenu()
    custom_button = QToolButton()
    custom_button.setPopupMode(QToolButton.InstantPopup)
    custom_button.setMenu(drop_down_menu)
    custom_button.setDefaultAction(action0)
    toolbar.addWidget(custom_button)

    for unused_ii_count, itemi in enumerate(items[1:]):
        if not isinstance(itemi, str):
            raise RuntimeError('what is this...action ii() = %r' % itemi())
        action = actions[itemi]
        # temp
        #if ii_count > 0:
            #action.setChecked(False)
        drop_down_menu.addAction(action)  # thrown in the trash?
def get_image_reader(image_filename: str):
    """
    Returns a vtk image reader appropriate for the file's extension.

    Parameters
    ----------
    image_filename : str
        the image to load (.jpg/.jpeg/.png/.tif/.tiff/.bmp)

    Returns
    -------
    image_reader : vtkImageReader2
        the matching vtk reader instance

    Raises
    ------
    NotImplementedError
        if the extension is not a supported image type
    """
    # single source of truth for the supported formats; the original code
    # validated against one list and then dispatched against a duplicate
    # if/elif chain whose final `else` was unreachable
    reader_names = {
        '.jpg': 'vtkJPEGReader',
        '.jpeg': 'vtkJPEGReader',
        '.png': 'vtkPNGReader',
        '.tif': 'vtkTIFFReader',
        '.tiff': 'vtkTIFFReader',
        '.bmp': 'vtkBMPReader',
        #'.ps': 'vtkPostScriptReader',  # doesn't exist?
    }
    fmt = os.path.splitext(image_filename)[1].lower()
    try:
        reader_name = reader_names[fmt]
    except KeyError:
        msg = f'invalid image type={fmt!r}; filename={image_filename!r}'
        raise NotImplementedError(msg) from None
    return getattr(vtk, reader_name)()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,647
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py
|
"""defines cutting plane tests"""
import os
from itertools import count
from typing import Tuple, List, Any
import unittest
import numpy as np
#import PySide
from pyNastran.gui.matplotlib_backend import matplotlib_backend
try:
import matplotlib # pylint: disable=unused-import
IS_MATPLOTLIB = True
except ImportError: # pragma: no cover
IS_MATPLOTLIB = False
if IS_MATPLOTLIB:
matplotlib.use(matplotlib_backend)
import matplotlib.pyplot as plt # pylint: disable=unused-import
import pyNastran
from pyNastran.bdf.bdf import read_bdf, BDF, CORD2R
from cpylog import SimpleLogger
from pyNastran.bdf.mesh_utils.cut_model_by_plane import (
cut_edge_model_by_coord, cut_face_model_by_coord, connect_face_rows,
split_to_trias, calculate_area_moi)
from pyNastran.bdf.mesh_utils.cutting_plane_plotter import cut_and_plot_model
#from pyNastran.bdf.mesh_utils.bdf_merge import bdf_merge
from pyNastran.op2.op2_geom import read_op2_geom
PKG_PATH = pyNastran.__path__[0]
MODEL_PATH = os.path.join(PKG_PATH, '..', 'models')
class TestCuttingPlane(unittest.TestCase):
    """various cutting plane tests"""

    def test_cut_plate(self):
        """mode 10 is a sine wave; cuts the plate with no/real/complex results"""
        log = SimpleLogger(level='warning', encoding='utf-8', log_func=None)
        bdf_filename = os.path.join(MODEL_PATH, 'plate_py', 'plate_py.dat')
        op2_filename = os.path.join(MODEL_PATH, 'plate_py', 'plate_py.op2')
        model = read_bdf(bdf_filename, log=log)
        op2_model = read_op2_geom(op2_filename, log=log)

        title = 'Mode 10 Eigenvector'
        p1 = None
        p2 = None
        zaxis = None
        coord = CORD2R(1, rid=0, origin=[0., 0., 0.], zaxis=[0., 0., 1], xzplane=[1., 0., 0.],
                       comment='')
        model.coords[1] = coord
        ytol = 2.

        # no result
        nodal_result = None
        cut_and_plot_model(title, p1, p2, zaxis,
                           model, coord, nodal_result, model.log, ytol,
                           plane_atol=1e-5, csv_filename=None, invert_yaxis=False,
                           cut_type='edge', plot=False, show=False)

        # real; mode 10 (index 9), z-displacement (column 2)
        nodal_result = op2_model.eigenvectors[1].data[9, :, 2]

        cut_and_plot_model(title, p1, p2, zaxis,
                           model, coord, nodal_result, model.log, ytol,
                           plane_atol=1e-5, csv_filename='real_result.csv', invert_yaxis=False,
                           cut_type='edge', plot=IS_MATPLOTLIB, show=False)

        # complex
        nodal_result2 = np.asarray(nodal_result, dtype='complex64')
        nodal_result2.imag = -nodal_result.real
        cut_and_plot_model(title, p1, p2, zaxis,
                           model, coord, nodal_result2, model.log, ytol,
                           plane_atol=1e-5, csv_filename='complex_result.csv', invert_yaxis=True,
                           cut_type='edge', plot=IS_MATPLOTLIB, show=False)
        os.remove('real_result.csv')
        os.remove('complex_result.csv')

    def _test_cut_box(self):  # pragma: no cover
        """recover element ids"""
        log = SimpleLogger(level='warning', encoding='utf-8', log_func=None)
        #bdf_filename = r'SEction_1_box.bdf'  # x-axis
        #normal_plane = np.array([1., 0., 0.])
        bdf_filename = 'SEction_1_box_4.bdf'  # y-axis
        normal_plane = np.array([0., 1., 0.])
        # NOTE(review): confirm get_coords_box() accepts being called without
        # an ncuts argument
        dys, coords = get_coords_box()
        y, A, I, J, EI, GJ, avg_centroid, plane_bdf_filenames = cut_and_plot_moi(
            bdf_filename, normal_plane, log,
            dys, coords,
            ytol=0.0,
            plot=True, show=True)
        show = True
        if IS_MATPLOTLIB:
            plot_inertia(y, A, I, J, EI, GJ, avg_centroid, show=show)

    def test_cut_bwb(self):
        """recover element ids"""
        log = SimpleLogger(level='warning', encoding='utf-8', log_func=None)
        is_bwb = True
        if is_bwb:
            bdf_filename = os.path.join(MODEL_PATH, 'bwb', 'bwb_saero.bdf')  # ymax~=1262.0
            dys, coords = get_coords_bwb()
        else:  # pragma: no cover
            bdf_filename = r'C:\NASA\asm\all_modes_mach_0.85\flutter.bdf'  # ymax=1160.601
            dys, coords = get_coords_crm()
        normal_plane = np.array([0., 1., 0.])
        # NOTE(review): `J` appears twice in this unpacking; the second J
        # (the GJ slot) silently overwrites the first — likely a typo for GJ
        y, A, I, J, EI, J, avg_centroid, plane_bdf_filenames = cut_and_plot_moi(
            bdf_filename, normal_plane, log,
            dys, coords,
            ytol=2.0,
            plot=True, show=True)
        show = True
        #show = False
        if IS_MATPLOTLIB:
            GJ = J
            plot_inertia(y, A, I, J, EI, GJ, avg_centroid, show=show)
            os.remove('normalized_inertia_vs_span.png')
            os.remove('area_vs_span.png')
            os.remove('amoi_vs_span.png')
            os.remove('e_amoi_vs_span.png')
            os.remove('cg_vs_span.png')

        #bdf_merge(plane_bdf_filenames, bdf_filename_out='merge.bdf', renumber=True,
                  #encoding=None, size=8, is_double=False, cards_to_skip=None,
                  #log=None, skip_case_control_deck=False)

        for plane_bdf_filename in plane_bdf_filenames:
            os.remove(plane_bdf_filename)
        os.remove('thetas.csv')
        os.remove('equivalent_beam_model.bdf')
        os.remove('cut_data_vs_span.csv')
        #os.remove('cut_face.csv')
        #if IS_MATPLOTLIB:
            #os.remove('area_vs_span.png')
            #os.remove('amoi_vs_span.png')
            #os.remove('normalized_inertia_vs_span.png')
            #os.remove('cg_vs_span.png')
            #os.remove('e_amoi_vs_span.png')

    def test_cut_plate_eids(self):
        """recover element ids"""
        log = SimpleLogger(level='warning', encoding='utf-8', log_func=None)
        bdf_filename = os.path.join(MODEL_PATH, 'plate_py', 'plate_py.dat')
        model = read_bdf(bdf_filename, log=log)
        nnodes = len(model.nodes)
        nodal_result = np.ones(nnodes)

        coord = CORD2R(1, rid=0, origin=[0., 0., 0.], zaxis=[0., 0., 1], xzplane=[1., 0., 0.],
                       comment='')
        model.coords[1] = coord
        ytol = 2.
        unique_geometry_array, unique_results_array, unused_rods = cut_face_model_by_coord(
            bdf_filename, coord, ytol,
            nodal_result, plane_atol=1e-5, skip_cleanup=True,
            csv_filename='cut_face.csv',
            plane_bdf_filename='plane_face.bdf',
        )
        #print(unique_geometry_array)
        #print(unique_results_array)
        unique_geometry_array = np.array(unique_geometry_array)
        unique_results_array = np.array(unique_results_array)
        assert unique_geometry_array.shape == (1, 40, 4), unique_geometry_array.shape
        assert unique_results_array.shape == (1, 40, 7), unique_results_array.shape
        unique_geometry_array = unique_geometry_array[0, :, :]
        unique_results_array = unique_results_array[0, :, :]
        assert unique_geometry_array.shape == (40, 4), unique_geometry_array.shape
        assert unique_results_array.shape == (40, 7), unique_results_array.shape
        #print(unique_geometry_array)
        #print(unique_results_array)
        os.remove('cut_face.csv')
        os.remove('plane_face.bdf')

    def test_cut_shell_model_edge_1(self):
        """
        tests:
         - cut_edge_model_by_coord
         - cut_face_model_by_coord
        """
        model, nodal_result = _cut_shell_model_quads()
        coord = CORD2R(1, rid=0, origin=[0.5, 0., 0.], zaxis=[0.5, 0., 1], xzplane=[1.5, 0., 0.],
                       comment='')
        model.coords[1] = coord
        tol = 2.
        #-------------------------------------------------------------------------
        title = 'result'
        p1 = None
        p2 = None
        zaxis = None
        cut_and_plot_model(title, p1, p2, zaxis,
                           model, coord, nodal_result, model.log, tol,
                           plane_atol=1e-5,
                           csv_filename=None,
                           invert_yaxis=False,
                           cut_type='edge', plot=IS_MATPLOTLIB, show=False)
        #=========================================================================
        out = cut_edge_model_by_coord(
            model, coord, tol, nodal_result,
            plane_atol=1e-5)
        unused_local_points_array, unused_global_points_array, result_array = out
        assert len(result_array) == 16, len(result_array)

        unused_geometry_array, result_array, unused_rods = cut_face_model_by_coord(
            model, coord, tol, nodal_result,
            plane_atol=1e-5)
        result_array = np.array(result_array)
        assert result_array.shape == (1, 8, 7), result_array.shape
        #os.remove('plane_edge.bdf')
        os.remove('plane_face.bdf')

    def test_cut_shell_model_edge_2(self):
        """
        tests:
         - cut_edge_model_by_coord
         - cut_face_model_by_coord
        """
        tol = 2.
        coord = CORD2R(1, rid=0, origin=[0.5, 0., 0.], zaxis=[0.5, 0., 1], xzplane=[1.5, 0., 0.],
                       comment='')
        model, nodal_result = _cut_shell_model_quads()
        #-------------------------------------------------------------------------
        # triangles
        split_to_trias(model)
        model.coords[1] = coord
        model.write_bdf('tris.bdf')

        #print('----------------------------')

        title = 'result'
        p1 = None
        p2 = None
        zaxis = None
        cut_and_plot_model(title, p1, p2, zaxis,
                           model, coord, nodal_result, model.log, tol,
                           plane_atol=1e-5,
                           csv_filename=None,
                           invert_yaxis=False,
                           cut_type='edge', plot=IS_MATPLOTLIB, show=False)

        out = cut_edge_model_by_coord(
            model, coord, tol, nodal_result,
            plane_atol=1e-5, csv_filename='cut_edge_2.csv')
        unused_local_points_array, unused_global_points_array, result_array = out
        # twice the quad count of edge_1 minus the shared diagonals
        assert len(result_array) == 20, len(result_array)

        unused_geometry_arrays, result_arrays, unused_rods = cut_face_model_by_coord(
            model, coord, tol, nodal_result,
            plane_atol=1e-5, csv_filename='cut_face_2.csv')
        assert len(result_arrays[0]) == 8, len(result_arrays)
        os.remove('tris.bdf')
        os.remove('cut_edge_2.csv')
        os.remove('cut_face_2.csv')
        #os.remove('plane_edge.bdf')
        os.remove('plane_face.bdf')

    def test_cut_shell_model_face_1(self):
        """
        tests:
         - cut_edge_model_by_coord
         - cut_face_model_by_coord
        """
        tol = 2.
        coord = CORD2R(1, rid=0, origin=[0.5, 0., 0.], zaxis=[0.5, 0., 1], xzplane=[1.5, 0., 0.],
                       comment='')
        model, nodal_result = _cut_shell_model_quads()
        #-------------------------------------------------------------------------
        # triangles
        split_to_trias(model)
        model.coords[1] = coord
        model.write_bdf('tris.bdf')

        #print('----------------------------')

        title = 'result'
        p1 = None
        p2 = None
        zaxis = None
        #print(nodal_result)
        # the 'face' cut type is not supported by cut_and_plot_model
        with self.assertRaises(TypeError):
            cut_and_plot_model(title, p1, p2, zaxis,
                               model, coord, nodal_result, model.log, tol,
                               plane_atol=1e-5,
                               csv_filename=None,
                               invert_yaxis=False,
                               cut_type='face', plot=IS_MATPLOTLIB, show=False)
        os.remove('tris.bdf')

    def test_connect_face_rows(self):
        """in order"""
        geometry_array = np.array([
            [1, 1, 2],
            [2, 2, 3],
            [3, 3, 4],
            [4, 4, 5],
            [5, 5, 6],
        ])
        nedges = geometry_array.shape[0]
        results_array = np.arange(0, nedges)
        #print(results_array)
        iedges, unused_geometry_arrays2, unused_results_arrays2 = connect_face_rows(
            geometry_array, results_array, skip_cleanup=False)
        assert np.array_equal(iedges, [[0, 1, 2, 3, 4]]), 'iedges=%s' % iedges

        #-----------------------------------------------------------------------
        # out of order
        geometry_array = np.array([
            [1, 1, 2],  # 0
            [2, 4, 5],  # 3
            [3, 5, 6],  # 4
            [4, 3, 4],  # 2
            [5, 2, 3],  # 1
        ])
        nedges = geometry_array.shape[0]
        results_array = np.arange(0, nedges)
        iedges, unused_geometry_arrays2, unused_results_arrays2 = connect_face_rows(
            geometry_array, results_array, skip_cleanup=False)
        assert np.array_equal(iedges, [[0, 4, 3, 1, 2]]), 'iedges=%s' % iedges
        #print(geometry_array2)

        #-----------------------------------------------------------------------
        # in order, two blocks
        #print('*****************')
        geometry_array = np.array([
            # block 1
            [1, 1, 2],
            [2, 2, 3],
            [3, 3, 4],

            # block 2
            [10, 10, 20],
            [20, 20, 30],
            [30, 30, 40],
        ])
        nedges = geometry_array.shape[0]
        results_array = np.arange(0, nedges)
        #print(results_array)
        iedges, unused_geometry_array2, unused_results_array2 = connect_face_rows(
            geometry_array, results_array, skip_cleanup=False)
        assert np.array_equal(iedges, [[0, 1, 2], [3, 4, 5]]), 'iedges=%s' % iedges

    def test_connect_face_rows_ring_1(self):
        """in order, one ring"""
        geometry_array = np.array([
            [1, 1, 2],
            [2, 2, 3],
            [3, 3, 4],
            [4, 1, 4],
        ])
        nedges = geometry_array.shape[0]
        results_array = np.arange(0, nedges)
        #print(results_array)
        iedges, unused_geometry_array2, unused_results_array2 = connect_face_rows(
            geometry_array, results_array, skip_cleanup=False)
        # a closed loop repeats its starting edge index
        assert np.array_equal(iedges, [[0, 1, 2, 3, 0]]), 'iedges=%s' % iedges

    def test_connect_face_rows_ring_2(self):
        """in order, two rings"""
        geometry_array = np.array([
            [1, 1, 2],
            [2, 2, 3],
            [3, 3, 4],
            [4, 1, 4],

            [10, 10, 20],
            [20, 20, 30],
            [30, 30, 40],
            [40, 10, 40],
        ])
        nedges = geometry_array.shape[0]
        results_array = np.arange(0, nedges)
        #print(results_array)
        iedges, unused_geometry_array2, unused_results_array2 = connect_face_rows(
            geometry_array, results_array, skip_cleanup=False)
        assert np.array_equal(iedges, [[0, 1, 2, 3, 0], [4, 5, 6, 7, 4]]), 'iedges=%s' % iedges
def get_coords_bwb(ncuts=2000):
    """Builds the spanwise stations and cutting-plane coords for the BWB model."""
    stations = [100. * icut + 1. for icut in range(ncuts)]  # bwb
    cut_coords = [
        CORD2R(1, rid=0, origin=[0., dy, 0.], zaxis=[0., dy, 1], xzplane=[1., dy, 0.])
        for dy in stations
    ]
    return stations, cut_coords
def get_coords_crm(ncuts=2000):  # pragma: no cover
    """Builds the spanwise stations and cutting-plane coords for the CRM model."""
    stations = [4. * icut + 1. for icut in range(ncuts)]  # CRM
    cut_coords = [
        CORD2R(1, rid=0, origin=[0., dy, 0.], zaxis=[0., dy, 1], xzplane=[1., dy, 0.])
        for dy in stations
    ]
    return stations, cut_coords
def get_coords_box(ncuts=2000):  # pragma: no cover
    """
    Builds the stations and cutting-plane coords for the box model.

    Parameters
    ----------
    ncuts : int; default=2000
        the number of cutting planes; a default is required because
        ``_test_cut_box`` calls ``get_coords_box()`` with no argument
        (previously a TypeError); 2000 matches get_coords_bwb/get_coords_crm

    Returns
    -------
    dys : List[float]
        the y-stations of the cuts
    coords : List[CORD2R]
        the cutting-plane coordinate systems
    """
    dys = []
    coords = []
    for i in range(ncuts):
        dy = -0.1 * i - 0.1  # box
        coord = CORD2R(1, rid=0, origin=[0., dy, 0.], zaxis=[0., dy, 1], xzplane=[1., dy, 0.])
        dys.append(dy)
        coords.append(coord)
    return dys, coords
def cut_and_plot_moi(bdf_filename: str, normal_plane: np.ndarray, log: SimpleLogger,
                     dys: List[float],
                     coords: List[CORD2R],
                     ytol: float=2.0,
                     dirname: str='',
                     plot: bool=True, show: bool=False) -> Tuple[Any, Any, Any, Any, Any]:  # NOTE(review): annotation says 5 values but 8 are returned
    """
    Cuts the model at each station, computes the section properties
    (area, moments of inertia), writes an equivalent beam model and a
    per-station csv, and optionally plots the results.

    Returns
    -------
    (y, A, I, J, EI, GJ, avg_centroid, plane_bdf_filenames)
        per-station arrays plus the written plane bdf names
        (plane_bdf_filenames is [] when the plotting branch is skipped)
    """
    model = read_bdf(bdf_filename, log=log)
    model2 = read_bdf(bdf_filename, log=log)

    # initialize theta
    thetas = {}
    for eid in model.elements:
        # theta, Ex, Ey, Gxy
        thetas[eid] = (0., 0., 0., 0.)

    #p1 = np.array([466.78845, 735.9053, 0.0])
    #p2 = np.array([624.91345, 639.68896, -0.99763656])
    #dx = p2 - p1
    nodal_result = None

    plane_bdf_filenames = []
    y = []
    A = []
    I = []
    J = []
    EI = []
    GJ = []
    avg_centroid = []
    for i, dy, coord in zip(count(), dys, coords):
        model.coords[1] = coord
        plane_bdf_filename = os.path.join(dirname, f'plane_face_{i:d}.bdf')
        cut_face_filename = os.path.join(dirname, f'cut_face_{i:d}.csv')
        if os.path.exists(cut_face_filename):
            os.remove(cut_face_filename)
        try:
            out = cut_face_model_by_coord(
                model2, coord, ytol,
                nodal_result, plane_atol=1e-5, skip_cleanup=True,
                #csv_filename=cut_face_filename,
                csv_filename=None,
                #plane_bdf_filename=None)
                plane_bdf_filename=plane_bdf_filename, plane_bdf_offset=dy)
        except RuntimeError:
            # incorrect ivalues=[0, 1, 2]; dy=771. for CRM
            continue
        unused_unique_geometry_array, unused_unique_results_array, rods = out

        # once no plane bdf is produced we've run off the end of the model
        if not os.path.exists(plane_bdf_filename):
            break
        plane_bdf_filenames.append(plane_bdf_filename)
        # eid, nid, inid1, inid2
        #print(unique_geometry_array)
        #moi_filename = 'amoi_%i.bdf' % i
        moi_filename = None
        out = calculate_area_moi(model, rods, normal_plane, thetas, moi_filename=moi_filename)

        #print(out)
        Ai, Ii, EIi, avg_centroidi = out
        #Ai, Ii, Ji, EIi, GJi, avg_centroidi = out
        # torsion terms are placeholders; calculate_area_moi doesn't return them
        Ji = GJi = 1.0
        y.append(dy)
        A.append(Ai)
        I.append(Ii)
        J.append(Ji)
        EI.append(EIi)
        GJ.append(GJi)
        avg_centroid.append(avg_centroidi)
        #break

    thetas_csv_filename = os.path.join(dirname, 'thetas.csv')
    with open(thetas_csv_filename, 'w') as csv_filename:
        # NOTE(review): the '%i' below is never filled in; the header is
        # written with a literal '%i' — confirm whether an eid count was intended
        csv_filename.write('# eid(%i),theta,Ex,Ey,Gxy\n')
        for eid, (theta, Ex, Ey, Gxy) in sorted(thetas.items()):
            csv_filename.write('%i,%f,%f,%f,%f\n' % (eid, theta, Ex, Ey, Gxy))

    y = np.array(y, dtype='float64')
    A = np.array(A, dtype='float64')
    I = np.array(I, dtype='float64')
    J = np.array(J, dtype='float64')
    EI = np.array(EI, dtype='float64')
    GJ = np.array(GJ, dtype='float64')
    avg_centroid = np.array(avg_centroid, dtype='float64')

    inid = 1  # NOTE(review): shadowed by the add_grid loop below
    beam_model = BDF(debug=False)
    avg_centroid[:, 1] = y

    # wrong
    mid = 1
    E = 3.0e7
    G = None
    nu = 0.3
    # NOTE(review): the MAT1 is added to `model`, not `beam_model`; the
    # equivalent_beam_model.bdf written below references mid=1 but may not
    # contain it — confirm this should be beam_model.add_mat1
    model.add_mat1(mid, E, G, nu, rho=0.1)

    #       0    1    2    3    4    5
    # I = [Ixx, Iyy, Izz, Ixy, Iyz, Ixz]
    Ix = I[:, 0]
    Iy = I[:, 1]
    Iz = I[:, 2]
    Ixz = I[:, 5]
    # per-station torsion approximation; replaces the placeholder J array
    J = Ix + Iz
    #i1, i2, i12 = Ix, Iy, Ixy

    for inid, xyz in enumerate(avg_centroid):
        beam_model.add_grid(inid+1, xyz)
    for eid in range(1, len(A)):
        pid = eid
        nids = [eid, eid + 1]
        x = [1., 0., 0.]
        g0 = None
        beam_model.add_cbeam(eid, pid, nids, x, g0, offt='GGG', bit=None,
                             pa=0, pb=0, wa=None, wb=None, sa=0, sb=0, comment='')
        # j = i1 + i2
        so = ['YES', 'YES']
        xxb = [0., 1.]
        area = [A[eid-1], A[eid]]
        i1 = [Ix[eid-1], Ix[eid]]
        i2 = [Iz[eid-1], Iz[eid]]
        i12 = [Ixz[eid-1], Ixz[eid]]
        j = [J[eid-1], J[eid]]
        beam_model.add_pbeam(pid, mid, xxb, so, area, i1, i2, i12, j, nsm=None,
                             c1=None, c2=None, d1=None, d2=None, e1=None, e2=None, f1=None, f2=None,
                             k1=1., k2=1., s1=0., s2=0., nsia=0., nsib=None, cwa=0., cwb=None,
                             m1a=0., m2a=0., m1b=None, m2b=None,
                             n1a=0., n2a=0., n1b=None, n2b=None,
                             comment='')
    beam_model_bdf_filename = os.path.join(dirname, 'equivalent_beam_model.bdf')
    beam_model.write_bdf(beam_model_bdf_filename)

    X = np.vstack([y, A]).T
    Y = np.hstack([X, I, EI, avg_centroid])
    header = 'y, A, Ix, Iz, Ixz, Ex*Ix, Ex*Iz, Ex*Ixz, xcentroid, ycentroid, zcentroid'
    cut_data_span_filename = os.path.join(dirname, 'cut_data_vs_span.csv')
    np.savetxt(cut_data_span_filename, Y, header=header, delimiter=',')

    if IS_MATPLOTLIB and (plot or show):
        plot_inertia(y, A, I, J, EI, GJ, avg_centroid, show=show, dirname=dirname)
    else:
        # callers only clean up the plane bdfs when plotting happened
        plane_bdf_filenames = []
    return y, A, I, J, EI, GJ, avg_centroid, plane_bdf_filenames
def plot_inertia(y, A, I, J, EI, GJ, avg_centroid, ifig: int=1, show: bool=True, dirname: str=''):
    """
    Helper method for the cutting-plane tests; plots the spanwise beam
    properties and writes each figure to a png.

    Parameters
    ----------
    y : (n,) float ndarray
        spanwise station of each cut
    A : (n,) float ndarray
        cross-sectional area at each cut
    I : (n, 3) float ndarray
        area moments of inertia; columns are [Ixx, Izz, Ixz]
    J : (n,) float ndarray
        torsional constant (currently unplotted; kept for API symmetry)
    EI : (n, 3) float ndarray
        modulus-weighted inertias [E*Ixx, E*Izz, E*Ixz]
    GJ : (n,) float ndarray
        modulus-weighted torsional stiffness (only used by the
        commented-out GJ curve)
    avg_centroid : (n, 3) float ndarray
        centroid location of each cut
    ifig : int; default=1
        first matplotlib figure number to use
    show : bool; default=True
        call plt.show() at the end
    dirname : str; default=''
        directory to write the png files into

    Returns
    -------
    ifig : int
        the last figure number used
    """
    #plt.plot(y, I[:, 0] / I[:, 0].max(), 'ro-', label='Qxx')
    #plt.plot(y, I[:, 1] / I[:, 1].max(), 'bo-', label='Qyy')
    #plt.plot(y, I[:, 2] / I[:, 2].max(), 'go-', label='Qxy')

    # normalize by the absolute max so sign-changing curves stay in [-1, 1]
    aI = np.abs(I)
    aEI = np.abs(EI)
    aGJ = np.abs(GJ)  # kept for the commented-out GJ curve below
    fig = plt.figure(ifig)
    ax = fig.gca()
    ax.plot(y, I[:, 0] / aI[:, 0].max(), 'ro-', label='Ixx')
    ax.plot(y, I[:, 1] / aI[:, 1].max(), 'bo-', label='Izz')
    ax.plot(y, I[:, 2] / aI[:, 2].max(), 'go-', label='Ixz')
    ax.plot(y, EI[:, 0] / aEI[:, 0].max(), 'ro', label='EIxx', linestyle='--')
    ax.plot(y, EI[:, 1] / aEI[:, 1].max(), 'bo', label='EIzz', linestyle='--')
    ax.plot(y, EI[:, 2] / aEI[:, 2].max(), 'go', label='EIxz', linestyle='--')
    #ax.plot(y, GJ / aGJ.max(), 'go-', label='GJ', linestyle='--')
    ax.grid(True)
    ax.set_xlabel('Span, y')
    ax.set_ylabel('Normalized Area MOI, I')
    ax.legend()
    # bug fix: the pngs were written to the cwd, ignoring ``dirname``;
    # write them next to the other outputs (csv files) instead
    fig.savefig(os.path.join(dirname, 'normalized_inertia_vs_span.png'))
    #-------------------------------------------------------
    fig = plt.figure(ifig + 1)
    ax = fig.gca()
    ax.plot(y, A, 'ro', label='Area', linestyle='-')
    ax.grid(True)
    ax.set_xlabel('Span, y')
    ax.set_ylabel('Area, A')
    ax.legend()
    fig.savefig(os.path.join(dirname, 'area_vs_span.png'))
    #-------------------------------------------------------
    fig = plt.figure(ifig + 2)
    ax = fig.gca()
    ax.plot(y, I[:, 0], 'ro-', label='Ixx')
    ax.plot(y, I[:, 1], 'bo-', label='Izz')
    ax.plot(y, I[:, 2], 'go-', label='Ixz')
    ax.grid(True)
    ax.set_xlabel('Span, y')
    ax.set_ylabel('Area MOI, I')
    ax.legend()
    fig.savefig(os.path.join(dirname, 'amoi_vs_span.png'))
    #-------------------------------------------------------
    fig = plt.figure(ifig + 3)
    ax = fig.gca()
    ax.plot(y, EI[:, 0], 'ro-', label='EIxx')
    #ax.plot(y, I[:, 0], 'bo-', label='Ixx')
    ax.grid(True)
    ax.set_xlabel('Span, y')
    ax.set_ylabel('Exx*Area MOI, Exx*I')
    ax.legend()
    fig.savefig(os.path.join(dirname, 'e_amoi_vs_span.png'))
    #-------------------------------------------------------
    fig = plt.figure(ifig + 4)
    ax = fig.gca()
    ax.plot(y, avg_centroid[:, 0], 'ro-', label='xcg')
    ax.plot(y, avg_centroid[:, 2], 'bo-', label='zcg')
    ax.grid(True)
    ax.set_xlabel('Span, y')
    ax.set_ylabel('CG')
    ax.legend()
    fig.savefig(os.path.join(dirname, 'cg_vs_span.png'))
    #-------------------------------------------------------
    if show:
        plt.show()
    ifig += 4
    return ifig
def _cut_shell_model_quads():
    """builds the 4-CQUAD4 test model (3 panels cut the plane, 1 does not)
    and a linear nodal result for the 16 grid points"""
    log = SimpleLogger(level='error')
    pid = 10
    mid1 = 100
    model = BDF(log=log)

    # four unit quads; one CQUAD4 per group of 4 corner points
    quad_corner_points = [
        # intersects (min)
        [[0., 0., 0.], [1., 0., 0.], [1., 1., 0.], [0., 1., 0.]],
        # intersects (max)
        [[0., 0., 1.], [1., 0., 1.], [1., 1., 1.], [0., 1., 1.]],
        # intersects (mid)
        [[0., 0., 0.5], [1., 0., 0.5], [1., 1., 0.5], [0., 1., 0.5]],
        # doesn't intersect
        [[10., 0., 0.], [11., 0., 0.], [11., 1., 0.], [10., 1., 0.]],
    ]
    nid = 1
    for eid, corner_xyzs in enumerate(quad_corner_points, start=1):
        element_nids = []
        for xyz in corner_xyzs:
            model.add_grid(nid, xyz)
            element_nids.append(nid)
            nid += 1
        model.add_cquad4(eid, pid, element_nids)

    model.add_pshell(pid, mid1=mid1, t=2.)

    E = 1.0
    G = None
    nu = 0.3
    model.add_mat1(mid1, E, G, nu, rho=1.0)
    model.validate()

    model.cross_reference()

    #xyz_points = [
    #[0.4, 0.6, 0.], [-1., -1, 0.],]
    #tol = 2.
    nodal_result = np.linspace(0., 1., num=16)
    return model, nodal_result
if __name__ == '__main__':  # pragma: no cover
    # run this module's unittest test cases when executed directly
    unittest.main()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,648
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/cards/loads/dloads.py
|
# coding: utf-8
"""
All dynamic loads are defined in this file. This includes:
* ACSRCE
* DLOAD
* TLOAD1
* TLOAD2
* RLOAD1
* RLOAD2
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import numpy as np
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.bdf import MAX_INT
from pyNastran.bdf.field_writer_8 import set_blank_if_default
from pyNastran.bdf.bdf_interface.assign_type import (
integer, double_or_blank, integer_string_or_blank,
integer_double_or_blank, double)
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.field_writer_16 import print_card_16
from pyNastran.bdf.field_writer_double import print_card_double
from pyNastran.bdf.cards.loads.loads import DynamicLoad, LoadCombination, BaseCard
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
class ACSRCE(BaseCard):
    r"""
    Defines acoustic source as a function of power vs. frequency.

    +--------+-----+----------+---------------+-----------------+-------+-----+---+
    |    1   |  2  |    3     |       4       |        5        |   6   |  7  | 8 |
    +========+=====+==========+===============+=================+=======+=====+===+
    | ACSRCE | SID | EXCITEID | DELAYI/DELAYR | DPHASEI/DPHASER | TP/RP | RHO | B |
    +--------+-----+----------+---------------+-----------------+-------+-----+---+

    ..math ::
      C = \sqrt(B ⁄ ρ)
      Source Strength = {A} * 1/(2πf)  * \sqrt( 8πC P(f) / ρ) ^ (ei(θ + 2πfτ))

    """
    type = 'ACSRCE'

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal ACSRCE (used by HDF5/test infrastructure)"""
        sid = 1
        excite_id = 2
        rho = 3.
        b = 5.
        return ACSRCE(sid, excite_id, rho, b,
                      delay=0, dphase=0, power=0, comment='')

    def __init__(self, sid, excite_id, rho, b,
                 delay=0, dphase=0, power=0, comment=''):
        """
        Creates an ACSRCE card

        Parameters
        ----------
        sid : int
            load set id number (referenced by DLOAD)
        excite_id : int
            Identification number of a DAREA or SLOAD entry that lists
            each degree of freedom to apply the excitation and the
            corresponding scale factor, A, for the excitation
        rho : float
            Density of the fluid
        b : float
            Bulk modulus of the fluid
        delay : int; default=0
            Time delay, τ.
        dphase : int / float; default=0
            the dphase; if it's 0/blank there is no phase lag
            float : delay in units of time
            int : delay id
        power : int; default=0
            Power as a function of frequency, P(f).
            float : value of P(f) used over all frequencies for all
                    degrees of freedom in EXCITEID entry.
            int : TABLEDi entry that defines P(f) for all degrees of
                  freedom in EXCITEID entry.
        comment : str; default=''
            a comment for the card
        """
        if comment:
            self.comment = comment
        self.sid = sid
        self.excite_id = excite_id
        self.delay = delay
        self.dphase = dphase
        self.power = power
        self.rho = rho
        self.b = b
        # cross-referenced objects; populated by cross_reference()
        self.power_ref = None
        self.sloads_ref = None
        self.delay_ref = None
        self.dphase_ref = None
        #self.dphases_ref = None
        #self.delays_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a ACSRCE card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        excite_id = integer(card, 2, 'excite_id') # DAREA, FBALOAD, SLOAD
        delay = integer_double_or_blank(card, 3, 'delay', 0) # DELAY, FBADLAY
        dphase = integer_double_or_blank(card, 4, 'dphase', 0) # DPHASE, FBAPHAS
        power = integer_double_or_blank(card, 5, 'power/tp/rp', 0) # TABLEDi/power
        rho = double(card, 6, 'rho')
        b = double(card, 7, 'bulk modulus')
        assert len(card) <= 8, 'len(ACSRCE card) = %i\n%s' % (len(card), card)
        return ACSRCE(sid, excite_id, rho, b,
                      delay=delay, dphase=dphase, power=power, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        cmsg = ', which is required by ACSRCE=%s' % (self.sid)

        # TODO: excite_id = DAREA, FBALOAD, SLOAD
        # collect every SLOAD dof and every LSEQ referenced in model.loads
        sloads_ref = {}
        lseqs_ref = {}
        for load_id, loads in model.loads.items():
            for load in loads:
                if load.type == 'SLOAD':
                    #if load_id not in sloads_ref:
                        #sloads_ref[load_id] = []
                    for nid in load.node_ids:
                        # SLOADs act on component 0 (scalar points)
                        sloads_ref[(load_id, nid, 0)] = load
                elif load.type == 'LSEQ':
                    # NOTE(review): assumes the LSEQ was already
                    # cross-referenced (lid_ref populated) -- confirm ordering
                    load_idi = load.lid_ref[0].sid
                    excite_idi = load.excite_id
                    assert load_idi not in lseqs_ref
                    lseqs_ref[load_idi] = load
        if sloads_ref:
            self.sloads_ref = sloads_ref
            sload_keys = list(sloads_ref.keys())
        else:
            sload_keys = []

        if self.excite_id not in model.dareas and self.excite_id not in lseqs_ref:
            # diagnostic message only; the original deliberately does not raise
            darea_keys = list(model.dareas.keys())
            dphase_keys = list(model.dphases.keys())
            delay_keys = list(model.delays.keys())
            msg = 'excite_id=%s delay=%s dphase=%s\n' % (
                self.excite_id, self.delay, self.dphase)
            msg += ' darea_keys=%s\n' % darea_keys
            msg += ' sloads(load_id, nid, comp)=%s\n' % sload_keys
            msg += ' dphases(sid)=%s\n' % dphase_keys
            msg += ' delays(delay_id)=%s\n' % delay_keys
            #raise RuntimeError(msg)
            #print(msg)

        if isinstance(self.delay, integer_types) and self.delay > 0:
            # the same DELAY set is mapped onto every SLOAD dof
            delays_ref = {}
            for sload_key in sload_keys:
                delays_ref[sload_key] = model.DELAY(self.delay, msg=cmsg)
            if delays_ref:
                self.delay_ref = delays_ref

        if isinstance(self.dphase, integer_types) and self.dphase > 0:
            dphases_ref = {}
            for sload_key in sload_keys:
                dphases_ref[sload_key] = model.DPHASE(self.dphase, msg=cmsg)
            if dphases_ref:
                self.dphase_ref = dphases_ref

        if isinstance(self.power, integer_types) and self.power > 0:
            self.power_ref = model.TableD(self.power, msg=cmsg)

        #load_ids2 = []
        #for load_id in self.load_ids:
            #load_id2 = model.DLoad(load_id, consider_dload_combinations=False, msg=msg)
            #load_ids2.append(load_id2)
        #self.load_ids = load_ids2
        #self.load_ids_ref = self.load_ids

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.power = self.Power()
        self.dphase = self.DPhase()
        self.delay = self.Delay()
        #self.sloads = self.
        #self.tb = self.Tb()
        #self.tp = self.Tp()
        #self.delay = self.delay_id
        #if self.tb > 0:
            #del self.tb_ref
        #if self.tp > 0:
            #del self.tp_ref
        self.power_ref = None
        self.sloads_ref = None
        self.delay_ref = None
        self.dphase_ref = None
        #self.dphases_ref = None
        #self.delays_ref = None

    def safe_cross_reference(self, model: BDF, xref_errors):
        return self.cross_reference(model)

    #def uncross_reference(self) -> None:
        #self.load_ids = [self.LoadID(load) for load in self.load_ids]
        #del self.load_ids_ref

    def Delay(self):
        """returns the DELAY sid (or the constant/blank delay value)"""
        if self.delay_ref is not None:
            # bug fix: dict.values() is not an iterator in Python 3, so
            # next(self.delay_ref.values()) raised TypeError
            return next(iter(self.delay_ref.values())).sid
        elif self.delay in [0, 0.0]:
            return 0
        else:
            return self.delay

    def DPhase(self):
        """returns the DPHASE sid (or the constant/blank phase value)"""
        if self.dphase_ref is not None:
            # bug fix: the original read self.delay_ref (copy/paste) and a
            # nonexistent .tid attribute; DPHASE cards store their id as .sid
            return next(iter(self.dphase_ref.values())).sid
        elif self.dphase in [0, 0.0]:
            return 0
        else:
            return self.dphase

    def Power(self):
        """returns the TABLEDi id (or the constant power value)"""
        if self.power_ref is not None:
            return self.power_ref.tid
        return self.power

    def get_load_at_freq(self, freq):
        r"""
        ..math ::
          C = \sqrt(B ⁄ ρ)
          Source_strength = {A} * 1/(2πf)  * \sqrt( 8πC P(f) / ρ) ^ (ei(θ + 2πfτ))
        """
        C = np.sqrt(self.b / self.rho)
        ei = np.exp(1) * 1.j
        A = 0.0
        pi = np.pi
        if self.delay in [0, 0.]:
            tau = 0.
        else:
            #print('delay\n', self.delay_ref)
            # NOTE(review): delay_ref is a dict keyed by sload keys (see
            # cross_reference), so .value looks inconsistent -- confirm
            tau = self.delay_ref.value
        Pf = self.power_ref.interpolate(freq)
        if self.dphase in [0, 0.]:
            theta = 0.
        else:
            #print('dphase\n', self.dphase_ref)
            theta = self.dphase_ref.interpolate(freq)

        # NOTE(review): ``strength`` is computed but discarded; the method
        # always returns 0.0 -- the implementation looks incomplete upstream
        strength = A / (2.* pi * freq) * np.sqrt(8*pi*C*Pf / self.rho) ** (ei*(theta + 2*pi*freq*tau))

        return 0.0

    def raw_fields(self):
        list_fields = ['ACSRCE', self.sid, self.excite_id, self.Delay(), self.DPhase(),
                       self.Power(), self.rho, self.b]
        return list_fields

    def repr_fields(self):
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.raw_fields()
        if size == 16:
            return self.comment + print_card_16(card)
        return self.comment + print_card_8(card)
class DLOAD(LoadCombination):
    """
    +-------+-----+----+------+----+----+----+----+----+
    |   1   |  2  |  3 |   4  |  5 |  6 |  7 |  8 |  9 |
    +=======+=====+====+======+====+====+====+====+====+
    | DLOAD | SID |  S |  S1  | L1 | S2 | L2 | S3 | L3 |
    +-------+-----+----+------+----+----+----+----+----+
    |       | S4  | L4 | etc. |    |    |    |    |    |
    +-------+-----+----+------+----+----+----+----+----+
    """
    type = 'DLOAD'

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal DLOAD (used by HDF5/test infrastructure)"""
        sid = 1
        scale = 1.
        scale_factors = [1., 2.]
        load_ids = [1, 2]
        return DLOAD(sid, scale, scale_factors, load_ids, comment='')

    def __init__(self, sid, scale, scale_factors, load_ids, comment=''):
        """
        Creates a DLOAD card

        Parameters
        ----------
        sid : int
            Load set identification number. See Remarks 1. and 4. (Integer > 0)
        scale : float
            Scale factor. See Remarks 2. and 8. (Real)
        Si : List[float]
            Scale factors. See Remarks 2., 7. and 8. (Real)
        load_ids : List[int]
            Load set identification numbers of RLOAD1, RLOAD2, TLOAD1,
            TLOAD2, and ACSRCE entries. See Remarks 3 and 7. (Integer > 0)
        comment : str; default=''
            a comment for the card
        """
        LoadCombination.__init__(self, sid, scale, scale_factors, load_ids,
                                 comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        dload_ids2 = []
        msg = ', which is required by DLOAD=%s' % (self.sid)
        for dload_id in self.load_ids:
            dload_id2 = model.DLoad(dload_id, consider_dload_combinations=False, msg=msg)
            dload_ids2.append(dload_id2)
        self.load_ids_ref = dload_ids2

    def safe_cross_reference(self, model: BDF, xref_errors, debug=True):
        """like cross_reference, but only warns on missing dload ids"""
        dload_ids2 = []
        msg = ', which is required by DLOAD=%s' % (self.sid)
        for dload_id in self.load_ids:
            try:
                dload_id2 = model.DLoad(dload_id, consider_dload_combinations=False, msg=msg)
            except KeyError:
                if debug:
                    # bug fix: use a separate name; the original rebound
                    # ``msg``, corrupting the message passed to model.DLoad
                    # for every subsequent dload_id in this loop
                    warn_msg = 'Couldnt find dload_id=%i, which is required by %s=%s' % (
                        dload_id, self.type, self.sid)
                    model.log.warning(warn_msg)
                continue
            dload_ids2.append(dload_id2)
        self.load_ids_ref = dload_ids2

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.load_ids = [self.LoadID(dload) for dload in self.get_load_ids()]
        self.load_ids_ref = None

    def raw_fields(self):
        list_fields = ['DLOAD', self.sid, self.scale]
        for (scale_factor, load_id) in zip(self.scale_factors, self.get_load_ids()):
            list_fields += [scale_factor, self.LoadID(load_id)]
        return list_fields

    def repr_fields(self):
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.raw_fields()
        # force large-field format when an id would overflow 8 characters
        if size == 16 or max(self.get_load_ids()) > MAX_INT:
            return self.comment + print_card_16(card)
        return self.comment + print_card_8(card)
class RLOAD1(DynamicLoad):
    r"""
    Defines a frequency-dependent dynamic load of the form
    for use in frequency response problems.

    .. math::
      \left\{ P(f)  \right\}  = \left\{A\right\} [ C(f)+iD(f)]
         e^{  i \left\{\theta - 2 \pi f \tau \right\} }

    +--------+-----+----------+-------+--------+----+----+------+
    |   1    |  2  |     3    |   4   |    5   |  6 |  7 |   8  |
    +========+=====+==========+=======+========+====+====+======+
    | RLOAD1 | SID | EXCITEID | DELAY | DPHASE | TC | TD | TYPE |
    +--------+-----+----------+-------+--------+----+----+------+
    | RLOAD1 | 5   |    3     |       |        |  1 |    |      |
    +--------+-----+----------+-------+--------+----+----+------+

    NX allows DELAY and DPHASE to be floats
    """
    type = 'RLOAD1'
    _properties = ['delay_id', 'dphase_id']

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal RLOAD1 (used by HDF5/test infrastructure)"""
        sid = 1
        excite_id = 1
        return RLOAD1(sid, excite_id, delay=0, dphase=0, tc=0, td=0, Type='LOAD', comment='')

    def __init__(self, sid, excite_id, delay=0, dphase=0, tc=0, td=0, Type='LOAD', comment=''):
        """
        Creates an RLOAD1 card, which defines a frequency-dependent load
        based on TABLEDs.

        Parameters
        ----------
        sid : int
            load id
        excite_id : int
            node id where the load is applied
        delay : int/float; default=None
            the delay; if it's 0/blank there is no delay
            float : delay in units of time
            int : delay id
        dphase : int/float; default=None
            the dphase; if it's 0/blank there is no phase lag
            float : delay in units of time
            int : delay id
        tc : int/float; default=0
            TABLEDi id that defines C(f) for all degrees of freedom in
            EXCITEID entry
        td : int/float; default=0
            TABLEDi id that defines D(f) for all degrees of freedom in
            EXCITEID entry
        Type : int/str; default='LOAD'
            the type of load
            0/LOAD
            1/DISP
            2/VELO
            3/ACCE
            4, 5, 6, 7, 12, 13 - MSC only
        comment : str; default=''
            a comment for the card
        """
        DynamicLoad.__init__(self)
        if comment:
            self.comment = comment
        # normalize the integer/short-form load type to its string name
        Type = update_loadtype(Type)

        self.sid = sid
        self.excite_id = excite_id
        self.delay = delay
        self.dphase = dphase
        self.tc = tc
        self.td = td
        self.Type = Type
        assert sid > 0, self
        # cross-referenced objects; populated by cross_reference()
        self.tc_ref = None
        self.td_ref = None
        self.delay_ref = None
        self.dphase_ref = None

    def validate(self):
        """normalizes Type and rejects invalid load types"""
        msg = ''
        is_failed = False
        # NOTE(review): this message is appended when TC/TD are *present*
        # (the normal case) and is_failed is never set here, so it only
        # surfaces when Type is also invalid -- confirm the intent
        if self.tc > 0 or self.td > 0:
            msg += 'either RLOAD1 TC or TD > 0; tc=%s td=%s\n' % (self.tc, self.td)

        if self.Type in [0, 'L', 'LO', 'LOA', 'LOAD']:
            self.Type = 'LOAD'
        elif self.Type in [1, 'D', 'DI', 'DIS', 'DISP']:
            self.Type = 'DISP'
        elif self.Type in [2, 'V', 'VE', 'VEL', 'VELO']:
            self.Type = 'VELO'
        elif self.Type in [3, 'A', 'AC', 'ACC', 'ACCE']:
            self.Type = 'ACCE'
        else:
            msg += 'invalid RLOAD1 type Type=%r\n' % self.Type
            is_failed = True

        if is_failed:
            msg += str(self)
            raise RuntimeError(msg)
        assert self.sid > 0, self.sid

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a RLOAD1 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        excite_id = integer(card, 2, 'excite_id')
        delay = integer_double_or_blank(card, 3, 'delay', 0)
        dphase = integer_double_or_blank(card, 4, 'dphase', 0)
        tc = integer_double_or_blank(card, 5, 'tc', 0)
        td = integer_double_or_blank(card, 6, 'td', 0)
        Type = integer_string_or_blank(card, 7, 'Type', 'LOAD')

        assert len(card) <= 8, f'len(RLOAD1 card) = {len(card):d}\ncard={card}'
        return RLOAD1(sid, excite_id, delay, dphase, tc, td, Type, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by RLOAD1 sid=%s' % (self.sid)
        _cross_reference_excite_id(self, model, msg)
        # integer tc/td/delay/dphase are ids; floats are literal values
        # and need no xref
        if isinstance(self.tc, integer_types) and self.tc:
            self.tc_ref = model.TableD(self.tc, msg=msg)
        if isinstance(self.td, integer_types) and self.td:
            self.td_ref = model.TableD(self.td, msg=msg)
        if isinstance(self.delay, integer_types) and self.delay > 0:
            self.delay_ref = model.DELAY(self.delay_id, msg=msg)
        if isinstance(self.dphase, integer_types) and self.dphase > 0:
            self.dphase_ref = model.DPHASE(self.dphase, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors, ):
        # NOTE(review): identical to cross_reference; missing-card errors
        # are not caught here -- confirm that is intended
        msg = ', which is required by RLOAD1 sid=%s' % (self.sid)
        _cross_reference_excite_id(self, model, msg)
        if isinstance(self.tc, integer_types) and self.tc:
            self.tc_ref = model.TableD(self.tc, msg=msg)
        if isinstance(self.td, integer_types) and self.td:
            self.td_ref = model.TableD(self.td, msg=msg)
        if isinstance(self.delay, integer_types) and self.delay > 0:
            self.delay_ref = model.DELAY(self.delay_id, msg=msg)
        if isinstance(self.dphase, integer_types) and self.dphase > 0:
            self.dphase_ref = model.DPHASE(self.dphase, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.tc = self.Tc()
        self.td = self.Td()
        self.delay = self.delay_id
        self.dphase = self.dphase_id
        self.tc_ref = None
        self.td_ref = None
        self.delay_ref = None
        self.dphase_ref = None

    def get_loads(self):
        return [self]

    def Tc(self):
        """returns the TABLEDi id for C(f) (or 0/the literal value)"""
        if self.tc_ref is not None:
            return self.tc_ref.tid
        elif self.tc in [0, 0.0]:
            return 0
        return self.tc

    def Td(self):
        """returns the TABLEDi id for D(f) (or 0/the literal value)"""
        if self.td_ref is not None:
            return self.td_ref.tid
        elif self.td in [0, 0.0]:
            return 0
        return self.td

    @property
    def delay_id(self):
        """returns the DELAY sid (or 0/the literal delay value)"""
        if self.delay_ref is not None:
            return self.delay_ref.sid
        elif self.delay in [0, 0.]:
            return 0
        return self.delay

    @property
    def dphase_id(self):
        """returns the DPHASE sid (or 0/the literal phase value)"""
        if self.dphase_ref is not None:
            return self.dphase_ref.sid
        elif self.dphase in [0, 0.0]:
            return 0
        return self.dphase

    def get_load_at_freq(self, freq, scale=1.):
        """evaluates (C + iD)*exp(...) at one or more frequencies;
        NOTE(review): ``scale`` is currently unused -- confirm"""
        # A = 1. # points to DAREA or SPCD
        if isinstance(freq, float):
            freq = np.array([freq])
        else:
            freq = np.asarray(freq)

        # each of tc/td/dphase/delay may be a literal float, 0 (absent),
        # or an id resolved through its *_ref table
        if isinstance(self.tc, float):
            c = float(self.tc)
        elif self.tc == 0:
            c = 0.
        else:
            c = self.tc_ref.interpolate(freq)

        if isinstance(self.td, float):
            d = float(self.td)
        elif self.td == 0:
            d = 0.
        else:
            d = self.td_ref.interpolate(freq)

        if isinstance(self.dphase, float):
            dphase = self.dphase
        elif self.dphase == 0:
            dphase = 0.0
        else:
            nids, comps, dphases = self.dphase_ref.get_dphase_at_freq(freq)
            assert len(dphases) == 1, 'dphases=%s\n%s' % (dphases, self.dphase_ref)
            dphase = dphases[0]

        if isinstance(self.delay, float):
            tau = self.delay
        elif self.delay == 0:
            tau = 0.0
        else:
            nids, comps, taus = self.delay_ref.get_delay_at_freq(freq)
            assert len(taus) == 1, 'taus=%s\n%s' % (taus, self.delay_ref)
            tau = taus[0]

        # NOTE(review): the class docstring has e^{i(theta - 2*pi*f*tau)},
        # but this exponent is real (no 1j factor) -- confirm intended
        out = (c + 1.j * d) * np.exp(dphase - 2 * np.pi * freq * tau)
        return out

    def raw_fields(self):
        list_fields = ['RLOAD1', self.sid, self.excite_id, self.delay_id, self.dphase_id,
                       self.Tc(), self.Td(), self.Type]
        return list_fields

    def repr_fields(self):
        # blank out the default load type for compact output
        Type = set_blank_if_default(self.Type, 'LOAD')
        list_fields = ['RLOAD1', self.sid, self.excite_id, self.delay_id, self.dphase_id,
                       self.Tc(), self.Td(), Type]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.repr_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        if is_double:
            return self.comment + print_card_double(card)
        return self.comment + print_card_16(card)
def _cross_reference_excite_id_backup(self, model, msg):  # pragma: no cover
    """not quite done...not sure how to handle the very odd xref

    EXCITEID may refer to one or more static load entries (FORCE, PLOADi, GRAV, etc.).
    """
    # dead/backup implementation kept for reference; see
    # _cross_reference_excite_id for the live version
    excite_id_ref = []
    case_control = model.case_control_deck
    if case_control is not None:
        #print('cc = %r' % case_control)
        for key, subcase in sorted(model.case_control_deck.subcases.items()):
            #print(subcase, type(subcase))
            #if 'LOADSET' in subcase:
                #lseq_id = subcase['LOADSET'][0]
                #lseq = model.Load(lseq_id, consider_load_combinations=False, msg=msg)[0]
                #self.excite_id_ref = lseq
                ##self.dload_id = lseq.
            #if 'DLOAD' in subcase:
            # NOTE(review): these checks don't depend on ``subcase``, so the
            # same refs get appended once per subcase -- looks unintended
            if self.excite_id in model.loads:
                # FORCE, FORCE1, FORCE2, PLOAD4, GRAV
                # changes the magnitudes of the load, not the direction
                model.log.debug('excite_id load = %s' % self.excite_id)
                #print('  dloads =', list(model.dloads.keys()))
                #print('  dareas =', list(model.dareas.keys()))
                excite_id_ref += model.loads[self.excite_id]
            if self.excite_id in model.dareas:
                model.log.debug('excite_id darea = %s' % self.excite_id)
                darea_ref = model.DAREA(self.excite_id, msg=msg)
                excite_id_ref.append(darea_ref)
            if self.excite_id in model.dload_entries:
                # this is probably wrong...
                # it was added to pass TestLoads.test_loads_nonlinear_thermal1, but
                # I think QVECT should be in self.loads, not self.dload_entries...
                model.log.debug('excite_id dload_entries = %s' % self.excite_id)
                excite_id_ref += model.dload_entries
            # what about TEMPBC?
            #else:
                #msg = ('LOADSET and DLOAD are not found in the case control deck\n%s' %
                       #str(model.case_control_deck))
                #raise RuntimeError(msg)
    #else:
        #model.log.warning('could not find excite_id=%i for\n%s' % (self.excite_id, str(self)))
        #self.excite_id_ref = model.DAREA(self.excite_id, msg=msg)
    # nothing matched anywhere -> dump diagnostics and fail loudly
    if len(excite_id_ref) == 0:
        print('excite_id = %s' % self.excite_id)
        print('  loads =', list(model.loads.keys()))
        print('  dareas =', list(model.dareas.keys()))
        print('  dloads =', list(model.dloads.keys()))
        print('  dload_entries =', list(model.dload_entries.keys()))
        model.log.warning('could not find excite_id=%i for\n%s' % (self.excite_id, str(self)))
        raise RuntimeError('could not find excite_id=%i for\n%s' % (self.excite_id, str(self)))
def get_lseqs_by_excite_id(model, excite_id):
    """collects the LSEQ loads whose EXCITEID matches *excite_id*,
    grouped by the load-combination sid they live under"""
    from collections import defaultdict
    lseq_sids = defaultdict(list)
    for sid, loads in model.load_combinations.items():
        for load in loads:
            # only LSEQ cards with the matching EXCITEID count; other
            # load types under the same sid are ignored
            if load.type == 'LSEQ' and excite_id == load.excite_id:
                lseq_sids[sid].append(load)
    return lseq_sids
def _cross_reference_excite_id(self, model, msg):
    """not quite done...not sure how to handle the very odd xref

    EXCITEID may refer to one or more static load entries (FORCE, PLOADi, GRAV, etc.).
    """
    #print('*' * 80)
    # LSEQs whose EXCITEID matches ours, grouped by their sid
    lseq_sids = get_lseqs_by_excite_id(model, self.excite_id)

    # find all the LOADSETs in the model
    # LOADSETs reference LSEQs by sid
    valid_lseqs = []
    if lseq_sids:
        # get the sid for the LSEQ
        case_control = model.case_control_deck
        if case_control is not None:
            #print('cc = %r' % case_control)
            for key, subcase in sorted(model.case_control_deck.subcases.items()):
                if 'LOADSET' in subcase:
                    lseq_sid = subcase['LOADSET'][0]
                    if lseq_sid in lseq_sids:
                        model.log.debug('adding LOADSET = %i' % lseq_sid)
                        valid_lseqs.append(lseq_sid)
            if valid_lseqs:
                # dedupe: the same LOADSET can appear in several subcases
                valid_lseqs = list(set(valid_lseqs))
                valid_lseqs.sort()
        #assert len(valid_lseqs) == 1, 'valid_lseqs=%s' % valid_lseqs
    #print('valid_lseqs =', valid_lseqs)
    # can Case Control LOADSET be substituded for Case Control DLOAD id?

    # collect every object the EXCITEID could resolve to
    excite_id_ref = []
    if self.excite_id in model.loads:
        # FORCE, FORCE1, FORCE2, PLOAD4, GRAV
        # changes the magnitudes of the load, not the direction
        model.log.debug('excite_id load = %s' % self.excite_id)
        #print('  dloads =', list(model.dloads.keys()))
        #print('  dareas =', list(model.dareas.keys()))
        excite_id_ref += model.loads[self.excite_id]
    if self.excite_id in model.dareas:
        model.log.debug('excite_id darea = %s' % self.excite_id)
        darea_ref = model.DAREA(self.excite_id, msg=msg)
        excite_id_ref.append(darea_ref)
    if self.excite_id in model.bcs:
        # CONV, TEMPBC
        model.log.debug('excite_id bcs = %s' % self.excite_id)
        # NOTE(review): ``=`` discards any refs gathered above (vs ``+=``
        # used by the other branches) -- confirm that is intended
        excite_id_ref = model.bcs[self.excite_id]
    if self.excite_id in model.dload_entries:  # this is probably wrong...
        # this is probably wrong...
        # it was added to pass TestLoads.test_loads_nonlinear_thermal1, but
        # I think QVECT should be in self.loads, not self.dload_entries...
        model.log.debug('excite_id dload_entries = %s' % self.excite_id)
        # NOTE(review): ``+=`` on a dict extends the list with the dict's
        # *keys*, not its values -- confirm that is intended
        excite_id_ref += model.dload_entries
    if self.excite_id in model.load_combinations:  # this should be right...
        # C:\NASA\m4\formats\git\examples\move_tpl\nlstrs2.op2
        model.log.debug('excite_id load_combinations = %s' % self.excite_id)
        excite_id_ref = model.load_combinations[self.excite_id]

    # handles LSEQ
    if valid_lseqs:
        for lseq_sid in valid_lseqs:
            excite_id_ref += lseq_sids[lseq_sid]
    # what about SPCD?

    # nothing matched anywhere -> dump diagnostics and fail loudly
    if len(excite_id_ref) == 0:
        print(model.get_bdf_stats())
        print('excite_id = %s' % self.excite_id)
        print('  loads =', list(model.loads.keys()))
        print('  dareas =', list(model.dareas.keys()))
        print('  bcs =', list(model.bcs.keys()))
        print('  dloads =', list(model.dloads.keys()))
        print('  dload_entries =', list(model.dload_entries.keys()))
        print('  load_combinations =', list(model.load_combinations.keys()))  # what about LSEQ
        if lseq_sids:
            sids = list(lseq_sids.keys())
            print('  lseq_excite_ids=%s; lseq_sids=%s; valid_lseqs=%s' % (
                self.excite_id, sids, valid_lseqs))
        else:
            print('  lseq_sids = []')
        model.log.warning('could not find excite_id=%i for\n%s' % (self.excite_id, str(self)))
        raise RuntimeError('could not find excite_id=%i for\n%s' % (self.excite_id, str(self)))
class RLOAD2(DynamicLoad):
    r"""
    Defines a frequency-dependent dynamic load of the form
    for use in frequency response problems.

    .. math:: \left\{ P(f) \right\} = \left\{A\right\} * B(f)
        e^{ i \left\{ \phi(f) + \theta - 2 \pi f \tau \right\} }

    +--------+-----+----------+-------+--------+----+----+------+
    |   1    |  2  |    3     |   4   |   5    | 6  | 7  |  8   |
    +========+=====+==========+=======+========+====+====+======+
    | RLOAD2 | SID | EXCITEID | DELAY | DPHASE | TB | TP | TYPE |
    +--------+-----+----------+-------+--------+----+----+------+
    | RLOAD2 |  5  |    3     |       |        | 1  |    |      |
    +--------+-----+----------+-------+--------+----+----+------+

    NX allows DELAY and DPHASE to be floats
    """
    type = 'RLOAD2'
    _properties = ['delay_id', 'dphase_id']

    @classmethod
    def _init_from_empty(cls):
        """creates a placeholder RLOAD2; used by the class-based test/hdf5 infrastructure"""
        sid = 1
        excite_id = 1
        return RLOAD2(sid, excite_id, delay=0, dphase=0, tb=0, tp=0, Type='LOAD', comment='')

    # P(f) = {A} * B(f) * e^(i*(phi(f) + theta - 2*pi*f*tau))
    def __init__(self, sid, excite_id, delay=0, dphase=0, tb=0, tp=0, Type='LOAD', comment=''):
        """
        Creates an RLOAD2 card, which defines a frequency-dependent load
        based on TABLEDs.

        Parameters
        ----------
        sid : int
            load id
        excite_id : int
            node id where the load is applied
        delay : int/float; default=0
            the delay; if it's 0/blank there is no delay
            float : delay in units of time
            int : delay id
        dphase : int/float; default=0
            the dphase; if it's 0/blank there is no phase lag
            float : phase lag
            int : dphase id
        tb : int/float; default=0
            TABLEDi id that defines B(f) for all degrees of freedom in
            EXCITEID entry
        tp : int/float; default=0
            TABLEDi id that defines phi(f) for all degrees of freedom in
            EXCITEID entry
        Type : int/str; default='LOAD'
            the type of load
            0/LOAD
            1/DISP
            2/VELO
            3/ACCE
            4, 5, 6, 7, 12, 13 - MSC only
        comment : str; default=''
            a comment for the card
        """
        DynamicLoad.__init__(self)
        if comment:
            self.comment = comment
        # normalize 0/'L'/'LO'/... aliases to the canonical string form
        Type = update_loadtype(Type)
        self.sid = sid
        self.excite_id = excite_id
        self.delay = delay
        self.dphase = dphase
        self.tb = tb
        self.tp = tp
        self.Type = Type
        # cross-referenced objects; populated by (safe_)cross_reference
        self.tb_ref = None
        self.tp_ref = None
        self.delay_ref = None
        self.dphase_ref = None

    def validate(self):
        """verifies the card's fields; normalizes ``Type`` to a string"""
        msg = ''
        is_failed = False
        if self.tb > 0 or self.tp > 0:
            # NOTE(review): this message is accumulated but is_failed is NOT
            # set here, so it is only ever reported when Type is also
            # invalid; preserved as-is to avoid a behavior change
            msg += 'either RLOAD2 TB or TP > 0; tb=%s tp=%s\n' % (self.tb, self.tp)

        if self.Type in [0, 'L', 'LO', 'LOA', 'LOAD']:
            self.Type = 'LOAD'
        elif self.Type in [1, 'D', 'DI', 'DIS', 'DISP']:
            self.Type = 'DISP'
        elif self.Type in [2, 'V', 'VE', 'VEL', 'VELO']:
            self.Type = 'VELO'
        elif self.Type in [3, 'A', 'AC', 'ACC', 'ACCE']:
            self.Type = 'ACCE'
        else:
            msg += 'invalid RLOAD2 type Type=%r\n' % self.Type
            is_failed = True

        if is_failed:
            msg += str(self)
            raise RuntimeError(msg)
        assert self.sid > 0, self.sid

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a RLOAD2 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        excite_id = integer(card, 2, 'excite_id')
        delay = integer_double_or_blank(card, 3, 'delay', 0)
        dphase = integer_double_or_blank(card, 4, 'dphase', 0)
        tb = integer_double_or_blank(card, 5, 'tb', 0)
        tp = integer_double_or_blank(card, 6, 'tp', 0)
        Type = integer_string_or_blank(card, 7, 'Type', 'LOAD')
        assert len(card) <= 8, f'len(RLOAD2 card) = {len(card):d}\ncard={card}'
        return RLOAD2(sid, excite_id, delay, dphase, tb, tp, Type, comment=comment)

    def get_load_at_freq(self, freq, scale=1.):
        """
        Evaluates P(f) = A * B(f) * e^{i*(phi(f) + theta - 2*pi*f*tau)}
        at the given frequency (A=1.; the DAREA/SPCD scaling is applied
        by the caller).
        """
        # A = 1. # points to DAREA or SPCD
        # B(f): either a direct float or a TABLEDi lookup
        if isinstance(self.tb, float):
            b = self.tb
        elif self.tb == 0:
            b = 0.0
        else:
            b = self.tb_ref.interpolate(freq)

        # phi(f): either a direct float or a TABLEDi lookup
        if isinstance(self.tp, float):
            p = self.tp
        elif self.tp == 0:
            p = 0.0
        else:
            p = self.tp_ref.interpolate(freq)

        # theta: direct float or a DPHASE lookup (single dof expected)
        if isinstance(self.dphase, float):
            dphase = self.dphase
        elif self.dphase == 0 or self.dphase is None:
            dphase = 0.0
        else:
            nids, comps, dphases = self.dphase_ref.get_dphase_at_freq(freq)
            assert len(dphases) == 1, dphases
            dphase = dphases[0]

        # tau: direct float or a DELAY lookup (single dof expected)
        if isinstance(self.delay, float):
            tau = self.delay
        elif self.delay == 0:
            tau = 0.0
        else:
            nids, comps, taus = self.delay_ref.get_delay_at_freq(freq)
            assert len(taus) == 1, taus
            tau = taus[0]

        try:
            # BUG FIX: the entire phase (p + dphase - 2*pi*f*tau) must be
            # multiplied by 1j per the card formula; previously only p was
            # imaginary, turning dphase/tau into a real exponential gain
            # NOTE(review): p/dphase are applied as radians here — confirm
            # whether the tables/DPHASE supply degrees
            out = b * np.exp(1.j * (p + dphase - 2 * np.pi * freq * tau))
        except TypeError:
            # dump the operands before re-raising so a bad table/DPHASE
            # lookup is easy to diagnose
            print('b =', b)
            print('p =', p)
            print('dphase =', dphase)
            print('freq =', freq)
            print('tau =', tau)
            raise
        return out

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by RLOAD2=%s' % (self.sid)
        _cross_reference_excite_id(self, model, msg)
        if isinstance(self.tb, integer_types) and self.tb:
            self.tb_ref = model.TableD(self.tb, msg=msg)
        if isinstance(self.tp, integer_types) and self.tp:
            self.tp_ref = model.TableD(self.tp, msg=msg)
        if isinstance(self.delay, integer_types) and self.delay > 0:
            self.delay_ref = model.DELAY(self.delay, msg=msg)
        if isinstance(self.dphase, integer_types) and self.dphase > 0:
            self.dphase_ref = model.DPHASE(self.dphase, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors):
        """cross-references the card; errors are collected instead of raised"""
        msg = ', which is required by RLOAD2=%s' % (self.sid)
        _cross_reference_excite_id(self, model, msg)
        if isinstance(self.tb, integer_types) and self.tb:
            self.tb_ref = model.TableD(self.tb, msg=msg)
        if isinstance(self.tp, integer_types) and self.tp:
            self.tp_ref = model.TableD(self.tp, msg=msg)
        if isinstance(self.delay, integer_types) and self.delay > 0:
            self.delay_ref = model.DELAY(self.delay, msg=msg)
        if isinstance(self.dphase, integer_types) and self.dphase > 0:
            self.dphase_ref = model.DPHASE(self.dphase, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # collapse the *_ref objects back into plain ids before dropping them
        self.tb = self.Tb()
        self.tp = self.Tp()
        self.delay = self.delay_id
        self.dphase = self.dphase_id
        self.tb_ref = None
        self.tp_ref = None
        self.delay_ref = None
        self.dphase_ref = None

    def get_loads(self):
        """returns the loads (self); RLOAD2 is a leaf load"""
        return [self]

    def LoadID(self):
        """returns the load id"""
        return self.sid

    def Tb(self):
        """returns the TABLEDi id for B(f), resolving the xref if present"""
        if self.tb_ref is not None:
            return self.tb_ref.tid
        elif self.tb == 0:
            return 0
        return self.tb

    def Tp(self):
        """returns the TABLEDi id for phi(f), resolving the xref if present"""
        if self.tp_ref is not None:
            return self.tp_ref.tid
        elif self.tp == 0:
            return 0
        return self.tp

    @property
    def delay_id(self):
        """returns the DELAY id (or the float delay), resolving the xref if present"""
        if self.delay_ref is not None:
            return self.delay_ref.sid
        elif self.delay == 0:
            return 0
        return self.delay

    @property
    def dphase_id(self):
        """returns the DPHASE id (or the float phase), resolving the xref if present"""
        if self.dphase_ref is not None:
            return self.dphase_ref.sid
        elif self.dphase == 0:
            return 0
        return self.dphase

    def raw_fields(self):
        list_fields = ['RLOAD2', self.sid, self.excite_id, self.delay_id, self.dphase_id,
                       self.Tb(), self.Tp(), self.Type]
        return list_fields

    def repr_fields(self):
        # NOTE(review): comparing the (string) Type against 0.0 means Type
        # is never blanked; preserved as-is to keep card output unchanged
        Type = set_blank_if_default(self.Type, 0.0)
        list_fields = ['RLOAD2', self.sid, self.excite_id, self.delay_id, self.dphase_id,
                       self.Tb(), self.Tp(), Type]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small/large/double field format"""
        card = self.repr_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        if is_double:
            return self.comment + print_card_double(card)
        return self.comment + print_card_16(card)
class TLOAD1(DynamicLoad):
    r"""
    Transient Response Dynamic Excitation, Form 1

    Defines a time-dependent dynamic load or enforced motion of the form:

    .. math::
        \left\{ P(t) \right\} = \left\{ A \right\} \cdot F(t-\tau)

    for use in transient response analysis.

    MSC 20005.2
    +--------+-----+----------+-------+------+-----+-----+-----+
    |   1    |  2  |    3     |   4   |  5   |  6  |  7  |  8  |
    +========+=====+==========+=======+======+=====+=====+=====+
    | TLOAD1 | SID | EXCITEID | DELAY | TYPE | TID | US0 | VS0 |
    +--------+-----+----------+-------+------+-----+-----+-----+

    NX 11
    +--------+-----+----------+-------+------+-----+
    |   1    |  2  |    3     |   4   |  5   |  6  |
    +========+=====+==========+=======+======+=====+
    | TLOAD1 | SID | EXCITEID | DELAY | TYPE | TID |
    +--------+-----+----------+-------+------+-----+
    """
    type = 'TLOAD1'
    _properties = ['delay_id']

    @classmethod
    def _init_from_empty(cls):
        # placeholder instance; used by the class-based infrastructure
        sid = 1
        excite_id = 1
        tid = 1
        return TLOAD1(sid, excite_id, tid, delay=0, Type='LOAD', us0=0.0, vs0=0.0, comment='')

    def __init__(self, sid, excite_id, tid, delay=0, Type='LOAD',
                 us0=0.0, vs0=0.0, comment=''):
        """
        Creates a TLOAD1 card, which defines a time-dependent load
        based on a DTABLE.

        Parameters
        ----------
        sid : int
            load id
        excite_id : int
            node id where the load is applied
        tid : int
            TABLEDi id that defines F(t) for all degrees of freedom in
            EXCITEID entry
            float : MSC not supported
        delay : int/float; default=None
            the delay; if it's 0/blank there is no delay
            float : delay in units of time
            int : delay id
        Type : int/str; default='LOAD'
            the type of load
            0/LOAD
            1/DISP
            2/VELO
            3/ACCE
            4, 5, 6, 7, 12, 13 - MSC only
        us0 : float; default=0.
            Factor for initial displacements of the enforced degrees-of-freedom
            MSC only
        vs0 : float; default=0.
            Factor for initial velocities of the enforced degrees-of-freedom
            MSC only
        comment : str; default=''
            a comment for the card
        """
        DynamicLoad.__init__(self)
        # a blank DELAY field means "no delay"
        if delay is None:
            delay = 0
        # normalize 0/'L'/'LO'/... aliases to the canonical string form
        Type = update_loadtype(Type)
        if comment:
            self.comment = comment
        #: load ID
        self.sid = sid
        #: Identification number of DAREA or SPCD entry set or a thermal load
        #: set (in heat transfer analysis) that defines {A}. (Integer > 0)
        self.excite_id = excite_id
        #: If it is a non-zero integer, it represents the
        #: identification number of DELAY Bulk Data entry that defines .
        #: If it is real, then it directly defines the value of that will
        #: be used for all degrees-of-freedom that are excited by this
        #: dynamic load entry.  See also Remark 9. (Integer >= 0,
        #: real or blank)
        self.delay = delay
        #: Defines the type of the dynamic excitation. (LOAD,DISP, VELO, ACCE)
        self.Type = Type
        #: Identification number of TABLEDi entry that gives F(t). (Integer > 0)
        self.tid = tid
        #: Factor for initial displacements of the enforced degrees-of-freedom.
        #: (Real; Default = 0.0)
        self.us0 = us0
        #: Factor for initial velocities of the enforced degrees-of-freedom.
        #: (Real; Default = 0.0)
        self.vs0 = vs0
        # cross-referenced objects; populated by (safe_)cross_reference
        self.tid_ref = None
        self.delay_ref = None

    def validate(self):
        # normalize the many accepted aliases of TYPE to a canonical string
        if self.Type in [0, 'L', 'LO', 'LOA', 'LOAD']:
            self.Type = 'LOAD'
        elif self.Type in [1, 'D', 'DI', 'DIS', 'DISP']:
            self.Type = 'DISP'
        elif self.Type in [2, 'V', 'VE', 'VEL', 'VELO']:
            self.Type = 'VELO'
        elif self.Type in [3, 'A', 'AC', 'ACC', 'ACCE']:
            self.Type = 'ACCE'
        elif self.Type in [4, 5, 6, 7, 12, 13]: # MSC-only
            pass
        else:
            msg = 'invalid TLOAD1 type  Type=%r' % self.Type
            raise AssertionError(msg)
        assert self.sid > 0, self.sid

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a TLOAD1 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        excite_id = integer(card, 2, 'excite_id')
        delay = integer_double_or_blank(card, 3, 'delay', 0)
        Type = integer_string_or_blank(card, 4, 'Type', 'LOAD')
        tid = integer(card, 5, 'tid')
        us0 = double_or_blank(card, 6, 'us0', 0.0)
        vs0 = double_or_blank(card, 7, 'vs0', 0.0)
        assert len(card) <= 8, f'len(TLOAD1 card) = {len(card):d}\ncard={card}'
        return TLOAD1(sid, excite_id, tid, delay=delay, Type=Type, us0=us0, vs0=vs0, comment=comment)

    def get_loads(self):
        # TLOAD1 is a leaf load
        return [self]

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by TLOAD1=%s' % (self.sid)
        _cross_reference_excite_id(self, model, msg)
        if self.tid:
            self.tid_ref = model.TableD(self.tid, msg=msg)
        if isinstance(self.delay, integer_types) and self.delay > 0:
            self.delay_ref = model.DELAY(self.delay, msg=msg)

    # NOTE(review): this signature lacks the xref_errors parameter that the
    # sibling RLOAD2/TLOAD2 safe_cross_reference methods take — confirm the
    # callers pass positionally before unifying
    def safe_cross_reference(self, model: BDF, debug=True):
        msg = ', which is required by TLOAD1=%s' % (self.sid)
        _cross_reference_excite_id(self, model, msg)
        if self.tid:
            #try:
            self.tid_ref = model.TableD(self.tid, msg=msg)
            #except
        if isinstance(self.delay, integer_types) and self.delay > 0:
            # uses delay_id (not self.delay as in cross_reference); both are
            # equivalent when delay_ref is None
            self.delay_ref = model.DELAY(self.delay_id, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # collapse the *_ref objects back into plain ids before dropping them
        self.tid = self.Tid()
        self.delay = self.delay_id
        self.tid_ref = None
        self.delay_ref = None

    def Tid(self):
        # returns the TABLEDi id for F(t), resolving the xref if present
        if self.tid_ref is not None:
            return self.tid_ref.tid
        elif self.tid == 0:
            return 0
        else:
            return self.tid

    @property
    def delay_id(self):
        # returns the DELAY id (or the float delay), resolving the xref if present
        if self.delay_ref is not None:
            return self.delay_ref.sid
        elif self.delay == 0:
            return 0
        return self.delay

    def get_load_at_time(self, time, scale=1.):
        """evaluates P(t) = A * F(t - tau) at the requested time(s); A=1."""
        # A = 1. # points to DAREA or SPCD
        # accept a scalar or array-like time
        if isinstance(time, float):
            time = np.array([time])
        else:
            time = np.asarray(time)

        # tau: direct float or a DELAY lookup
        if isinstance(self.delay, float):
            tau = self.delay
        elif self.delay == 0 or self.delay is None:
            tau = 0.0
        else:
            tau = self.delay_ref.get_delay_at_time(time)

        # only times after the delay contribute; earlier entries are dropped
        i = np.where(time - tau > 0)
        time2 = time[i]
        resp = self.tid_ref.interpolate(time2)

        # NOTE(review): is_spcd is hard-coded False, so the us0/vs0 initial
        # condition branches below are currently dead code
        is_spcd = False
        if self.Type == 'VELO' and is_spcd:
            resp[0] = self.us0
        if self.Type == 'ACCE' and is_spcd:
            resp[0] = self.vs0
        return resp * scale

    def raw_fields(self):
        list_fields = ['TLOAD1', self.sid, self.excite_id, self.delay_id, self.Type,
                       self.Tid(), self.us0, self.vs0]
        return list_fields

    def repr_fields(self):
        us0 = set_blank_if_default(self.us0, 0.0)
        vs0 = set_blank_if_default(self.vs0, 0.0)
        list_fields = ['TLOAD1', self.sid, self.excite_id, self.delay_id, self.Type,
                       self.Tid(), us0, vs0]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.repr_fields()
        if size == 8:
            # MAX_INT comes from module scope; falls back to large field
            # format when an id is too big for an 8-character field
            if max(self.sid, self.excite_id, self.delay_id, self.Tid()) > MAX_INT:
                return self.comment + print_card_16(card)
            return self.comment + print_card_8(card)
        if is_double:
            return self.comment + print_card_double(card)
        return self.comment + print_card_16(card)
class TLOAD2(DynamicLoad):
    r"""
    Transient Response Dynamic Excitation, Form 1

    Defines a time-dependent dynamic load or enforced motion of the form:

    .. math::
        \left\{ P(t) \right\} = \left\{ A \right\} e^(C*t) cos(2 \pi f t + \phi)

        P(t) = 0                                            (t<T1+tau or t > T2+tau)
        P(t) = {A} * t^b * e^(C*t) * cos(2*pi*f*t + phase)  (T1+tau <=  t <= T2+tau)

    for use in transient response analysis.

    MSC 2016.1
    +--------+-----+----------+-------+------+-----+-----+--------+---------+
    |   1    |  2  |    3     |   4   |  5   |  6  |  7  |   8    |    9    |
    +========+=====+==========+=======+======+=====+=====+========+=========+
    | TLOAD2 | SID | EXCITEID | DELAY | TYPE | T1  | T2  |  FREQ  |  PHASE  |
    +--------+-----+----------+-------+------+-----+-----+--------+---------+
    |        |  C  |    B     |  US0  | VS0  |     |     |        |         |
    +--------+-----+----------+-------+------+-----+-----+--------+---------+

    NX 11
    +--------+-----+----------+-------+------+-----+-----+--------+---------+
    |   1    |  2  |    3     |   4   |  5   |  6  |  7  |   8    |    9    |
    +========+=====+==========+=======+======+=====+=====+========+=========+
    | TLOAD2 | SID | EXCITEID | DELAY | TYPE | T1  | T2  |  FREQ  |  PHASE  |
    +--------+-----+----------+-------+------+-----+-----+--------+---------+
    |        |  C  |    B     |      |       |     |     |        |         |
    +--------+-----+----------+-------+------+-----+-----+--------+---------+
    """
    type = 'TLOAD2'
    _properties = ['delay_id']

    @classmethod
    def _init_from_empty(cls):
        """creates a placeholder TLOAD2; used by the class-based infrastructure"""
        sid = 1
        excite_id = 1
        return TLOAD2(sid, excite_id, delay=0, Type='LOAD', T1=0., T2=None,
                      frequency=0., phase=0., c=0., b=0., us0=0., vs0=0., comment='')

    def __init__(self, sid, excite_id, delay=0, Type='LOAD', T1=0., T2=None,
                 frequency=0., phase=0., c=0., b=0., us0=0., vs0=0., comment=''):
        """
        Creates a TLOAD2 card, which defines an exponential time-dependent
        load based on constants.

        Parameters
        ----------
        sid : int
            load id
        excite_id : int
            node id where the load is applied
        delay : int/float; default=None
            the delay; if it's 0/blank there is no delay
            float : delay in units of time
            int : delay id
        Type : int/str; default='LOAD'
            the type of load
            0/LOAD
            1/DISP
            2/VELO
            3/ACCE
            4, 5, 6, 7, 12, 13 - MSC only
        T1 : float; default=0.
            time constant (t1 > 0.0)
            times below this are ignored
        T2 : float; default=None
            time constant (t2 > t1)
            times above this are ignored
        frequency : float; default=0.
            Frequency in cycles per unit time.
        phase : float; default=0.
            Phase angle in degrees.
        c : float; default=0.
            Exponential coefficient.
        b : float; default=0.
            Growth coefficient.
        us0 : float; default=0.
            Factor for initial displacements of the enforced degrees-of-freedom
            MSC only
        vs0 : float; default=0.
            Factor for initial velocities of the enforced degrees-of-freedom
            MSC only
        comment : str; default=''
            a comment for the card
        """
        DynamicLoad.__init__(self)
        if comment:
            self.comment = comment
        # a blank T2 collapses the active window to the single instant T1
        if T2 is None:
            T2 = T1
        # normalize 0/'L'/'LO'/... aliases to the canonical string form
        Type = update_loadtype(Type)
        #: load ID
        #: SID must be unique for all TLOAD1, TLOAD2, RLOAD1, RLOAD2, and ACSRCE entries.
        self.sid = sid
        self.excite_id = excite_id
        self.delay = delay
        #: Defines the type of the dynamic excitation. (Integer; character
        #: or blank; Default = 0)
        self.Type = Type
        #: Time constant. (Real >= 0.0)
        self.T1 = T1
        #: Time constant. (Real; T2 > T1)
        self.T2 = T2
        #: Frequency in cycles per unit time. (Real >= 0.0; Default = 0.0)
        self.frequency = frequency
        #: Phase angle in degrees. (Real; Default = 0.0)
        self.phase = phase
        #: Exponential coefficient. (Real; Default = 0.0)
        self.c = c
        #: Growth coefficient. (Real; Default = 0.0)
        self.b = b
        #: Factor for initial displacements of the enforced degrees-of-freedom.
        #: (Real; Default = 0.0)
        self.us0 = us0
        #: Factor for initial velocities of the enforced degrees-of-freedom
        #: (Real; Default = 0.0)
        self.vs0 = vs0
        # cross-referenced DELAY; populated by (safe_)cross_reference
        self.delay_ref = None

    def validate(self):
        """verifies the card's fields; normalizes ``Type`` to a string"""
        if self.Type in [0, 'L', 'LO', 'LOA', 'LOAD']:
            self.Type = 'LOAD'
        elif self.Type in [1, 'D', 'DI', 'DIS', 'DISP']:
            self.Type = 'DISP'
        elif self.Type in [2, 'V', 'VE', 'VEL', 'VELO']:
            self.Type = 'VELO'
        elif self.Type in [3, 'A', 'AC', 'ACC', 'ACCE']:
            self.Type = 'ACCE'
        # NOTE(review): TLOAD1 also accepts Type=4; confirm whether the
        # omission here is intentional before changing it
        elif self.Type in [5, 6, 7, 12, 13]:  # MSC only
            pass
        else:
            msg = 'invalid TLOAD2 type  Type=%r' % self.Type
            raise RuntimeError(msg)
        assert self.sid > 0, self.sid

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a TLOAD2 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        excite_id = integer(card, 2, 'excite_id')
        delay = integer_double_or_blank(card, 3, 'delay', 0)
        Type = integer_string_or_blank(card, 4, 'Type', 'LOAD')
        T1 = double_or_blank(card, 5, 'T1', 0.0)
        T2 = double_or_blank(card, 6, 'T2', T1)
        frequency = double_or_blank(card, 7, 'frequency', 0.)
        phase = double_or_blank(card, 8, 'phase', 0.)
        c = double_or_blank(card, 9, 'c', 0.)
        b = double_or_blank(card, 10, 'b', 0.)
        us0 = double_or_blank(card, 11, 'us0', 0.)
        vs0 = double_or_blank(card, 12, 'vs0', 0.)
        assert len(card) <= 13, f'len(TLOAD2 card) = {len(card):d}\ncard={card}'
        return TLOAD2(sid, excite_id, delay, Type, T1, T2, frequency, phase,
                      c, b, us0, vs0, comment=comment)

    def get_load_at_time(self, time, scale=1.):
        """
        Evaluates P(t) = scale * t^b * e^{c*t} * cos(2*pi*f*t + phase) for
        times inside the [T1+tau, T2+tau] window; 0 elsewhere.
        """
        # accept a scalar or array-like time
        if isinstance(time, float):
            time = np.array([time])
        else:
            time = np.asarray(time)

        # A = 1. # points to DAREA or SPCD
        # tau: direct float or a DELAY lookup
        if isinstance(self.delay, float):
            tau = self.delay
        elif self.delay == 0 or self.delay is None:
            tau = 0.0
        else:
            tau = self.delay_ref.get_delay_at_time(time)

        # the load only acts inside the shifted window [T1+tau, T2+tau]
        t1 = self.T1 + tau
        t2 = self.T2 + tau
        freq = self.frequency
        p = self.phase
        # BUG FIX: the output buffer used to be named ``f``, clobbering the
        # frequency two lines after it was read; the cos() term then used
        # the zero-filled array instead of the scalar frequency
        out = np.zeros(time.shape, dtype=time.dtype)
        i = np.where(t1 <= time)[0]
        j = np.where(time[i] <= t2)[0]
        i = i[j]
        out[i] = (scale * time[i] ** self.b * np.exp(self.c * time[i])
                  * np.cos(2 * np.pi * freq * time[i] + p))

        # NOTE(review): is_spcd is hard-coded False, so the us0/vs0 initial
        # condition branches below are currently dead code
        is_spcd = False
        if self.Type == 'VELO' and is_spcd:
            out[0] = self.us0
        if self.Type == 'ACCE' and is_spcd:
            out[0] = self.vs0
        return out

    def get_loads(self):
        """returns the loads (self); TLOAD2 is a leaf load"""
        return [self]

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by TLOAD2 sid=%s' % (self.sid)
        _cross_reference_excite_id(self, model, msg)
        if isinstance(self.delay, integer_types) and self.delay > 0:
            self.delay_ref = model.DELAY(self.delay_id, msg=msg)
        # TODO: excite_id

    def safe_cross_reference(self, model: BDF, xref_errors, debug=True):
        """cross-references the card; errors are collected instead of raised"""
        msg = ', which is required by TLOAD2 sid=%s' % (self.sid)
        _cross_reference_excite_id(self, model, msg)
        if isinstance(self.delay, integer_types) and self.delay > 0:
            self.delay_ref = model.DELAY(self.delay_id, msg=msg)
        # TODO: excite_id

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.delay = self.delay_id
        self.delay_ref = None

    @property
    def delay_id(self):
        """returns the DELAY id (or the float delay), resolving the xref if present"""
        if self.delay_ref is not None:
            return self.delay_ref.sid
        elif self.delay == 0:
            return 0
        return self.delay

    def raw_fields(self):
        list_fields = ['TLOAD2', self.sid, self.excite_id, self.delay_id, self.Type,
                       self.T1, self.T2, self.frequency, self.phase, self.c, self.b,
                       self.us0, self.vs0]
        return list_fields

    def repr_fields(self):
        frequency = set_blank_if_default(self.frequency, 0.0)
        phase = set_blank_if_default(self.phase, 0.0)
        c = set_blank_if_default(self.c, 0.0)
        b = set_blank_if_default(self.b, 0.0)
        us0 = set_blank_if_default(self.us0, 0.0)
        vs0 = set_blank_if_default(self.vs0, 0.0)
        list_fields = ['TLOAD2', self.sid, self.excite_id, self.delay_id, self.Type,
                       self.T1, self.T2, frequency, phase, c, b, us0, vs0]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in small/large/double field format"""
        card = self.repr_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        if is_double:
            return self.comment + print_card_double(card)
        return self.comment + print_card_16(card)
def update_loadtype(load_type):
    """
    Normalizes the many accepted TYPE aliases (integers and string
    prefixes) of the dynamic-load cards to their canonical string form;
    unrecognized values pass through unchanged.
    """
    alias_table = (
        ('LOAD', (0, 'L', 'LO', 'LOA', 'LOAD')),
        ('DISP', (1, 'D', 'DI', 'DIS', 'DISP')),
        ('VELO', (2, 'V', 'VE', 'VEL', 'VELO')),
        ('ACCE', (3, 'A', 'AC', 'ACC', 'ACCE')),
    )
    for canonical, aliases in alias_table:
        if load_type in aliases:
            return canonical
    return load_type
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,649
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/mesh_utils/utils.py
|
"""
defines:
bdf merge (IN_BDF_FILENAMES)... [-o OUT_BDF_FILENAME]\n'
bdf equivalence IN_BDF_FILENAME EQ_TOL\n'
bdf renumber IN_BDF_FILENAME [-o OUT_BDF_FILENAME]\n'
bdf mirror IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--plane PLANE] [--tol TOL]\n'
bdf export_mcids IN_BDF_FILENAME [-o OUT_GEOM_FILENAME]\n'
bdf split_cbars_by_pin_flags IN_BDF_FILENAME [-o OUT_BDF_FILENAME]\n'
"""
import os
import sys
from io import StringIO
from typing import List
from cpylog import SimpleLogger
import pyNastran
from pyNastran.bdf.mesh_utils.bdf_renumber import bdf_renumber, superelement_renumber
from pyNastran.bdf.mesh_utils.bdf_merge import bdf_merge
from pyNastran.bdf.mesh_utils.export_mcids import export_mcids
from pyNastran.bdf.mesh_utils.pierce_shells import pierce_shell_model
# testing these imports are up to date
# if something is imported and tested, it should be removed from here
from pyNastran.bdf.mesh_utils.shift import update_nodes
from pyNastran.bdf.mesh_utils.mirror_mesh import write_bdf_symmetric
from pyNastran.bdf.mesh_utils.collapse_bad_quads import convert_bad_quads_to_tris
from pyNastran.bdf.mesh_utils.delete_bad_elements import delete_bad_shells, get_bad_shells
from pyNastran.bdf.mesh_utils.split_cbars_by_pin_flag import split_cbars_by_pin_flag
from pyNastran.bdf.mesh_utils.dev.create_vectorized_numbered import create_vectorized_numbered
from pyNastran.bdf.mesh_utils.remove_unused import remove_unused
from pyNastran.bdf.mesh_utils.free_faces import write_skin_solid_faces
from pyNastran.bdf.mesh_utils.get_oml import get_oml_eids
def cmd_line_create_vectorized_numbered(argv=None, quiet=False):  # pragma: no cover
    """command line interface to create_vectorized_numbered"""
    argv = sys.argv if argv is None else argv
    msg = (
        'Usage:\n'
        ' bdf create_vectorized_numbered IN_BDF_FILENAME [OUT_BDF_FILENAME]\n'
        ' bdf create_vectorized_numbered -h | --help\n'
        ' bdf create_vectorized_numbered -v | --version\n'
        '\n'
        'Positional Arguments:\n'
        ' IN_BDF_FILENAME the model to convert\n'
        " OUT_BDF_FILENAME the converted model name (default=IN_BDF_FILENAME + '_convert.bdf')"
        '\n'
        'Info:\n'
        ' -h, --help show this help message and exit\n'
        " -v, --version show program's version number and exit\n"
    )
    # called with no arguments -> print the usage and bail
    if len(argv) == 1:
        sys.exit(msg)

    from docopt import docopt
    parsed = docopt(msg, version=str(pyNastran.__version__), argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(parsed)

    bdf_filename_in = parsed['IN_BDF_FILENAME']
    bdf_filename_out = parsed['OUT_BDF_FILENAME']
    if not bdf_filename_out:
        # no output name given -> derive it from the input name
        root, ext = os.path.splitext(bdf_filename_in)
        bdf_filename_out = root + '_convert' + ext
    create_vectorized_numbered(bdf_filename_in, bdf_filename_out)
def cmd_line_equivalence(argv=None, quiet=False):
    """command line interface to bdf_equivalence_nodes"""
    argv = sys.argv if argv is None else argv
    from docopt import docopt
    msg = (
        'Usage:\n'
        ' bdf equivalence IN_BDF_FILENAME EQ_TOL [-o OUT_BDF_FILENAME]\n'
        ' bdf equivalence -h | --help\n'
        ' bdf equivalence -v | --version\n'
        '\n'
        "Positional Arguments:\n"
        " IN_BDF_FILENAME path to input BDF/DAT/NAS file\n"
        " EQ_TOL the spherical equivalence tolerance\n"
        #" OUT_BDF_FILENAME path to output BDF/DAT/NAS file\n"
        '\n'
        'Options:\n'
        " -o OUT, --output OUT_BDF_FILENAME path to output BDF/DAT/NAS file\n\n"
        'Info:\n'
        ' -h, --help show this help message and exit\n'
        " -v, --version show program's version number and exit\n"
    )
    # called with no arguments -> print the usage and bail
    if len(argv) == 1:
        sys.exit(msg)

    parsed = docopt(msg, version=str(pyNastran.__version__), argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(parsed)

    bdf_filename = parsed['IN_BDF_FILENAME']
    bdf_filename_out = parsed['--output']
    if bdf_filename_out is None:
        bdf_filename_out = 'merged.bdf'
    tol = float(parsed['EQ_TOL'])

    from pyNastran.bdf.mesh_utils.bdf_equivalence import bdf_equivalence_nodes
    log_level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=log_level, encoding='utf-8', log_func=None)
    bdf_equivalence_nodes(bdf_filename, bdf_filename_out, tol,
                          renumber_nodes=False,
                          neq_max=10, xref=True,
                          node_set=None, size=16,
                          is_double=False,
                          remove_collapsed_elements=False,
                          avoid_collapsed_elements=False,
                          crash_on_collapse=False,
                          log=log, debug=True)
def cmd_line_bin(argv=None, quiet=False):  # pragma: no cover
    """bins the model into nbins"""
    if argv is None:
        argv = sys.argv
    from docopt import docopt
    msg = (
        "Usage:\n"
        #" bdf bin IN_BDF_FILENAME AXIS1 AXIS2 [--cid CID] [--step SIZE]\n"
        " bdf bin IN_BDF_FILENAME AXIS1 AXIS2 [--cid CID] [--nbins NBINS]\n"
        ' bdf bin -h | --help\n'
        ' bdf bin -v | --version\n'
        '\n'
        "Positional Arguments:\n"
        " IN_BDF_FILENAME path to input BDF/DAT/NAS file\n"
        " AXIS1 axis to loop over\n"
        " AXIS2 axis to bin\n"
        '\n'
        'Options:\n'
        " --cid CID the coordinate system to bin (default:0)\n"
        # NOTE(review): --step is documented here but the implementation
        # below is commented out; only --nbins is honored
        " --step SIZE the step size for binning\n\n"
        " --nbins NBINS the number of bins\n\n"
        'Info:\n'
        ' -h, --help show this help message and exit\n'
        " -v, --version show program's version number and exit\n\n"
        'Plot z (2) as a function of y (1) in y-stepsizes of 0.1:\n'
        ' bdf bin fem.bdf 1 2 --cid 0 --step 0.1\n\n'
        'Plot z (2) as a function of y (1) with 50 bins:\n'
        ' bdf bin fem.bdf 1 2 --cid 0 --nbins 50'
    )
    # called with no arguments -> print the usage and bail
    if len(argv) == 1:
        sys.exit(msg)
    ver = str(pyNastran.__version__)
    #type_defaults = {
    # '--nerrors' : [int, 100],
    #}
    data = docopt(msg, version=ver, argv=argv[1:])
    bdf_filename = data['IN_BDF_FILENAME']
    axis1 = int(data['AXIS1'])
    axis2 = int(data['AXIS2'])
    cid = 0
    if data['--cid']:
        cid = int(data['--cid'])
    #stepsize = 0.1
    #if data['--step']:
    #stepsize = float(data['--step'])
    nbins = 10
    if data['--nbins']:
        nbins = int(data['--nbins'])
    assert nbins >= 2, nbins
    if not quiet: # pragma: no cover
        print(data)
    import numpy as np
    import matplotlib.pyplot as plt
    from pyNastran.bdf.bdf import read_bdf
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    model = read_bdf(bdf_filename, log=log)
    # node locations in the requested coordinate system
    xyz_cid = model.get_xyz_in_coord(cid=cid, fdtype='float64')
    # y: the sweep axis; z: the axis whose min/max we track per bin
    y = xyz_cid[:, axis1]
    z = xyz_cid[:, axis2]
    plt.figure(1)
    #n, bins, patches = plt.hist( [x0,x1,x2], 10, weights=[w0, w1, w2], histtype='bar')
    ys = []
    #zs = []
    zs_min = []
    zs_max = []
    # uniform bins spanning [y.min(), y.max()]
    y0 = y.min()
    y1 = y.max()
    dy = (y1 - y0) / nbins
    y0i = y0
    y1i = y0 + dy
    for unused_i in range(nbins):
        # NOTE(review): both bin edges are inclusive, so a node exactly on
        # an interior edge is counted in two adjacent bins
        j = np.where((y0i <= y) & (y <= y1i))[0]
        if not len(j):
            # empty bin -> no point to plot; skip it
            continue
        ys.append(y[j].mean())
        zs_min.append(z[j].min())
        zs_max.append(z[j].max())
        y0i += dy
        y1i += dy
    zs_max = np.array(zs_max)
    zs_min = np.array(zs_min)
    if not quiet: # pragma: no cover
        print('ys = %s' % ys)
        print('zs_max = %s' % zs_max)
        print('zs_min = %s' % zs_min)
    # per-bin envelope of axis2 vs the mean axis1 position of each bin
    plt.plot(ys, zs_max, 'r-o', label='max')
    plt.plot(ys, zs_min, 'b-o', label='min')
    plt.plot(ys, zs_max - zs_min, 'g-o', label='delta')
    #plt.xlim([y0, y1])
    plt.xlabel('Axis %s' % axis1)
    plt.ylabel('Axis %s' % axis2)
    plt.grid(True)
    plt.legend()
    plt.show()
def cmd_line_renumber(argv=None, quiet=False):
    """command line interface to bdf_renumber

    Parameters
    ----------
    argv : list[str]; default=None -> sys.argv
        the command line arguments to parse
    quiet : bool; default=False
        suppresses the docopt printout and uses a 'warning' logger
    """
    if argv is None:
        argv = sys.argv
    from docopt import docopt
    msg = (
        "Usage:\n"
        '  bdf renumber IN_BDF_FILENAME OUT_BDF_FILENAME [--superelement] [--size SIZE]\n'
        '  bdf renumber IN_BDF_FILENAME [--superelement] [--size SIZE]\n'
        '  bdf renumber -h | --help\n'
        '  bdf renumber -v | --version\n'
        '\n'
        'Positional Arguments:\n'
        '  IN_BDF_FILENAME    path to input BDF/DAT/NAS file\n'
        '  OUT_BDF_FILENAME   path to output BDF/DAT/NAS file\n'
        '\n'
        'Options:\n'
        '--superelement  calls superelement_renumber\n'
        '--size SIZE     set the field size (default=16)\n\n'
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
    )
    if len(argv) == 1:
        sys.exit(msg)

    ver = str(pyNastran.__version__)
    data = docopt(msg, version=ver, argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(data)
    bdf_filename = data['IN_BDF_FILENAME']
    bdf_filename_out = data['OUT_BDF_FILENAME']
    if bdf_filename_out is None:
        bdf_filename_out = 'renumber.bdf'

    size = 16
    # docopt stores a valued option under its option name ('--size'),
    # not under the placeholder ('SIZE'); the old data['SIZE'] lookup
    # raised a KeyError
    if data['--size']:
        size = int(data['--size'])
    assert size in [8, 16], f'size={size} args={argv}'

    #cards_to_skip = [
        #'AEFACT', 'CAERO1', 'CAERO2', 'SPLINE1', 'SPLINE2',
        #'AERO', 'AEROS', 'PAERO1', 'PAERO2', 'MKAERO1']
    cards_to_skip = []
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    if data['--superelement']:
        superelement_renumber(bdf_filename, bdf_filename_out, size=size, is_double=False,
                              starting_id_dict=None, #round_ids=False,
                              cards_to_skip=cards_to_skip, log=log)
    else:
        bdf_renumber(bdf_filename, bdf_filename_out, size=size, is_double=False,
                     starting_id_dict=None, round_ids=False,
                     cards_to_skip=cards_to_skip, log=log)
def cmd_line_mirror(argv=None, quiet=False):
    """command line interface to write_bdf_symmetric

    Parameters
    ----------
    argv : list[str]; default=None -> sys.argv
        the command line arguments to parse
    quiet : bool; default=False
        suppresses the docopt printout and uses a 'warning' logger
    """
    if argv is None:
        argv = sys.argv
    from io import StringIO  # the top-of-file import was commented out
    from docopt import docopt
    import pyNastran
    msg = (
        "Usage:\n"
        "  bdf mirror IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--punch] [--plane PLANE] [--tol TOL]\n"
        "  bdf mirror IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--punch] [--plane PLANE] [--noeq]\n"
        '  bdf mirror -h | --help\n'
        '  bdf mirror -v | --version\n'
        '\n'
        "Positional Arguments:\n"
        "  IN_BDF_FILENAME    path to input BDF/DAT/NAS file\n"
        #"  OUT_BDF_FILENAME   path to output BDF/DAT/NAS file\n"
        '\n'
        'Options:\n'
        "  -o OUT, --output OUT_BDF_FILENAME  path to output BDF/DAT/NAS file\n"
        '  --punch                            flag to identify a *.pch/*.inc file\n'
        "  --plane PLANE                      the symmetry plane (xz, yz, xy); default=xz\n"
        '  --tol TOL                          the spherical equivalence tolerance; default=1e-6\n'
        '  --noeq                             disable equivalencing\n'
        "\n"  # (default=0.000001)
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
    )
    if len(argv) == 1:
        sys.exit(msg)

    ver = str(pyNastran.__version__)
    data = docopt(msg, version=ver, argv=argv[1:])

    # docopt returns None for an absent valued option (not False), and the
    # value lives under '--tol'; the original data['TOL'] access raised a
    # KeyError whenever this path ran
    tol = 0.000001
    if data['--tol'] is not None:
        tol = float(data['--tol'])
    assert data['--noeq'] in [True, False]
    if data['--noeq']:
        tol = -1.  # negative tolerance disables equivalencing below

    plane = 'xz'
    if data['--plane'] is not None:  # None or str
        plane = data['--plane']
    if not quiet:  # pragma: no cover
        print(data)
    size = 16
    punch = data['--punch']
    bdf_filename = data['IN_BDF_FILENAME']
    bdf_filename_out = data['--output']
    if bdf_filename_out is None:
        bdf_filename_out = 'mirrored.bdf'

    from pyNastran.bdf.bdf import read_bdf
    from pyNastran.bdf.mesh_utils.bdf_equivalence import bdf_equivalence_nodes
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    # NOTE: the old code built a BDF() with error-storage settings and then
    # immediately threw it away; read_bdf is the only model we use
    model = read_bdf(bdf_filename, punch=punch, log=log)

    # mirror into an in-memory deck first; equivalencing (if enabled)
    # writes the final file
    bdf_filename_stringio = StringIO()
    unused_model, unused_nid_offset, eid_offset = write_bdf_symmetric(
        model, bdf_filename_stringio, encoding=None, size=size,
        is_double=False,
        enddata=None, close=False,
        plane=plane, log=log)
    bdf_filename_stringio.seek(0)

    if eid_offset > 0 and tol >= 0.0:
        bdf_equivalence_nodes(bdf_filename_stringio, bdf_filename_out, tol,
                              renumber_nodes=False,
                              neq_max=10, xref=True,
                              node_set=None, size=size,
                              is_double=False,
                              remove_collapsed_elements=False,
                              avoid_collapsed_elements=False,
                              crash_on_collapse=False,
                              debug=True, log=log)
    else:
        if eid_offset == 0:
            model.log.info('writing mirrored model %s without equivalencing because there are no elements' % bdf_filename_out)
        else:
            model.log.info('writing mirrored model %s without equivalencing' % bdf_filename_out)
        with open(bdf_filename_out, 'w') as bdf_file:
            bdf_file.write(bdf_filename_stringio.getvalue())
def cmd_line_merge(argv=None, quiet=False):
    """command line interface to bdf_merge"""
    if argv is None:
        argv = sys.argv
    from docopt import docopt
    import pyNastran
    msg = (
        "Usage:\n"
        '  bdf merge (IN_BDF_FILENAMES)... [-o OUT_BDF_FILENAME]\n'
        '  bdf merge -h | --help\n'
        '  bdf merge -v | --version\n'
        '\n'
        'Positional Arguments:\n'
        '  IN_BDF_FILENAMES   path to input BDF/DAT/NAS files\n'
        '\n'
        'Options:\n'
        '  -o OUT, --output OUT_BDF_FILENAME  path to output BDF/DAT/NAS file\n\n'
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
    )
    if len(argv) == 1:
        sys.exit(msg)

    version = str(pyNastran.__version__)
    data = docopt(msg, version=version, argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(data)

    bdf_filenames = data['IN_BDF_FILENAMES']
    bdf_filename_out = data['--output']
    if bdf_filename_out is None:
        bdf_filename_out = 'merged.bdf'

    # no cards are skipped during the merge/renumber
    skip_cards = []
    bdf_merge(bdf_filenames, bdf_filename_out, renumber=True,
              encoding=None, size=16, is_double=False,
              cards_to_skip=skip_cards)
def cmd_line_convert(argv=None, quiet=False):
    """command line interface to ``convert`` (unit conversion)

    Parameters
    ----------
    argv : list[str]; default=None -> sys.argv
        the command line arguments to parse
    quiet : bool; default=False
        suppresses the docopt printout and uses a 'warning' logger
    """
    if argv is None:
        argv = sys.argv
    from docopt import docopt
    msg = (
        "Usage:\n"
        '  bdf convert IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--in_units IN_UNITS] [--out_units OUT_UNITS]\n'
        '  bdf convert -h | --help\n'
        '  bdf convert -v | --version\n'
        '\n'
        'Options:\n'
        '  -o OUT, --output OUT_BDF_FILENAME  path to output BDF/DAT/NAS file\n\n'
        '  --in_units IN_UNITS                length,mass\n\n'
        '  --out_units OUT_UNITS              length,mass\n\n'
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
    )
    if len(argv) == 1:
        sys.exit(msg)

    ver = str(pyNastran.__version__)
    data = docopt(msg, version=ver, argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(data)
    bdf_filename = data['IN_BDF_FILENAME']
    bdf_filename_out = data['--output']
    if bdf_filename_out is None:
        bdf_filename_out = bdf_filename + '.convert.bdf'

    # docopt keys valued options by their option name; the original
    # data['IN_UNITS'] / data['OUT_UNITS'] lookups raised KeyErrors
    in_units = data['--in_units']
    if in_units is None:
        in_units = 'm,kg'
    out_units = data['--out_units']
    if out_units is None:
        out_units = 'm,kg'

    # units are "length,mass"; time is always seconds
    length_in, mass_in = in_units.split(',')
    length_out, mass_out = out_units.split(',')
    units_to = [length_out, mass_out, 's']
    units = [length_in, mass_in, 's']

    from pyNastran.bdf.bdf import read_bdf
    from pyNastran.bdf.mesh_utils.convert import convert
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    model = read_bdf(bdf_filename, validate=True, xref=True,
                     punch=False, save_file_structure=False,
                     skip_cards=None, read_cards=None,
                     encoding=None, log=log, debug=True, mode='msc')
    convert(model, units_to, units=units)
    # the old comments reference the pre-conversion values; drop them
    for prop in model.properties.values():
        prop.comment = ''
    model.write_bdf(bdf_filename_out)
def cmd_line_scale(argv=None, quiet=False):
    """command line interface to ``scale_by_terms``"""
    if argv is None:
        argv = sys.argv
    import argparse
    parser = argparse.ArgumentParser()

    # positional arguments
    parser.add_argument('scale', type=str)
    parser.add_argument('INPUT', help='path to output BDF/DAT/NAS file', type=str)
    parser.add_argument('OUTPUT', nargs='?', help='path to output file', type=str)

    # scale-factor options
    parser.add_argument('-l', '--length', help='length scale factor')
    parser.add_argument('-m', '--mass', help='mass scale factor')
    parser.add_argument('-f', '--force', help='force scale factor')
    parser.add_argument('-p', '--pressure', help='pressure scale factor')
    parser.add_argument('-t', '--time', help='time scale factor')
    parser.add_argument('-V', '--velocity', help='velocity scale factor')
    parser.add_argument('-v', '--version', action='version',
                        version=pyNastran.__version__)

    args = parser.parse_args(args=argv[1:])
    if not quiet:  # pragma: no cover
        print(args)

    bdf_filename = args.INPUT
    bdf_filename_out = args.OUTPUT
    if bdf_filename_out is None:
        base, ext = os.path.splitext(bdf_filename)
        bdf_filename_out = '%s.scaled%s' % (base, ext)

    # collect the requested (term, scale) pairs in M/L/T/F/P/V order
    terms = []
    scales = []
    for term, value in (('M', args.mass), ('L', args.length), ('T', args.time),
                        ('F', args.force), ('P', args.pressure), ('V', args.velocity)):
        if value:
            terms.append(term)
            scales.append(float(value))

    from pyNastran.bdf.mesh_utils.convert import scale_by_terms
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    scale_by_terms(bdf_filename, terms, scales, bdf_filename_out=bdf_filename_out, log=log)
def cmd_line_export_mcids(argv=None, quiet=False):
    """command line interface to export_mcids"""
    if argv is None:
        argv = sys.argv
    from docopt import docopt
    msg = (
        'Usage:\n'
        '  bdf export_mcids IN_BDF_FILENAME [-o OUT_CSV_FILENAME] [--iplies PLIES] [--no_x | --no_y]\n'
        '  bdf export_mcids -h | --help\n'
        '  bdf export_mcids -v | --version\n'
        '\n'
        'Positional Arguments:\n'
        '  IN_BDF_FILENAME    path to input BDF/DAT/NAS file\n'
        '\n'
        'Options:\n'
        '  -o OUT, --output OUT_CSV_FILENAME  path to output CSV file\n'
        '  --iplies PLIES                     the plies indices to export; comma separated (default=0)\n'
        '\n'
        'Data Suppression:\n'
        "  --no_x,  don't write the x axis\n"
        "  --no_y,  don't write the y axis\n"
        '\n'
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
    )
    _filter_no_args(msg, argv, quiet=quiet)

    version = str(pyNastran.__version__)
    data = docopt(msg, version=version, argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(data)

    bdf_filename = data['IN_BDF_FILENAME']
    csv_filename_in = data['--output']
    if csv_filename_in is None:
        csv_filename_in = 'mcids.csv'
    csv_filename_base = os.path.splitext(csv_filename_in)[0]

    # axes are written by default; the flags suppress them
    export_xaxis = not data['--no_x']
    export_yaxis = not data['--no_y']

    iplies = [0]
    if data['--iplies']:
        iplies = [int(iply) for iply in data['--iplies'].split(',')]
    if not quiet:  # pragma: no cover
        print('iplies = %s' % iplies)

    from pyNastran.bdf.bdf import read_bdf
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    model = read_bdf(bdf_filename, log=log, xref=False)
    model.safe_cross_reference()

    # one CSV per requested ply
    for iply in iplies:
        csv_filename = csv_filename_base + '_ply=%i.csv' % iply
        export_mcids(model, csv_filename,
                     export_xaxis=export_xaxis, export_yaxis=export_yaxis, iply=iply)
        model.log.info('wrote %s' % csv_filename)
def _filter_no_args(msg: str, argv: List[str], quiet: bool=False):
if len(argv) == 1:
if quiet:
sys.exit()
sys.exit(msg)
def cmd_line_free_faces(argv=None, quiet=False):
    """command line interface to bdf free_faces

    Equivalences the model, then writes the free (unshared) faces of the
    solid elements as shell elements.
    """
    if argv is None:
        argv = sys.argv
    encoding = sys.getdefaultencoding()
    usage = (
        'Usage:\n'
        '  bdf free_faces BDF_FILENAME SKIN_FILENAME [-d] [-l] [-f] [--encoding ENCODE]\n'
        '  bdf free_faces -h | --help\n'
        '  bdf free_faces -v | --version\n'
        '\n'
    )
    arg_msg = (
        "Positional Arguments:\n"
        "  BDF_FILENAME    path to input BDF/DAT/NAS file\n"
        "  SKIN_FILENAME   path to output BDF/DAT/NAS file\n"
        '\n'
        'Options:\n'
        '  -l, --large      writes the BDF in large field, single precision format (default=False)\n'
        '  -d, --double     writes the BDF in large field, double precision format (default=False)\n'
        f'  --encoding ENCODE  the encoding method (default=None -> {encoding!r})\n'
        '\n'
        'Developer:\n'
        '  -f, --profile    Profiles the code (default=False)\n'
        '\n'
        "Info:\n"
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
    )
    _filter_no_args(arg_msg, argv, quiet=quiet)

    arg_msg += '\n'
    examples = (
        'Examples\n'
        '--------\n'
        '  bdf free_faces solid.bdf skin.bdf\n'
        '  bdf free_faces solid.bdf skin.bdf --large\n'
    )
    import argparse
    parent_parser = argparse.ArgumentParser()
    # positional arguments
    parent_parser.add_argument('BDF_FILENAME', help='path to input BDF/DAT/NAS file', type=str)
    parent_parser.add_argument('SKIN_FILENAME', help='path to output BDF/DAT/NAS file', type=str)

    # -d/-l are mutually exclusive field formats; --encoding is independent,
    # so it must NOT live in the exclusive group (fixed); the -d/-l help
    # strings were also swapped relative to arg_msg (fixed), and the
    # --encoding help was missing its f-string prefix (fixed)
    size_group = parent_parser.add_mutually_exclusive_group()
    size_group.add_argument('-d', '--double', help='writes the BDF in large field, double precision format', action='store_true')
    size_group.add_argument('-l', '--large', help='writes the BDF in large field, single precision format', action='store_true')
    parent_parser.add_argument('--encoding', help=f'the encoding method (default=None -> {encoding!r})', type=str)
    parent_parser.add_argument('--profile', help='Profiles the code', action='store_true')
    parent_parser.add_argument('-v', '--version', action='version', version=pyNastran.__version__)

    from pyNastran.utils.arg_handling import argparse_to_dict, update_message
    update_message(parent_parser, usage, arg_msg, examples)
    if not quiet:
        print(argv)
    # argv = ['bdf', 'free_faces', ...], so the parser sees argv[2:]
    args = parent_parser.parse_args(args=argv[2:])
    data = argparse_to_dict(args)
    if not quiet:  # pragma: no cover
        for key, value in sorted(data.items()):
            print("%-12s = %r" % (key.strip('--'), value))

    import time
    time0 = time.time()

    # -d -> large field/double; -l -> large field/single; neither -> small field
    is_double = False
    if data['double']:
        size = 16
        is_double = True
    elif data['large']:
        size = 16
    else:
        size = 8
    bdf_filename = data['BDF_FILENAME']
    skin_filename = data['SKIN_FILENAME']

    from pyNastran.bdf.mesh_utils.bdf_equivalence import bdf_equivalence_nodes
    tol = 1e-005
    bdf_filename_merged = 'merged.bdf'
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    bdf_equivalence_nodes(bdf_filename, bdf_filename_merged, tol,
                          renumber_nodes=False, neq_max=10, xref=True,
                          node_set=None,
                          size=8, is_double=is_double,
                          remove_collapsed_elements=False,
                          avoid_collapsed_elements=False,
                          crash_on_collapse=False, log=log, debug=True)
    if not quiet:  # pragma: no cover
        print('done with equivalencing')
    write_skin_solid_faces(
        bdf_filename_merged, skin_filename,
        write_solids=False, write_shells=True,
        size=size, is_double=is_double, encoding=None, log=log,
    )
    if not quiet:  # pragma: no cover
        print('total time: %.2f sec' % (time.time() - time0))
def cmd_line_split_cbars_by_pin_flag(argv=None, quiet=False):
    """command line interface to split_cbars_by_pin_flag"""
    if argv is None:
        argv = sys.argv
    from docopt import docopt
    msg = (
        'Usage:\n'
        '  bdf split_cbars_by_pin_flags IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [-p PIN_FLAGS_CSV_FILENAME]\n'
        '  bdf split_cbars_by_pin_flags -h | --help\n'
        '  bdf split_cbars_by_pin_flags -v | --version\n'
        '\n'
        "Positional Arguments:\n"
        "  IN_BDF_FILENAME    path to input BDF/DAT/NAS file\n"
        '\n'
        'Options:\n'
        "  -o OUT, --output OUT_BDF_FILENAME        path to output BDF file\n"
        "  -p PIN, --pin PIN_FLAGS_CSV_FILENAME     path to pin_flags_csv file\n\n"
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
    )
    _filter_no_args(msg, argv, quiet=quiet)

    version = str(pyNastran.__version__)
    data = docopt(msg, version=version, argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(data)

    bdf_filename_in = data['IN_BDF_FILENAME']

    # apply defaults for the optional output/pin-flag paths
    bdf_filename_out = data['--output']
    if bdf_filename_out is None:
        bdf_filename_out = 'model_new.bdf'
    pin_flags_filename = data['--pin']
    if pin_flags_filename is None:
        pin_flags_filename = 'pin_flags.csv'

    split_cbars_by_pin_flag(bdf_filename_in, pin_flags_filename=pin_flags_filename,
                            bdf_filename_out=bdf_filename_out)
def cmd_line_transform(argv=None, quiet=False):
    """command line interface to ``bdf transform`` (rigidly shifts the model)"""
    if argv is None:
        argv = sys.argv
    from docopt import docopt
    msg = (
        'Usage:\n'
        '  bdf transform IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--shift XYZ]\n'
        '  bdf transform -h | --help\n'
        '  bdf transform -v | --version\n'
        '\n'
        'Positional Arguments:\n'
        '  IN_BDF_FILENAME    path to input BDF/DAT/NAS file\n'
        '\n'
        'Options:\n'
        '  -o OUT, --output OUT_BDF_FILENAME  path to output BDF file\n'
        '\n'
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
    )
    _filter_no_args(msg, argv, quiet=quiet)

    version = str(pyNastran.__version__)
    data = docopt(msg, version=version, argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(data)

    bdf_filename = data['IN_BDF_FILENAME']
    bdf_filename_out = data['--output']
    if bdf_filename_out is None:
        bdf_filename_out = 'transform.bdf'

    import numpy as np
    # --shift is not described in the Options block, so docopt treats it as
    # a bare flag and the vector lands in the XYZ positional
    dxyz = None
    if data['--shift']:
        dxyz = np.array(data['XYZ'].split(','), dtype='float64')
        assert len(dxyz) == 3, dxyz

    from pyNastran.bdf.bdf import read_bdf
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    model = read_bdf(bdf_filename, log=log)

    nid_cp_cd, xyz_cid0, unused_xyz_cp, unused_icd_transform, unused_icp_transform = model.get_xyz_in_coord_array(
        cid=0, fdtype='float64', idtype='int32')

    update_nodes_flag = False
    if dxyz is not None:
        xyz_cid0 += dxyz
        update_nodes_flag = True

    # we pretend to change the SPOINT location
    if update_nodes_flag:
        update_nodes(model, nid_cp_cd, xyz_cid0)
    model.write_bdf(bdf_filename_out)
def cmd_line_filter(argv=None, quiet=False):  # pragma: no cover
    """command line interface to bdf filter

    Removes the elements whose centroids satisfy the requested x/y/z
    inequalities, then strips the now-unused nodes/properties/materials.
    """
    if argv is None:
        argv = sys.argv
    from docopt import docopt
    msg = (
        'Usage:\n'
        '  bdf filter IN_BDF_FILENAME [-o OUT_BDF_FILENAME]\n'
        '  bdf filter IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--x YSIGN_X] [--y YSIGN_Y] [--z YSIGN_Z]\n'
        '  bdf filter -h | --help\n'
        '  bdf filter -v | --version\n'
        '\n'
        'Positional Arguments:\n'
        '  IN_BDF_FILENAME    path to input BDF/DAT/NAS file\n'
        '\n'
        'Options:\n'
        '  -o OUT, --output OUT_BDF_FILENAME  path to output BDF file (default=filter.bdf)\n'
        "  --x YSIGN_X                        a string (e.g., '< 0.')\n"
        "  --y YSIGN_Y                        a string (e.g., '< 0.')\n"
        "  --z YSIGN_Z                        a string (e.g., '< 0.')\n"
        '\n'
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
        '\n'
        'Examples\n'
        '1.  remove unused cards:\n'
        '    >>> bdf filter fem.bdf'
        '2.  remove GRID points and associated cards with y value < 0:\n'
        "    >>> bdf filter fem.bdf --y '< 0.'"
    )
    _filter_no_args(msg, argv, quiet=quiet)

    ver = str(pyNastran.__version__)
    data = docopt(msg, version=ver, argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(data)
    bdf_filename = data['IN_BDF_FILENAME']
    bdf_filename_out = data['--output']
    if bdf_filename_out is None:
        bdf_filename_out = 'filter.bdf'

    import numpy as np
    func_map = {
        '<' : np.less,
        '>' : np.greater,
        '<=' : np.less_equal,
        '>=' : np.greater_equal,
    }

    # parse "--x '< 0.'" style arguments into (operator, float) pairs
    xsign = None
    ysign = None
    zsign = None
    if data['--x']:
        xsign, xval = data['--x'].split(' ')
        xval = float(xval)
        assert xsign in ['<', '>', '<=', '>='], xsign
    if data['--y']:  # --y < 0
        ysign, yval = data['--y'].split(' ')
        yval = float(yval)
        assert ysign in ['<', '>', '<=', '>='], ysign
    if data['--z']:
        zsign, zval = data['--z'].split(' ')
        zval = float(zval)
        assert zsign in ['<', '>', '<=', '>='], zsign

    from pyNastran.bdf.bdf import read_bdf
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    model = read_bdf(bdf_filename, log=log)

    # the element centroids drive the filtering
    eids = []
    xyz_cid0 = []
    for eid, elem in sorted(model.elements.items()):
        xyz_cid0.append(elem.Centroid())
        eids.append(eid)
    xyz_cid0 = np.array(xyz_cid0)
    eids = np.array(eids)

    # accumulate a boolean mask of the elements to remove; the original
    # _union() call chain passed its arguments in the wrong order and never
    # seeded the union, so the masks are now combined inline with |
    iunion = None
    update_nodesi = False
    if xsign:
        ix = func_map[xsign](xyz_cid0[:, 0], xval)
        iunion = ix if iunion is None else (iunion | ix)
        update_nodesi = True
    if ysign:
        iy = func_map[ysign](xyz_cid0[:, 1], yval)
        iunion = iy if iunion is None else (iunion | iy)
        update_nodesi = True
    if zsign:
        iz = func_map[zsign](xyz_cid0[:, 2], zval)
        iunion = iz if iunion is None else (iunion | iz)
        update_nodesi = True

    if update_nodesi:
        eids_to_remove = eids[iunion]
        for eid in eids_to_remove:
            etype = model.elements[eid].type
            model._type_to_id_map[etype].remove(eid)
            del model.elements[eid]

    # unxref'd model
    remove_unused(model, remove_nids=True, remove_cids=True,
                  remove_pids=True, remove_mids=True)
    model.write_bdf(bdf_filename_out)
def _union(xval, iunion, ix):
"""helper method for ``filter``"""
import numpy as np
if xval:
if iunion:
iunion = np.union1d(iunion, ix)
else:
pass
return iunion
def cmd_line_export_caero_mesh(argv=None, quiet=False):
    """command line interface to export_caero_mesh"""
    if argv is None:
        argv = sys.argv
    from docopt import docopt
    import pyNastran
    msg = (
        'Usage:\n'
        '  bdf export_caero_mesh IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--subpanels] [--pid PID]\n'
        '  bdf export_caero_mesh -h | --help\n'
        '  bdf export_caero_mesh -v | --version\n'
        '\n'
        'Positional Arguments:\n'
        '  IN_BDF_FILENAME    path to input BDF/DAT/NAS file\n'
        '\n'
        'Options:\n'
        '  -o OUT, --output OUT_CAERO_BDF_FILENAME  path to output BDF file\n'
        '  --subpanels                              write the subpanels (default=False)\n'
        '  --pid PID                                sets the pid; {aesurf, caero, paero} [default: aesurf]\n'
        '\n'
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
    )
    _filter_no_args(msg, argv, quiet=quiet)

    version = str(pyNastran.__version__)
    data = docopt(msg, version=version, argv=argv[1:])
    if not quiet:  # pragma: no cover
        print(data)

    bdf_filename = data['IN_BDF_FILENAME']
    caero_bdf_filename = data['--output']
    if caero_bdf_filename is None:
        caero_bdf_filename = 'caero.bdf'
    is_subpanel_model = data['--subpanels']
    pid_method = 'aesurf'
    if data['--pid']:
        pid_method = data['--pid']

    from pyNastran.bdf.bdf import read_bdf
    from pyNastran.bdf.mesh_utils.export_caero_mesh import export_caero_mesh
    # only the aero model matters here; skip the structural/load/optimization
    # cards to speed up the read
    skip_cards = [
        # elements
        'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4', 'CONM2',
        'CROD', 'CTUBE', 'CONROD', 'CBAR', 'CBEAM',
        'CQUAD4', 'CTRIA3',
        'CTETRA', 'CHEXA', 'CPENTA', 'CPYRAM',
        'RBE1', 'RBE2', 'RBE3', 'RBAR',
        # properties
        'PELAS', 'PDAMP', 'PROD', 'PTUBE',
        'PBAR', 'PBARL', 'PBEAM', 'PBEAML', 'PBCOMP',
        'PSHEAR', 'PSHELL', 'PCOMP', 'PCOMPG', 'PSOLID',
        'MAT1', 'MAT8',
        # loads
        'PLOAD', 'PLOAD2', 'PLOAD4', 'FORCE', 'FORCE1', 'FORCE2', 'MOMENT', 'MOMENT1', 'MOMENT2',
        'GRAV', 'ACCEL', 'ACCEL1',
        # constraints
        'SPC', 'SPC1', 'MPC', 'SPCADD', 'MPCADD', 'DEQATN',
        # optimization
        'DVPREL1', 'DVPREL2', 'DVMREL1', 'DVMREL2', 'DVCREL1', 'DVCREL2', 'DCONADD',
        'DRESP1', 'DRESP2', 'DRESP3', 'DESVAR',
        # aero: maybe enable later
        'TRIM', 'AESTAT', 'FLUTTER', 'FLFACT',
    ]
    level = 'debug' if not quiet else 'warning'
    log = SimpleLogger(level=level, encoding='utf-8', log_func=None)
    model = read_bdf(bdf_filename, log=log, skip_cards=skip_cards)
    export_caero_mesh(model, caero_bdf_filename,
                      is_subpanel_model=is_subpanel_model, pid_method=pid_method)
def cmd_line(argv=None, quiet=False):
    """command line interface to multiple other command line scripts

    Dispatches ``bdf <command> ...`` to the matching ``cmd_line_*`` function.
    """
    if argv is None:
        argv = sys.argv
    dev = True
    msg = (
        'Usage:\n'
        '  bdf merge (IN_BDF_FILENAMES)... [-o OUT_BDF_FILENAME]\n'
        '  bdf equivalence IN_BDF_FILENAME EQ_TOL\n'
        '  bdf renumber IN_BDF_FILENAME [OUT_BDF_FILENAME] [--superelement] [--size SIZE]\n'
        '  bdf filter IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--x YSIGN X] [--y YSIGN Y] [--z YSIGN Z]\n'
        '  bdf mirror IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--plane PLANE] [--tol TOL]\n'
        '  bdf convert IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--in_units IN_UNITS] [--out_units OUT_UNITS]\n'
        '  bdf scale IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--lsf LENGTH_SF] [--msf MASS_SF] [--fsf FORCE_SF] [--psf PRESSURE_SF] [--tsf TIME_SF] [--vsf VEL_SF]\n'
        '  bdf export_mcids IN_BDF_FILENAME [-o OUT_CSV_FILENAME] [--no_x | --no_y]\n'
        '  bdf free_faces BDF_FILENAME SKIN_FILENAME [-d | -l] [-f] [--encoding ENCODE]\n'
        '  bdf transform IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--shift XYZ]\n'
        '  bdf export_caero_mesh IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--subpanels] [--pid PID]\n'
        '  bdf split_cbars_by_pin_flags IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [-p PIN_FLAGS_CSV_FILENAME]\n'
    )
    if dev:
        msg += '  bdf create_vectorized_numbered IN_BDF_FILENAME [OUT_BDF_FILENAME]\n'
        msg += '  bdf bin IN_BDF_FILENAME AXIS1 AXIS2 [--cid CID] [--step SIZE]\n'

    # NOTE: the original listed 'bdf filter -h' twice; the duplicate is removed
    msg += (
        #'\n'
        '  bdf merge -h | --help\n'
        '  bdf equivalence -h | --help\n'
        '  bdf renumber -h | --help\n'
        '  bdf filter -h | --help\n'
        '  bdf mirror -h | --help\n'
        '  bdf convert -h | --help\n'
        '  bdf scale -h | --help\n'
        '  bdf export_mcids -h | --help\n'
        '  bdf free_faces -h | --help\n'
        '  bdf transform -h | --help\n'
        '  bdf export_caero_mesh -h | --help\n'
        '  bdf split_cbars_by_pin_flags -h | --help\n'
    )
    if dev:
        msg += (
            '  bdf create_vectorized_numbered -h | --help\n'
            '  bdf bin -h | --help\n'
        )
    msg += '  bdf -v | --version\n'
    msg += '\n'

    _filter_no_args(msg + 'Not enough arguments.\n', argv, quiet=quiet)

    # sub-command -> handler; dev-only commands are added conditionally
    cmd_map = {
        'merge': cmd_line_merge,
        'equivalence': cmd_line_equivalence,
        'renumber': cmd_line_renumber,
        'mirror': cmd_line_mirror,
        'convert': cmd_line_convert,
        'scale': cmd_line_scale,
        'export_mcids': cmd_line_export_mcids,
        'split_cbars_by_pin_flags': cmd_line_split_cbars_by_pin_flag,
        'export_caero_mesh': cmd_line_export_caero_mesh,
        'transform': cmd_line_transform,
        'filter': cmd_line_filter,
        'free_faces': cmd_line_free_faces,
    }
    if dev:
        cmd_map['bin'] = cmd_line_bin
        cmd_map['create_vectorized_numbered'] = cmd_line_create_vectorized_numbered

    method = argv[1]
    if method in cmd_map:
        cmd_map[method](argv, quiet=quiet)
    elif method in ['-v', '--version']:
        print(pyNastran.__version__)
    else:
        print(argv)
        sys.exit(msg)
if __name__ == '__main__':  # pragma: no cover
    # for the exe, we pass all the args, but we hack them to have the bdf prefix
    from copy import deepcopy
    # copy argv so the global sys.argv is left untouched before rewriting argv[0]
    argv = deepcopy(sys.argv)
    argv[0] = 'bdf'
    cmd_line(argv=argv)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,650
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/cards/deqatn.py
|
# coding: utf-8
"""
Defines the DEQATN class and sub-functions.
The capitalization of the sub-functions is important.
"""
from __future__ import annotations
import re
from typing import TYPE_CHECKING
import numpy as np
from numpy import (
cos, sin, tan, log, log10, mean, exp, sqrt, square, mod, abs, sum,
arcsin as asin, arccos as acos, arctan as atan, arctan2 as atan2,
arcsinh as asinh, arccosh as acosh, arctanh as atanh)
# atan2h
from numpy.linalg import norm # type: ignore
from pyNastran.bdf.cards.base_card import BaseCard
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
def pi(num):
    """DEQATN's PI(x) function: multiplies the input by pi."""
    return num * np.pi
def rss(*args):  # good
    """Root-sum-square: the 2-norm of the N given components."""
    return np.linalg.norm(args)
def avg(*args):
    """arithmetic mean of the given values"""
    values = np.asarray(args)
    return values.mean()
def ssq(*args):
    """sum of squares of the given values"""
    arr = np.asarray(args)
    return (arr * arr).sum()
def logx(x, y):
    """
    log base x of y: ``log_x(y) = ln(y) / ln(x)``

    Bug fix: the previous implementation returned ``log(y**x) / log(x)``,
    which equals ``x * log_x(y)`` (e.g. logx(2, 8) gave 6. instead of 3.),
    contradicting the documented "log base_x(y)" behavior.
    """
    return log(y) / log(x)
def dim(x, y):
    """positive difference: ``x - min(x, y)``, i.e. max(x - y, 0)"""
    smaller = y if y < x else x
    return x - smaller
def db(p, pref):
    """
    sound pressure in decibels

    NOTE(review): uses the natural log (``log`` here is numpy's ln),
    consistent with ``invdb`` — confirm against the DEQATN DB definition,
    which conventionally uses log10.
    """
    ratio = p / pref
    return 20. * log(ratio)
#def _Log(z):
def atan2h(x, y):
    """
    Two-argument hyperbolic arctangent; not implemented.

    >>> arctanh(z) = 1/2 Log((1+z)/(1-z))

    real:
        requires |x1| > |x2| and x2 != 0
    complex (x1 = a + bi, x2 = b + di):
        a = b = 0 and sign(c) == sign(d): result is 0
        a = b = 0 and sign(c) != sign(d): result is pi
        c = d = 0 and sign(a) == sign(b): result is pi/2
        c = d = 0 and sign(a) != sign(b): result is -pi/2
    """
    raise NotImplementedError()
def invdb(dbi: float, pref: float) -> float:
    """
    inverse Db

    NOTE(review): uses the natural log inside a base-10 power; this is
    only the exact inverse of ``db`` if the intended log bases match —
    confirm against the DEQATN INVDB definition.
    """
    exponent = dbi / 20. + log(pref)
    return 10. ** exponent
def dba(p: float, pref: float, f: float) -> float:
    """
    sound pressure in decibels (perceived)

    Parameters
    ----------
    p : float
        structural responses or acoustic pressure
    pref : float
        reference pressure
    f : float
        forcing frequency

    Returns
    -------
    dbi : float
        acoustic pressure in Decibels
    """
    ta1, ta2 = _get_ta(f)
    level = 20. * log(p / pref)
    weighting = 10 * log(ta1) + 10. * log(ta2)
    return level + weighting
def invdba(dbai: float, pref: float, f: float) -> float:
    """
    Inverse Dba

    Parameters
    ----------
    dbai : float
        acoustic pressure in Decibels (perceived)
    pref : float
        reference pressure
    f : float
        forcing frequency

    Returns
    -------
    p : float
        structural responses or acoustic pressure
    """
    ta1, ta2 = _get_ta(f)
    # undo the A-weighting terms, then invert the 20*log(...) scaling
    corrected = dbai - 10. * log(ta1) - 10. * log(ta2)
    return 10. ** (corrected / 20)
def _get_ta(f: float) -> float:
"""gets the factors for dba, invdba"""
k1 = 2.242882e16
k3 = 1.562339
p1 = 20.598997
p2 = 107.65265
p3 = 737.86223
p4 = 12194.22
ta1 = k3 * f**4 / ((f**2 + p2**2) * (f**2 + p3**2))
ta2 = k1 * f**4 / ((f**2 + p1**2)**2 * (f**2 + p4**2)**2)
return ta1, ta2
# Python keywords/builtins that may legally appear as DEQATN variable or
# function names; a '_' is added (prefix or suffix depending on call site)
# so the generated Python source stays syntactically valid.
BUILTINS = ['del', 'eval', 'yield', 'async', 'await', 'property',
            'slice', 'filter', 'map']
class DEQATN(BaseCard):  # needs work...
    """
    Design Equation Definition
    Defines one or more equations for use in design sensitivity analysis.

    +--------+------+-----+-----+-----+-----+-------+-----+
    |   1    |  2   |  3  |  4  |  5  |  6  |   7   |  8  |
    +========+======+=====+=====+=====+=====+=======+=====+
    | DEQATN | EQID |            EQUATION                 |
    +--------+------+-------------------------------------+
    |        |       EQUATION (cont.)                     |
    +--------+--------------------------------------------+
    """
    type = 'DEQATN'
    _properties = ['dtable']

    def __init__(self, equation_id: int, eqs, comment: str=''):
        """
        Creates a DEQATN card

        Parameters
        ----------
        equation_id : int
            the id of the equation
        eqs : List[str]
            the equations, which may overbound the field;
            split them by a semicolon (;)
        comment : str; default=''
            a comment for the card

        DEQATN  41      F1(A,B,C,D,R) = A+B *C-(D**3 + 10.0) + sin(PI(1) * R)
                        + A**2 / (B - C); F = A + B - F1 * D

        def F1(A, B, C, D, R):
            F1 = A+B *C-(D**3 + 10.0) + sin(PI(1) * R) + A**2 / (B - C)
            F = A + B - F1 * D
            return F

        eqs = [
            'F1(A,B,C,D,R) = A+B *C-(D**3 + 10.0) + sin(PI(1) * R) + A**2 / (B - C)',
            'F = A + B - F1 * D',
        ]
        >>> deqatn = DEQATN(41, eq, comment='')
        """
        if comment:
            self.comment = comment
        # set via cross_reference(); DTABLE provides default argument values
        self.dtable = None
        # the compiled Python callable; built lazily by _setup_equation()
        self.func = None
        self.equation_id = equation_id
        self.eqs = eqs
        # generated Python source of the function (for debugging)
        self.func_str = ''

    @classmethod
    def _init_from_empty(cls):
        equation_id = 1
        eqs = []
        return DEQATN(equation_id, eqs, comment='')

    @classmethod
    def add_card(cls, card: List[str], comment: str=''):
        """
        Adds a DEQATN card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : List[str]
            this card is special and is not a ``BDFCard`` like other cards
        comment : str; default=''
            a comment for the card
        """
        line0 = card[0]
        if '\t' in line0:
            line0 = line0.expandtabs()

        # the first 16 characters hold 'DEQATN' + the equation id
        name_eqid = line0[:16]
        assert ',' not in name_eqid, name_eqid

        try:
            name, eq_id = name_eqid.split()
            assert name.strip().upper() == 'DEQATN', card
        except ValueError:
            msg = 'cannot split %r\n' % name_eqid
            msg += "Expected data of the form 'DEQATN 100'\n"
            msg += 'card=%s' % card
            raise ValueError(msg)

        equation_id = int(eq_id)

        # combine the equations into a single organized block
        line0_eq = line0[16:]
        eqs_temp = [line0_eq] + card[1:]
        eqs = lines_to_eqs(eqs_temp)
        return DEQATN(equation_id, eqs, comment=comment)

    def _setup_equation(self) -> None:
        """
        creates an executable equation object from self.eqs

        x = 10.
        >>> deqatn.func(x)
        42.0
        >>> deqatn.func_str
        def stress(x):
            x = float(x)
            return x + 32.
        """
        default_values = {}
        if self.dtable is not None:
            default_values = self.dtable_ref.default_values
        func_name, nargs, func_str = fortran_to_python(
            self.equation_id, self.eqs, default_values, str(self))
        self.func_str = func_str
        self.func_name = func_name

        # compile the generated source; the defined function lands in locals()
        try:
            exec(func_str)
        except SyntaxError:
            print(func_str)
            raise

        func = locals()[func_name]
        # expose the function under its DEQATN name as well as self.func
        setattr(self, func_name, func)
        self.func = func
        self.nargs = nargs

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        # TODO: get defaults from DTABLE
        # TODO: get limits from DCONSTR
        self.dtable = model.dtable
        self.dtable_ref = self.dtable
        self._setup_equation()

    def safe_cross_reference(self, model: BDF) -> None:
        self.cross_reference(model)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        del self.func
        # the dynamically-named attribute created in _setup_equation
        setattr(self, self.func_name, None)
        del self.func_name
        del self.nargs
        del self.dtable, self.dtable_ref

    def _verify(self, xref: bool) -> None:
        pass

    def evaluate(self, *args) -> float:
        """Makes a call to self.func"""
        # too few args is allowed (DTABLE defaults may fill in); too many is not
        if len(args) > self.nargs:
            msg = 'len(args) > nargs\n'
            msg += 'nargs=%s len(args)=%s; func_name=%s' % (
                self.nargs, len(args), self.func_name)
            raise RuntimeError(msg)
        return self.func(*args)

    def raw_fields(self) -> List[str]:
        return [self.write_card()]

    def repr_fields(self) -> List[str]:
        return self.raw_fields()

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        # re-wrap the equations to fit the card field widths
        eqs = split_equations(self.eqs)
        equation_line0 = eqs[0]
        # first line: 16 chars of 'DEQATN  <id>' + up to 56 chars of equation
        msg = 'DEQATN  %-8i%-56s\n' % (self.equation_id, equation_line0)
        assert len(equation_line0) <= 56, equation_line0
        for eq in eqs[1:]:
            # continuation lines: 8-char pad + up to 64 chars of equation
            msg += '        %-64s\n' % eq
            assert len(eq) <= 64, eq
        return msg
def lines_to_eqs(eqs_in: List[str]) -> List[str]:
    """converts raw DEQATN card lines into one string per equation"""
    statements = _split_equations_by_semicolon(eqs_in)
    eqs = _join_wrapped_equation_lines(eqs_in, statements)
    assert len(eqs) > 0, eqs
    return eqs
def _split_equations_by_semicolon(eqs_in: List[str]) -> List[str]:
"""helper for ``lines_to_eqs``"""
eqs_temp_out = []
nchars = 72 - 16
for iline, eq in enumerate(eqs_in):
if iline == 0:
eq2 = eq[:nchars].strip(' \t\n')
else:
eq2 = eq.expandtabs()[8:nchars].strip(' \t\n')
semicolon = ';' if eq2.rstrip().endswith(';') else ''
eq2 = eq2.rstrip(' \t;')
#nline = len(eq.rstrip('; \n')) + 16
#print('eq2=%r' % eq2)
if ';' in eq2:
eq2s = eq2.split(';')
eq_tempi = [eqi.strip() + ';' for eqi in eq2s if eqi.strip()]
#for check_line in eq2s:
#print(check_line)
#_check_for_valid_line(check_line, eq)
#print('eq_tempi = %r' % eq_tempi)
#eq_tempi[-1] += semicolon
eqs_temp_out += eq_tempi
else:
check_line = eq2 + semicolon
#print('check_line = %r' % (check_line))
#_check_for_valid_line(check_line, eq)
eqs_temp_out.append(check_line)
nchars = 72
return eqs_temp_out
#def _check_for_valid_line(check_line, full_line):
#if '=' not in check_line:
#msg = (
#'expected an equal sign (the first 8 characters are removed)\n'
#'line =%r\n'
#'full_line=%r' % (check_line, full_line))
#raise SyntaxError(msg)
def _join_wrapped_equation_lines(unused_eqs_temp_in, eqs_temp: List[str]) -> List[str]:
    """
    helper for ``lines_to_eqs``

    Joins physical lines that continue a single equation: ``is_join`` is
    True while the current statement has not been terminated by a ';',
    so the next line is glued onto it.  All spaces are removed from the
    accumulated equation text.
    """
    eqs = []
    neqs = len(eqs_temp)
    is_join = False
    eqi = ''
    for i, eq in enumerate(eqs_temp):
        if is_join:
            # continuation: glue onto the previous (unterminated) statement
            eq = eqi.rstrip() + eq.lstrip()
        eqi = eq.strip().replace(' ', '')
        if i == 0 and eqi == '':
            # blank first field; nothing to emit
            continue

        if i == 0:
            # first line: terminated iff it ends with ';'
            if eqi.endswith(';'):
                eqi = eqi[:-1]
                assert not eqi.endswith(';'), eq
            else:
                is_join = True
                assert len(eqi) <= 56, eqi
        elif i != neqs-1:
            # mid line: same terminator logic
            if eqi.endswith(';'):
                eqi = eqi[:-1]
                is_join = False
                assert not eqi.endswith(';'), eq
            else:
                is_join = True
        else:
            # last line always closes the final statement
            is_join = False

        if not is_join:
            # a complete statement; sanity-check and store it
            if '=' not in eqi:
                raise SyntaxError('line=%r expected an equal sign' % eqi)
            eqs.append(eqi)

    # flush a trailing unterminated statement (shouldn't normally happen)
    if is_join:
        eqs.append(eqi)
    return eqs
def split_equations(lines: List[str]) -> List[str]:
    """takes an overbounded DEQATN card and shortens it"""
    shortened = []
    for i, line in enumerate(lines):
        # the first card line only has 56 usable chars; continuations get 64;
        # a ';' is appended so every statement carries its terminator
        width = 56 if i == 0 else 64
        shortened += _split_equation([], line.strip() + ';', width)
    # the final statement does not keep a trailing semicolon
    shortened[-1] = shortened[-1][:-1]
    return shortened
def _split_equation(lines_out: List[str], line: str, n: int,
                    isplit: int=0) -> List[str]:
    """
    Takes an overbounded DEQATN line and shortens it using recursion

    Parameters
    ----------
    lines_out : List[str]
        len(lines) = 0 : first iteration
        len(lines) = 1 : second iteration
    line : str
        the line to split
    n : int
        the maximum number of characters allowed;
        the first line of the DEQATN has a different number of fields
        allowed vs. subsequent lines
    isplit : int; default=0
        the number of levels deep in the recursive function we are

    Returns
    -------
    lines_out : List[str]
        the long line broken into shorter lines
    """
    if len(line) <= n:
        lines_out.append(line.strip())
        return lines_out

    # equation must be split; reverse the first n chars so .index() finds
    # the operator CLOSEST to the cut point ('**' is collapsed to '^' so
    # it can't be split in the middle)
    line0 = line[:n][::-1].replace('**', '^')

    # record the first (i.e. rightmost in forward order) occurrence of
    # each operator in the reversed prefix
    out = {}
    for operator in ('+', '*', '^', '-', ')', ',', '='):
        if operator in line0:
            i = line0.index(operator)
            out[i] = operator
    try:
        imin = min(out)
    except ValueError:
        # no operator at all in the first n characters -> cannot wrap
        msg = "Couldn't find an operator ()+-/*= in %r\n" % line[n:]
        msg += 'line = %r' % line
        raise ValueError(msg)
    operator = out[imin]

    # 'aft' (in reversed space) is everything BEFORE the operator in
    # forward order, so line[:len(aft)+1] ends just after that operator
    unused_fore, aft = line0.split(operator, 1)
    i = len(aft) + 1
    line_out = line[:i]
    lines_out.append(line_out.replace('^', '**').strip())

    isplit += 1
    if isplit > 360:
        raise RuntimeError('Recursion issue splitting line; isplit=%i' % isplit)
    # recurse on the remainder of the line
    lines_out = _split_equation(lines_out, line[i:], n, isplit+1)
    return lines_out
def fortran_to_python_short(line: str, unused_default_values: Any) -> Any:
"""the function used by the DRESP2"""
func_str = 'def func(args):\n'
func_str += ' return %s(args)\n' % line.strip()
local_dict = {}
exec(func_str, globals(), local_dict)
return local_dict['func']
def split_to_equations(lines: List[str]) -> List[str]:
"""
Splits a line like::
b = a + z; c = 42
into::
b = a + z
c = 42
"""
equation_lines = []
for line in lines:
line = line.rstrip(' ;')
if ';' in line:
lines2 = line.split(';')
equation_lines.extend(lines2)
else:
equation_lines.append(line)
return equation_lines
def fortran_to_python(deqatn_id: int,
                      lines: List[str],
                      default_values: Dict[str, Union[float, np.ndarray]],
                      comment: str='') -> Tuple[str, int, str]:
    """
    Creates the python function

    Parameters
    ----------
    deqatn_id : int
        the DEQATN id (used only in error messages)
    lines : List[str]
        the equations to write broken up by statement
    default_values : dict[name] = value
        the default values from the DTABLE card
    comment : str; default=''
        the card text, embedded in the generated docstring

    Returns
    -------
    func_name : str
        the name of the function
    nargs : int
        the number of variables to the function
    func_msg : str
        the python function

    def f(x, y=10.):
        '''
        $ deqatn
        DEQATN  1000    f(x,y) = x+y
        '''
        try:
            if isinstance(x, (int, float, str)):
                x = float(x)
            if isinstance(y, (int, float, str)):
                y = float(y)
        except Exception:
            print(locals())
            raise
        f = x + y
        return f
    """
    func_msg = ''
    variables = []
    assert len(lines) > 0, lines
    equation_lines = split_to_equations(lines)
    for i, line in enumerate(equation_lines):
        line = line.lower()
        assert ';' not in line, line
        try:
            # f(x, y) = 10.
            # f(x, y) = abs(x) + y
            # f = 42.
            f, eq = line.split('=')
        except ValueError:
            if '=' not in line:
                raise SyntaxError('= not found in %r' % (line))
            msg = 'only 1 = sign may be found a line\n'
            msg += 'line = %r\n' % line
            if len(lines) > 1:
                msg += 'lines:\n%s' % '\n'.join(lines)
            raise SyntaxError(msg)
        f = f.strip()
        eq = eq.strip().rstrip(';')

        # a name that exactly matches a Python builtin/keyword gets a '_' suffix
        for builtin in BUILTINS:
            if builtin == f:
                f = f.replace(builtin, builtin + '_')
            if builtin == eq:
                eq = eq.replace(builtin, builtin + '_')

        if i == 0:
            # the first statement defines the function signature
            func_name, func_msg, variables = write_function_header(
                f, eq, default_values, comment)
            f = func_name  # return the value...
            func_msg += '    # i=0 write_function_header\n'
        else:
            # subsequent statements become plain assignments in the body
            out = f
            func_msg += '    %s = %s\n' % (out, eq)
    # the last assigned name is the function's return value
    func_msg += '    return %s' % f

    if func_name in variables:
        raise RuntimeError(f'The function name {func_name!r} for DEQATN,{deqatn_id:d} '
                           f'must not also be an argument; arguments={variables}')
    nargs = len(variables)
    return func_name, nargs, func_msg
def write_function_header(func_header: str, eq: str,
                          default_values: Dict[str, float],
                          comment: str='') -> Tuple[str, str, List[str]]:
    """
    initializes the python function

    def f(x, y=10.):
        '''
        $ deqatn
        DEQATN  1000    f(x,y) = x+y
        '''
        try:
            if isinstance(x, (int, float, str)):
                x = float(x)
            if isinstance(y, (int, float, str)):
                y = float(y)
        except Exception:
            print(locals())
            raise

    Parameters
    ----------
    func_header : str
        the function header, e.g. ``f(a, b, c)``
    eq : str
        the value on the other side of the equals sign (f=eq),
        e.g. ``1.`` or ``max(a, b, c)``
    default_values : dict[name] = value
        the default values from the DTABLE card
    comment : str; default=''
        the card text, embedded in the generated docstring

    Returns
    -------
    func_name : str
        the name of the function ``f``
    msg : str
        see above
    variables : List[str]
        the variables used by the equation header: a, b, c

    Bug fix: the RuntimeError below used ``{builtin:!r}`` (an invalid
    format spec that itself raised ValueError) instead of ``{builtin!r}``.
    """
    msg = ''

    # 'f(a,b,c)' -> ('f', 'a,b,c'); nested parentheses are not supported here
    func_name, arguments = func_header.strip('(,)').split('(')
    func_name = func_name.strip(' ')
    variables = arguments.split(',')

    # argument names that shadow builtins/keywords get a leading underscore
    variables = ['_' + var if var in BUILTINS else var
                 for var in variables]
    if func_name in BUILTINS:
        func_name = '_' + func_name

    # whether eq is a literal (f(a,b,c) = 1.) or an expression
    # (f(a,b,c) = min(a,b,c)), the signature line is built the same way,
    # so the old is_float branch was collapsed
    func_line = _write_function_line(func_name, variables, default_values)
    msg += func_line
    msg += _write_comment(comment)
    msg += _write_variables(variables)

    # defense in depth: no raw builtin name may survive into the signature
    for builtin in BUILTINS:
        ubuiltin = '_' + builtin
        if re.search(r'\b%s\b' % builtin, func_line):
            raise RuntimeError(f'cannot have an equation with {builtin!r}\n{func_line}')
        if builtin in variables and ubuiltin not in variables:
            # was f'...{builtin:!r}...' -> ValueError: invalid format spec
            raise RuntimeError(f'cannot have an equation with {builtin!r}\n{variables}')

    # underscore-prefix any builtin that appears in the expression itself
    for builtin in BUILTINS:
        if builtin in eq and '_' + builtin not in eq:
            eq = eq.replace(builtin, '_' + builtin)
    msg += '    %s = %s\n' % (func_name, eq)
    return func_name, msg, variables
def _write_function_line(func_name: str, variables: List[str],
                         default_values: Dict[str, float]) -> str:
    """
    writes the ``def f(x, y, z=1.):`` part of the function

    Raises
    ------
    RuntimeError
        if a non-defaulted argument follows a defaulted one, which would
        be illegal Python ("default variables must be set at the end")
    """
    vals = []
    is_default = False
    for var in variables:
        if var in BUILTINS:
            # NOTE(review): this suffixes '_' while write_function_header
            # prefixes '_'; in practice variables arrive already prefixed,
            # so this branch is a no-op — confirm the intended convention
            var += '_'
        if var in default_values:
            vals.append('%s=%s' % (var, default_values[var]))
            is_default = True
        else:
            if is_default:
                # a positional parameter after a defaulted one is illegal
                msg = 'default variables must be set at the end of the function\n'
                msg += 'variables = %s\n' % variables
                msg += 'default_values = %s' % default_values
                raise RuntimeError(msg)
            vals.append('%s' % (var))
    vals2 = ', '.join(vals)
    msg = 'def %s(%s):\n' % (func_name, vals2)
    return msg
def _write_comment(comment: str) -> str:
"""writes the deqatn to the comment block"""
lines = comment.split('\n')
msgi = '\n '.join(lines)
msg = ' """\n %s"""\n' % msgi
return msg
def _write_variables(variables: List[str]) -> str:
    """emits the input-coercion block: int/str arguments become floats"""
    lines = ['    try:\n']
    for var in variables:
        if var in BUILTINS:
            var += '_'
        lines.append('        if isinstance(%s, (int, str)):\n' % var)
        lines.append('            %s = float(%s)\n' % (var, var))
    lines.append('    except Exception:\n')
    lines.append('        print(locals())\n')
    lines.append('        raise\n')
    return ''.join(lines)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,651
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/gui/dev/gui2/gui2.py
|
import os
import sys
from typing import List, Dict, Optional, Any
#import ctypes
# kills the program when you hit Cntl+C from the command line
# doesn't save the current state as presumably there's been an error
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)
from cpylog import SimpleLogger
from cpylog.html_utils import str_to_html
import numpy as np
import vtk
import pyNastran
from qtpy import QtCore, QtGui #, API
from qtpy.QtWidgets import (
QMainWindow, QFrame, QHBoxLayout, QAction, QMenu, QToolButton)
from qtpy.QtWidgets import QApplication
from pyNastran.gui.menus.application_log import ApplicationLogWidget
from pyNastran.gui.menus.python_console import PythonConsoleWidget
from pyNastran.gui.gui_objects.settings import Settings
from pyNastran.gui.qt_files.view_actions import ViewActions
from pyNastran.gui.qt_files.tool_actions import ToolActions
from pyNastran.gui.qt_files.QVTKRenderWindowInteractor import QVTKRenderWindowInteractor
from pyNastran.gui.dev.gui2.utils import build_actions, fill_menus
from pyNastran.gui.dev.gui2.help_actions import HelpActions
from pyNastran.gui.dev.gui2.load_actions import LoadActions
#from pyNastran.gui.formats import CLASS_MAP
from pyNastran.gui.dev.gui2.vtk_interface import VtkInterface, ScalarBar, fill_render_window
from pyNastran.gui.dev.gui2.format_setup import build_fmts, CLASS_MAP
from pyNastran.gui.menus.legend.legend_object import LegendObject
from pyNastran.gui.menus.highlight.highlight_object import HighlightObject, MarkObject
from pyNastran.gui.menus.preferences.preferences_object import PreferencesObject
IS_CUTTING_PLANE = False
IS_MATPLOTLIB = False
if IS_MATPLOTLIB:
from pyNastran.gui.menus.cutting_plane.cutting_plane_object import CuttingPlaneObject
from pyNastran.gui.menus.cutting_plane.shear_moment_torque_object import ShearMomentTorqueObject
IS_CUTTING_PLANE = True
from pyNastran.gui.menus.clipping.clipping_object import ClippingObject
from pyNastran.gui.menus.camera.camera_object import CameraObject
from pyNastran.gui.menus.edit_geometry_properties.edit_geometry_properties_object import (
EditGeometryPropertiesObject)
PKG_PATH = pyNastran.__path__[0]
class MainWindow2(QMainWindow):
"""
+-----------------------------------------+
| menubar: File Edit View Help |
+-----------------------------------------+
| Toolbar |
+-------------------------------+---------+
| VTK Window | Sidebar |
+-------------------------------+---------+
| Console / Logger |
+-----------------------------------------+
"""
    def __init__(self):
        """builds the main window: settings, logging, menus, toolbar, VTK frame"""
        super().__init__()
        #self.setSize(500, 500)
        self.last_dir = ''  # last directory a file dialog used
        self.is_gui = True
        self.dev = False
        self.debug = True

        # should vtk be enabled
        # True: typical
        # False: useful for testing a new version of qt
        self.run_vtk = True

        # should the python console be enabled
        self.execute_python = False

        # True: add Application Log
        # False: print to console (useful when there's a crash and you run from command line)
        self.html_logging = True

        # performance mode limits log messages to the application log as HTML is faster
        # to render in one go
        self._performance_mode = False
        self._log_messages = []

        # TODO: what is this for?
        self.title = ''

        self.cases = {}  # type: Dict[int, Any]
        self.form = []  # type: List[Any]
        # -----------------------------------------
        self.name = 'main'
        self.model_type = None
        self.nid_maps = {}
        self.eid_maps = {}

        # the info in the lower left part of the screen
        self.text_actors = {}  # type: Dict[int, vtk.vtkTextActor]

        # the various coordinate systems (e.g., cid=0, 1)
        self.axes = {}  # type: Dict[int, vtk.vtkAxesActor]

        self.models = {}  # type: Dict[str, Any]
        self.grid_mappers = {}  # type: Dict[str, Any]
        self.main_grids = {}  # type: Dict[str, vtk.vtkUnstructuredGrid]
        self.alt_grids = {}  # type: Dict[str, vtk.vtkUnstructuredGrid]
        self.geometry_actors = {}  # type: Dict[str, vtkLODActor]
        self.actions = {}  # type: Dict[str, QAction]
        #geometry_actors
        # -----------------------------------------
        # load the persisted settings from the OS-level QSettings store
        self.settings = Settings(self)
        settings = QtCore.QSettings()
        self.settings.load(settings)

        # NOTE(review): self.actions was already initialized above
        self.actions = {}  # type: Dict[str, QAction]
        self.load_actions = LoadActions(self)
        self.view_actions = ViewActions(self)
        self.tool_actions = ToolActions(self)

        #-----------------------------------------------
        # menus
        self.preferences_obj = PreferencesObject(self)
        self.edit_geometry_properties_obj = EditGeometryPropertiesObject(self)
        #-----------------------------------------------

        # logging must exist before any widget can log
        self.log = None
        self._start_logging()
        if self.html_logging is True:
            self.log_dock_widget = ApplicationLogWidget(self)
            self.log_widget = self.log_dock_widget.log_widget
            self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.log_dock_widget)
        else:
            self.log_widget = self.log

        #self.addToolBar
        self.toolbar = self.addToolBar('Show toolbar')
        self.toolbar.setObjectName('main_toolbar')
        self.toolbar.show()

        self.menubar = self.menuBar()
        self._fill_menubar()

        # register the supported geometry formats
        self.format_class_map = CLASS_MAP
        fmt_order = ['cart3d', 'stl']
        self.fmts, self.supported_formats = build_fmts(
            self, self.format_class_map, fmt_order,
            self.log, stop_on_failure=False)

        #self.create_vtk_actors(create_rend=True)
        self.vtk_frame = QFrame()
        self.vtk_interface = VtkInterface(self, self.vtk_frame)

        # put the vtk_interactor inside the vtk_frame
        self.set_vtk_frame_style()

        # put the corner axis into the renderer
        self.tool_actions.create_corner_axis()

        if self.execute_python:
            self.python_dock_widget = PythonConsoleWidget(self)
            self.python_dock_widget.setObjectName('python_console')
            self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.python_dock_widget)

        self._load_models()
        self.statusBar().showMessage('Ready')
        self.show()
    def _load_models(self) -> None:
        """loads two demo geometries (cart3d + stl) into the gui

        NOTE(review): these are hard-coded absolute paths to a developer's
        machine; they will fail (silently, raise_error=False) elsewhere.
        """
        cart3d_filename = r'C:\NASA\m4\formats\git\pyNastran\pyNastran\converters\cart3d\models\threePlugs.a.tri'
        #self.on_load_geometry()
        self.load_actions.on_load_geometry(
            infile_name=cart3d_filename, geometry_format='cart3d',
            name='cart3d', plot=True, raise_error=False)

        stl_filename = r'C:\NASA\m4\formats\git\pyNastran\pyNastran\converters\stl\sphere.stl'
        self.load_actions.on_load_geometry(
            infile_name=stl_filename, geometry_format='stl',
            name='stl', plot=True, raise_error=False)

        # Render again to set the correct view
        self.render()
    def _start_logging(self) -> None:
        """creates self.log; routes messages to the HTML widget when enabled"""
        if self.log is not None:
            # already configured; never re-create the logger
            return
        if self.html_logging is True:
            # messages go through _logg_msg into the log widget
            log = SimpleLogger(
                level='debug', encoding='utf-8',
                log_func=lambda w, x, y, z: self._logg_msg(w, x, y, z))
            # logging needs synchronizing, so the messages from different
            # threads would not be interleave
            self.log_mutex = QtCore.QReadWriteLock()
        else:
            # plain console logger
            log = SimpleLogger(
                level='debug', encoding='utf-8',
                #log_func=lambda x, y: print(x, y)  # no colorama
            )
        self.log = log
def _logg_msg(self, log_type: str, filename: str, lineno: int, msg: str) -> None:
"""
Add message to log widget trying to choose right color for it.
Parameters
----------
log_type : str
{DEBUG, INFO, ERROR, COMMAND, WARNING} or prepend 'GUI '
filename : str
the active file
lineno : int
line number
msg : str
message to be displayed
"""
if not self.html_logging:
# standard logger
name = '%-8s' % (log_type + ':')
filename_n = '%s:%s' % (filename, lineno)
msg2 = ' %-28s %s\n' % (filename_n, msg)
print(name, msg2)
return
if 'DEBUG' in log_type and not self.settings.show_debug:
return
elif 'INFO' in log_type and not self.settings.show_info:
return
elif 'COMMAND' in log_type and not self.settings.show_command:
return
elif 'WARNING' in log_type and not self.settings.show_warning:
return
elif 'ERROR' in log_type and not self.settings.show_error:
return
if log_type in ['GUI ERROR', 'GUI COMMAND', 'GUI DEBUG', 'GUI INFO', 'GUI WARNING']:
log_type = log_type[4:] # drop the GUI
html_msg = str_to_html(log_type, filename, lineno, msg)
if self.performance_mode or self.log_widget is None:
self._log_messages.append(html_msg)
else:
self._log_msg(html_msg)
def _log_msg(self, msg: str) -> None:
    """prints an HTML log message (appends it to the log widget)"""
    # serialize writes: records may arrive from worker threads
    self.log_mutex.lockForWrite()
    text_cursor = self.log_widget.textCursor()
    end = text_cursor.End
    text_cursor.movePosition(end)  # always append at the end of the log
    text_cursor.insertHtml(msg)
    self.log_widget.ensureCursorVisible()  # new message will be visible
    # NOTE(review): an exception above would leave the lock held;
    # consider a try/finally around the widget calls
    self.log_mutex.unlock()
def log_info(self, msg: str) -> None:
    """Log *msg* with an 'INFO:' prefix; a None msg is logged as an error."""
    if msg is None:
        return self.log.simple_msg('msg is None; must be a string', 'GUI ERROR')
    return self.log.simple_msg(msg, 'GUI INFO')
def log_debug(self, msg: str) -> None:
    """Log *msg* with a 'DEBUG:' prefix; a None msg is logged as an error."""
    if msg is None:
        return self.log.simple_msg('msg is None; must be a string', 'GUI ERROR')
    return self.log.simple_msg(msg, 'GUI DEBUG')
def log_command(self, msg: str) -> None:
    """Log *msg* with a 'COMMAND:' prefix; a None msg is logged as an error."""
    if msg is None:
        return self.log.simple_msg('msg is None; must be a string', 'GUI ERROR')
    return self.log.simple_msg(msg, 'GUI COMMAND')
def log_error(self, msg: str) -> None:
    """Log *msg* with a 'GUI ERROR:' prefix (None gets a replacement message)."""
    if msg is None:
        return self.log.simple_msg('msg is None; must be a string', 'GUI ERROR')
    return self.log.simple_msg(msg, 'GUI ERROR')
def log_warning(self, msg: str) -> None:
    """Log *msg* with a 'WARNING:' prefix; a None msg is logged as an error."""
    if msg is None:
        return self.log.simple_msg('msg is None; must be a string', 'GUI ERROR')
    return self.log.simple_msg(msg, 'GUI WARNING')
#def on_escape_null(self) -> None:
#"""
#The default state for Escape key is nothing.
#"""
#pass
def _on_execute_python_button(self, clear=False):
"""executes the docked python console"""
try:
enter_data = self.python_dock_widget.enter_data
except Exception as error:
self.log_error(str(error))
self.log_error('problem getting enter_data from python console')
return
txt = str(enter_data.toPlainText()).rstrip()
is_passed = self._execute_python_code(txt)
if is_passed and clear:
enter_data.clear()
def set_vtk_frame_style(self):
    """uses the vtk objects to set up the window (frame)"""
    # pack the interactor widget into the frame with a thin margin
    vtk_hbox = QHBoxLayout()
    vtk_hbox.setContentsMargins(2, 2, 2, 2)
    vtk_hbox.addWidget(self.vtk_interactor)
    self.vtk_frame.setLayout(vtk_hbox)
    self.vtk_frame.setFrameStyle(QFrame.NoFrame | QFrame.Plain)
    # this is our main, 'central' widget
    self.setCentralWidget(self.vtk_frame)
    #print('build_vtk_frame')
@property
def grid(self) -> vtk.vtkUnstructuredGrid:
    """The unstructured grid of the currently active model (``self.name``)."""
    return self.main_grids[self.name]
@property
def vtk_interactor(self) -> QVTKRenderWindowInteractor:
    """The Qt/VTK interactor widget (delegated to the VtkInterface)."""
    return self.vtk_interface.vtk_interactor
@property
def rend(self) -> vtk.vtkRenderer:
    """The active vtkRenderer (delegated to the VtkInterface)."""
    return self.vtk_interface.rend
@property
def iren(self) -> QVTKRenderWindowInteractor:
    """Same object as the ``vtk_interactor`` property (vtk-style alias)."""
    return self.vtk_interface.vtk_interactor
@property
def render_window(self) -> vtk.vtkRenderWindow:
    """The vtkRenderWindow owned by the interactor widget."""
    return self.vtk_interactor.GetRenderWindow()
def render(self) -> None:
    """Force a redraw of the VTK render window."""
    window = self.vtk_interactor.GetRenderWindow()
    window.Render()
def get_camera(self) -> vtk.vtkCamera:
    """Return the active camera of the current renderer."""
    return self.rend.GetActiveCamera()
def turn_text_off(self) -> None:
    """Placeholder: only logs a warning; text hiding is not wired up here."""
    self.log.warning('turn_text_off')
#-----------------------------------------------------------------------
# geometry
def set_quad_grid(self, box_name: str,
                  nodes: np.ndarray, elements: np.ndarray,
                  color: Optional[List[float]]=None,
                  line_width: float=1, opacity: float=1.) -> None:
    """Create/update a named quad grid (delegated to the VtkInterface)."""
    self.vtk_interface.set_quad_grid(box_name, nodes, elements,
                                     color=color, line_width=line_width, opacity=opacity)
def create_global_axes(self, dim_max: float) -> None:
    """Create the global axes actor, sized by *dim_max* (delegated)."""
    self.vtk_interface.create_global_axes(dim_max)
@property
def scalar_bar_actor(self) -> ScalarBar:
    """The scalar (legend) bar actor (delegated to the VtkInterface)."""
    return self.vtk_interface.scalar_bar_actor
# geometry
#-----------------------------------------------------------------------
# results post-processing
def _finish_results_io2(self, model_name: str, form: List[Any], cases: Dict[int, Any]):
    """Store the results-sidebar *form*; placeholder for full result setup.

    NOTE(review): *cases* is currently ignored (the assignment is
    commented out) and *model_name* is unused.
    """
    self.form = form
    #self.cases = cases
    self.log.warning('_finish_results_io2')
def get_new_icase(self) -> int:
    """Return the next free result-case index (always 0 in this stub)."""
    return 0
def update_result_cases(self, cases: Dict[int, Any]) -> None:
    """Merge *cases* into ``self.cases`` (existing case ids are overwritten).

    Parameters
    ----------
    cases : Dict[int, Any]
        new/updated result cases keyed by case id
    """
    # dict.update performs the old key-by-key copy loop in one call
    self.cases.update(cases)
def get_form(self) -> List[Any]:
    """Return the results-sidebar form stored by ``_finish_results_io2``."""
    return self.form
#def _setup_formats(self):
#fmt_name, _major_name, geom_wildcard, geom_func, res_wildcard, _res_func = fmt
#from pyNastran.converters.cart3d.cart3d_io import Cart3dIO
#from pyNastran.converters.stl.stl_io import STL_IO
#cart3d_class = Cart3dIO(self)
#stl_class = STL_IO(self).get_stl_wildcard_geometry_results_functions()
#fmts = []
#return []
#-----------------------------------------------------------------------
# gui setup
def _fill_menubar(self) -> None:
    """Build the File/Window/Help menus and the main toolbar.

    Fixes: a first ``toolbar_tools`` list that was immediately
    overwritten (dead code) has been removed, and the local ``help``
    variable no longer shadows the builtin.
    """
    file_actions_list = [
        'load_geometry', 'load_results', '',
        'load_custom_result', '',
        'load_csv_user_points', 'load_csv_user_geom', 'script', '', 'exit', ]
    help_actions = HelpActions(self)
    # toolbar buttons, in display order
    toolbar_tools = [
        'reload', 'load_geometry', 'load_results',
        'front_view', 'back_view', 'top_view', 'bottom_view',
        'left_view', 'right_view',
        'magnify', 'shrink', # 'zoom',
        'rotate_clockwise', 'rotate_cclockwise',
        #'rotation_center', 'measure_distance', 'probe_result',
        #'highlight_cell', 'highlight_node',
        #'area_pick', 'highlight_nodes_elements', 'mark_nodes_elements',
        #'wireframe', 'surface', 'edges',
    ]
    toolbar_tools += [
        'camera_reset', # 'view',
        'screenshot', # 'min', 'max', 'map_element_fringe',
        '', # 'exit'
    ]
    # NOTE(review): menu_view is built (and extended below) but is never
    # placed into menus_list -- a 'View' menu entry may be missing
    menu_view = [
        'screenshot', '', 'wireframe', 'surface', 'camera_reset', '',
        'set_preferences', #'cutting_plane',
        '',
        'label_clear', 'label_reset', '',
        'legend', 'animation', 'geo_properties',
    ]
    menu_window = []
    if self.html_logging:
        self.actions['log_dock_widget'] = self.log_dock_widget.toggleViewAction()
        self.actions['log_dock_widget'].setStatusTip("Show/Hide application log")
        menu_view += ['', 'show_info', 'show_debug', 'show_command', 'show_warning', 'show_error']
        menu_window += ['log_dock_widget']
    if self.execute_python:
        self.actions['python_dock_widget'] = self.python_dock_widget.toggleViewAction()
        self.actions['python_dock_widget'].setStatusTip("Show/Hide Python Console")
        menu_window += ['python_dock_widget']
    menus_list = [
        ('file', '&File', file_actions_list),
        ('window', '&Window', menu_window),
        ('help', '&Help', help_actions.actions_list),
        ('toolbar', self.toolbar, toolbar_tools),
    ]
    self.actions = self._setup_actions(
        help_actions, self.view_actions,
        base_actions=self.actions)  # type: Dict[str, QAction]
    self.menus = fill_menus(self, menus_list, self.actions, allow_missing_actions=False)
def _setup_actions(self,
                   help: HelpActions,
                   view_actions: ViewActions,
                   base_actions: Optional[Dict[str, QAction]]) -> Dict[str, QAction]:
    """Build the QAction table for the menus/toolbar.

    Parameters
    ----------
    help : HelpActions
        provider of the Help-menu tool tuples
    view_actions : ViewActions
        camera/view handlers (bound via the lambdas below)
    base_actions : Dict[str, QAction]
        existing actions to merge into (must already be a dict)

    Returns
    -------
    actions : Dict[str, QAction]
        all actions keyed by short name

    Fixes: the show_warning/show_error tooltips wrongly said
    'Show "COMMAND" messages'.
    """
    assert isinstance(base_actions, dict), base_actions
    icon_path = os.path.join(PKG_PATH, 'gui', 'icons')
    file_tools = [
        ('exit', '&Exit', 'texit.png', 'Ctrl+Q', 'Exit application', self.closeEvent),
        ('reload', 'Reload Model...', 'treload.png', '', 'Remove the model and reload the same geometry file', self.on_reload),
        ('load_geometry', 'Load &Geometry...', 'load_geometry.png', 'Ctrl+O', 'Loads a geometry input file', self.on_load_geometry),
        ('load_results', 'Load &Results...', 'load_results.png', 'Ctrl+R', 'Loads a results file', self.on_load_results),
        ('load_csv_user_geom', 'Load CSV User Geometry...', '', None, 'Loads custom geometry file', self.on_load_user_geom),
        ('load_csv_user_points', 'Load CSV User Points...', 'user_points.png', None, 'Loads CSV points', self.on_load_csv_points),
        ('load_custom_result', 'Load Custom Results...', '', None, 'Loads a custom results file', self.on_load_custom_results),
        ('script', 'Run Python Script...', 'python48.png', None, 'Runs pyNastranGUI in batch mode', self.on_run_script),
    ]
    view_tools = [
        # view actions
        ('back_view', 'Back View', 'back.png', 'x', 'Flips to +X Axis', lambda: self.view_actions.update_camera('+x')),
        ('right_view', 'Right View', 'right.png', 'y', 'Flips to +Y Axis', lambda: self.view_actions.update_camera('+y')),
        ('top_view', 'Top View', 'top.png', 'z', 'Flips to +Z Axis', lambda: self.view_actions.update_camera('+z')),
        ('front_view', 'Front View', 'front.png', 'Shift+X', 'Flips to -X Axis', lambda: self.view_actions.update_camera('-x')),
        ('left_view', 'Left View', 'left.png', 'Shift+Y', 'Flips to -Y Axis', lambda: self.view_actions.update_camera('-y')),
        ('bottom_view', 'Bottom View', 'bottom.png', 'Shift+Z', 'Flips to -Z Axis', lambda: self.view_actions.update_camera('-z')),
        # zoom
        ('magnify', 'Magnify', 'plus_zoom.png', 'm', 'Increase Magnfication', self.view_actions.on_increase_magnification),
        ('shrink', 'Shrink', 'minus_zoom.png', 'Shift+M', 'Decrease Magnfication', self.view_actions.on_decrease_magnification),
        # rotation
        ('rotate_clockwise', 'Rotate Clockwise', 'tclock.png', 'o', 'Rotate Clockwise', self.view_actions.on_rotate_clockwise),
        ('rotate_cclockwise', 'Rotate Counter-Clockwise', 'tcclock.png', 'Shift+O', 'Rotate Counter-Clockwise', self.view_actions.on_rotate_cclockwise),
        ('camera_reset', 'Reset Camera View', 'trefresh.png', 'r', 'Reset the camera view to default', self.view_actions.on_reset_camera),
        #('view', 'Camera View', 'view.png', None, 'Load the camera menu', self.camera_obj.set_camera_menu),
        ('screenshot', 'Take a Screenshot...', 'tcamera.png', 'CTRL+I', 'Take a Screenshot of current view', self.tool_actions.on_take_screenshot),
        # logging visibility toggles
        ('show_info', 'Show INFO', 'show_info.png', None, 'Show "INFO" messages', self.on_show_info),
        ('show_debug', 'Show DEBUG', 'show_debug.png', None, 'Show "DEBUG" messages', self.on_show_debug),
        ('show_command', 'Show COMMAND', 'show_command.png', None, 'Show "COMMAND" messages', self.on_show_command),
        ('show_warning', 'Show WARNING', 'show_warning.png', None, 'Show "WARNING" messages', self.on_show_warning),
        ('show_error', 'Show ERROR', 'show_error.png', None, 'Show "ERROR" messages', self.on_show_error),
        # core menus
        #('legend', 'Modify Legend...', 'legend.png', 'CTRL+L', 'Set Legend', self.legend_obj.set_legend_menu),
        #('animation', 'Create Animation...', 'animation.png', 'CTRL+A', 'Create Animation', self.legend_obj.set_animation_menu),
        #('clipping', 'Set Clipping...', '', None, 'Set Clipping', self.clipping_obj.set_clipping_menu),
        ('set_preferences', 'Preferences...', 'preferences.png', 'CTRL+P', 'Set GUI Preferences', self.preferences_obj.set_preferences_menu),
        ('geo_properties', 'Edit Geometry Properties...', '', 'CTRL+E', 'Change Model Color/Opacity/Line Width', self.edit_geometry_properties_obj.edit_geometry_properties),
        #('map_element_fringe', 'Map Element Fringe', '', 'CTRL+F', 'Map Elemental Centroidal Fringe Result to Nodes', self.map_element_centroid_to_node_fringe_result),
    ]
    checkables_set = set()  # no checkable actions yet
    # setup the actions
    actions_list = file_tools + view_tools + help.tools_list
    actions = build_actions(self, base_actions, icon_path, actions_list, checkables_set, self.log)
    assert len(actions) > 0, actions
    return actions
# ------------------------------------------
# file
def on_reload(self):
    """Stub: reloading the current model is not implemented (no-op)."""
    pass
def on_load_geometry(self):
    """Open the load-geometry dialog (no preset file/format; errors logged)."""
    self.load_actions.on_load_geometry(
        infile_name=None, geometry_format=None,
        name='main', plot=True, raise_error=False)
#def _reset_model(self, name: str) -> None:
#self.log.info('_reset_model')
def create_vtk_actors(self, create_rend: bool=True) -> None:
    """creates the vtk actors used by the GUI

    NOTE(review): ``rend`` is defined as a read-only property on this
    class, so this assignment would raise AttributeError if called; the
    only call site is currently commented out -- confirm before enabling.
    """
    if create_rend:
        self.rend = vtk.vtkRenderer()
@property
def grid_selected(self):
    """The active model's grid; same lookup as the ``grid`` property."""
    return self.main_grids[self.name]
def _remove_old_geometry(self, filename: str):
    """Drop the mapper/grid/actor registered under *filename*, if loaded.

    >>> self.geometry_actors
    {'cart3d': (vtkRenderingLODPython.vtkLODActor)000002B26C562C48,
     'stl': (vtkRenderingLODPython.vtkLODActor)000002B25024C7C8
    }
    """
    if filename in self.grid_mappers:
        #mapper = self.grid_mappers[filename]
        del self.grid_mappers[filename]
        grid = self.main_grids[filename]
        # FastDelete frees the grid without reference checking
        grid.FastDelete()
        del self.main_grids[filename]
        # detach the actor from the renderer before forgetting it
        actor = self.geometry_actors[filename]
        self.rend.RemoveActor(actor)
        del self.geometry_actors[filename]
    # the per-model registries these dicts live in:
    #self.models = {} # type: Dict[str, Any]
    #self.grid_mappers = {} # type: Dict[str, Any]
    #self.main_grids = {} # type: Dict[str, vtk.vtkUnstructuredGrid]
    #self.alt_grids = {} # type: Dict[str, vtk.vtkUnstructuredGrid]
    #self.geometry_actors = {} # type: Dict[str, vtkLODActor]
def _reset_model(self, name: str) -> None:
    """resets the grids; sets up alt_grids

    Creates a fresh grid/mapper/actor trio for a new model *name*,
    or resets the existing grid when *name* is already registered.

    NOTE(review): if ``main_grids`` does not exist yet, the first
    condition is False and the ``elif`` would raise AttributeError,
    making the ``_setup_main_grid()`` branch unreachable -- confirm
    the intended guard.
    """
    if hasattr(self, 'main_grids') and name not in self.main_grids:
        # first time this model name is seen: build its VTK pipeline
        grid = vtk.vtkUnstructuredGrid()
        grid_mapper = vtk.vtkDataSetMapper()
        grid_mapper.SetInputData(grid)
        geometry_actor = vtk.vtkLODActor()
        geometry_actor.DragableOff()
        geometry_actor.SetMapper(grid_mapper)
        self.rend.AddActor(geometry_actor)
        # register the new model as the active one
        self.name = name
        self.models = {}
        self.main_grids[name] = grid
        self.grid_mappers[name] = grid_mapper
        self.geometry_actors[name] = geometry_actor
        grid.Modified()
        if 0:  # NOTE(review): dead branch (scalar-bar hookup disabled)
            # link the current "main" to the scalar bar
            scalar_range = self.grid_selected.GetScalarRange()
            grid_mapper.ScalarVisibilityOn()
            grid_mapper.SetScalarRange(scalar_range)
            grid_mapper.SetLookupTable(self.color_function)
            #self.edge_actor = vtk.vtkLODActor()
            #self.edge_actor.DragableOff()
            #self.edge_mapper = vtk.vtkPolyDataMapper()
            # create the edges
            #self.get_edges()
    elif name in self.main_grids:
        # existing model: wipe its geometry, keep the pipeline
        grid = self.main_grids[name]
        grid.Reset()
        grid.Modified()
    else:
        self._setup_main_grid()
    # reset alt grids
    alt_grids = self.alt_grids
    alt_names = self.alt_grids.keys()
    for alt_name in alt_names:
        alt_grid = alt_grids[alt_name]
        alt_grid.Reset()
        alt_grid.Modified()
def on_load_results(self):
    """Placeholder: results loading only logs a warning for now."""
    self.log.warning('on_load_results')
def on_load_user_geom(self):
    """Placeholder: custom user geometry loading only logs a warning."""
    self.log.warning('on_load_user_geom')
def on_load_csv_points(self):
    """Placeholder: CSV user-points loading only logs a warning."""
    self.log.warning('on_load_csv_points')
def on_load_custom_results(self):
    """Placeholder: custom results loading only logs a warning."""
    self.log.warning('on_load_custom_results')
@property
def performance_mode(self):
    """get the performance mode

    True while log messages are being buffered instead of rendered.
    """
    return self._performance_mode
@performance_mode.setter
def performance_mode(self, performance_mode):
    """
    Set the performance mode. If performance mode flips
    to False, we dump the log buffer.
    """
    if not performance_mode and self._log_messages:
        # flush everything buffered while performance mode was on,
        # as a single widget insert (much cheaper than one per record)
        msg = ''.join(self._log_messages)
        self._log_msg(msg)
        self._log_messages = []
    self._performance_mode = performance_mode
def start_stop_performance_mode(func):
    """
    Decorator: suppress (buffer) logging while *func* runs.

    If we started with logging already suppressed, the mode is left
    untouched, so nested decorated calls behave correctly.

    Fixes: uses ``functools.wraps`` so the wrapped function keeps its
    name/docstring, and ``try/finally`` replaces the duplicated
    success/exception reset logic.
    """
    from functools import wraps

    @wraps(func)
    def new_func(self, *args, **kwargs):
        """The actual function exec'd by the decorated function."""
        performance_mode_initial = self.performance_mode
        if not performance_mode_initial:
            self.performance_mode = True
        try:
            return func(self, *args, **kwargs)
        finally:
            # restore only if this call was the one that enabled it
            if not performance_mode_initial:
                self.performance_mode = False
    return new_func
@start_stop_performance_mode
def on_run_script(self, python_file: bool=False) -> bool:
    """pulldown for running a python script

    Parameters
    ----------
    python_file : bool/str; default=False
        False/None -> pop a file-picker dialog; otherwise the path of
        the script to run.
        NOTE(review): the ``bool`` annotation is misleading -- a str
        path is the usual value.

    Returns
    -------
    is_passed : bool
        True if the script executed without error
    """
    is_passed = False
    if python_file in [None, False]:
        # no path given; ask the user for one
        title = 'Choose a Python Script to Run'
        wildcard = "Python (*.py)"
        infile_name = self._create_load_file_dialog(
            wildcard, title, self._default_python_file)[1]
        if not infile_name:
            return is_passed  # user clicked cancel
        #python_file = os.path.join(script_path, infile_name)
        python_file = os.path.join(infile_name)
    if not os.path.exists(python_file):
        msg = 'python_file = %r does not exist' % python_file
        self.log_error(msg)
        return is_passed
    with open(python_file, 'r') as python_file_obj:
        txt = python_file_obj.read()
    is_passed = self._execute_python_code(txt, show_msg=False)
    if not is_passed:
        return is_passed
    # remember the script as the dialog default for next time
    self._default_python_file = python_file
    self.log_command('self.on_run_script(%r)' % python_file)
    # NOTE(review): this print duplicates the log_command above --
    # probably a debugging leftover
    print('self.on_run_script(%r)' % python_file)
    return is_passed
# file
# help
# ------------------------------------------
def _check_for_latest_version(self) -> bool:
    """Placeholder: online version check is disabled; always returns False."""
    self.log.warning('_check_for_latest_version')
    return False
# help
# ------------------------------------------
# basic functions
#---------------------------------------------------------------------------
# basic interaction
def on_show_debug(self) -> None:
    """Toggle the visibility flag for DEBUG log messages."""
    settings = self.settings
    settings.show_debug = not settings.show_debug
def on_show_info(self) -> None:
    """Toggle the visibility flag for INFO log messages."""
    settings = self.settings
    settings.show_info = not settings.show_info
def on_show_command(self) -> None:
    """Toggle the visibility flag for COMMAND log messages."""
    settings = self.settings
    settings.show_command = not settings.show_command
def on_show_warning(self) -> None:
    """Toggle the visibility flag for WARNING log messages."""
    settings = self.settings
    settings.show_warning = not settings.show_warning
def on_show_error(self) -> None:
    """Toggle the visibility flag for ERROR log messages."""
    settings = self.settings
    settings.show_error = not settings.show_error
@property
def window_title(self) -> str:
    """The main window's title.

    Bug fix: Qt's ``QWidget`` has no ``getWindowTitle()``; the accessor
    is ``windowTitle()`` (the setter below correctly uses
    ``setWindowTitle``), so the old code raised AttributeError.
    """
    return self.windowTitle()
@window_title.setter
def window_title(self, msg: str) -> None:
    """Set the Qt window title to *msg* (no base-title prefix is added)."""
    #msg2 = "%s - " % self.base_window_title
    #msg2 += msg
    self.setWindowTitle(msg)
def closeEvent(self, *args) -> None:
    """
    Handling saving state before application when application is
    being closed.

    NOTE(review): the state-saving calls are commented out, so this
    currently only shuts the Qt application down.
    """
    #settings = QtCore.QSettings()
    #settings.clear()
    #self.settings.save(settings)
    q_app = QApplication.instance()
    if q_app is None:
        # no QApplication running; terminate the interpreter directly
        sys.exit()
    q_app.quit()
def main():
    """Entry point: create the QApplication and run the main window."""
    if sys.platform == 'win32':
        # give the app its own Windows taskbar identity (not "python")
        import ctypes
        myappid = 'pynastran.pynastrangui.%s' % (pyNastran.__version__)  # arbitrary string
        ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid)
    app = QApplication(sys.argv)
    QApplication.setOrganizationName('pyNastran')
    QApplication.setOrganizationDomain(pyNastran.__website__)
    QApplication.setApplicationName('pyNastran')
    QApplication.setApplicationVersion(pyNastran.__version__)
    # keep a reference so the window is not garbage collected
    w = MainWindow2()
    # NOTE(review): the exec_() return code is discarded; consider
    # sys.exit(app.exec_()) so the exit status propagates
    app.exec_()
# script entry point (excluded from coverage)
if __name__ == '__main__':  # pragma: no cover
    main()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,652
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/op2/dev/pyyeti/n2y.py
|
# -*- coding: utf-8 -*-
"""
The Python version of the nas2cam tools. Works with data created by
op2.rdnas2cam. Adapted from the Yeti version.
@author: Tim Widrick
"""
import math
import sys
import warnings
import scipy.linalg as linalg
import numpy as np
import pyNastran.op2.dev.pyyeti.locate as locate
def rigid_body_geom(grids, refpoint=np.array([[0, 0, 0]])):
    """
    Compute 6 rigid-body modes from geometry.

    Parameters
    ----------
    grids : 3-column matrix
        Coordinates in basic; columns are [x, y, z].
    refpoint : integer or 3-element vector
        Defines location that rb modes will be relative to: either row
        index into `grids` (starting at 0) or the [x, y, z] location.
        (The default array is only read, never mutated.)

    Returns
    -------
    rb : ndarray
        Rigid-body modes; rows(grids)*6 x 6.

    Notes
    -----
    All grids are assumed to be in the same rectangular coordinate
    system. For a much more sophisticated routine, see
    n2y.rigid_body_geom_uset().

    Examples
    --------
    >>> import numpy as np
    >>> import n2y
    >>> grids = np.array([[0., 0., 0.], [30., 10., 20.]])
    >>> n2y.rigid_body_geom(grids)
    array([[  1.,   0.,   0.,   0.,   0.,  -0.],
           [  0.,   1.,   0.,  -0.,   0.,   0.],
           [  0.,   0.,   1.,   0.,  -0.,   0.],
           [  0.,   0.,   0.,   1.,   0.,   0.],
           [  0.,   0.,   0.,   0.,   1.,   0.],
           [  0.,   0.,   0.,   0.,   0.,   1.],
           [  1.,   0.,   0.,   0.,  20., -10.],
           [  0.,   1.,   0., -20.,   0.,  30.],
           [  0.,   0.,   1.,  10., -30.,   0.],
           [  0.,   0.,   0.,   1.,   0.,   0.],
           [  0.,   0.,   0.,   0.,   1.,   0.],
           [  0.,   0.,   0.,   0.,   0.,   1.]])
    """
    # fix (doc): the doctest above called the function by its pre-rename
    # name ``n2y.rbgeom``, which no longer exists
    grids = np.reshape(grids, (-1, 3))
    r = np.shape(grids)[0]
    if np.size(refpoint) == 1:
        # refpoint is a row index into grids
        shift = np.ones((r, 1)) @ grids[refpoint:refpoint+1]
        grids = grids - shift
    elif np.any(refpoint != [0, 0, 0]):
        # refpoint is an [x, y, z] location
        shift = np.ones((r, 1)) @ np.reshape(refpoint, (1, 3))
        grids = grids - shift
    rbmodes = np.zeros((r*6, 6))
    # rotation about x (col 3): dy = -z, dz = +y
    rbmodes[1::6, 3] = -grids[:, 2]
    rbmodes[2::6, 3] = grids[:, 1]
    # rotation about y (col 4): dx = +z, dz = -x
    rbmodes[::6, 4] = grids[:, 2]
    rbmodes[2::6, 4] = -grids[:, 0]
    # rotation about z (col 5): dx = -y, dy = +x
    rbmodes[::6, 5] = -grids[:, 1]
    rbmodes[1::6, 5] = grids[:, 0]
    # unit diagonal: each node's own 6 dof move one-to-one
    for i in range(6):
        rbmodes[i::6, i] = 1.
    return rbmodes
def rigid_body_geom_uset(uset, refpoint=np.array([[0, 0, 0]])):
    """
    Compute 6 rigid-body modes from geometry using a USET table.

    Parameters
    ----------
    uset : ndarray
        A 6-column matrix as output by :func:`op2.rdn2cop2`.
    refpoint : integer or vector
        Defines location that rb modes will be relative to. Either an
        integer specifying the node ID (which is in the uset table),
        or it is a coordinates vector [x, y, z] in basic.

    Returns
    -------
    rb : 6-column array
        Rigid-body modes in "global" coordinates.

    The return `rb` is analogous to the output of Nastran VECPLOT
    option 4. Here, "global" means the combination of all local
    coordinate systems. In other words, the rigid-body modes are in
    all the local coordinates of the grids. The refpoint is given
    unit translations and rotations in the basic coordinate system.

    Notes
    -----
    - All SPOINTs, all EPOINTs, and GRIDS in the Q-set or in the "left
      over" C-set will have 0's.
    - This routine will handle grids in rectangular, cylindrical, and
      spherical coordinates.

    See also nastran.bulk2uset, n2y.rigid_body_geom, op2.rdnas2cam,
    op2.read_nas2cam_op2, n2y.usetprt.

    Examples
    --------
    >>> import n2y
    >>> import numpy as np
    >>> # first, make a uset table:
    >>> #  node 100 in basic is @ [5, 10, 15]
    >>> #  node 200 in cylindrical coordinate system is @
    >>> #  [r, th, z] = [32, 90, 10]
    >>> cylcoord = np.array([[1, 2, 0], [0, 0, 0], [1, 0, 0],
    ...                      [0, 1, 0]])
    >>> uset = None
    >>> uset = n2y.add_grid(uset, 100, 'b', 0, [5, 10, 15], 0)
    >>> uset = n2y.add_grid(uset, 200, 'b', cylcoord, [32, 90, 10],
    ...                     cylcoord)
    >>> np.set_printoptions(precision=2, suppress=True)
    >>> n2y.rigid_body_geom_uset(uset)   # rb modes relative to [0, 0, 0]
    array([[ 1.,  0.,  0.,  0., 15., -10.],
           [ 0.,  1.,  0., -15.,  0.,  5.],
           [ 0.,  0.,  1., 10., -5.,  0.],
           [ 0.,  0.,  0.,  1.,  0.,  0.],
           [ 0.,  0.,  0.,  0.,  1.,  0.],
           [ 0.,  0.,  0.,  0.,  0.,  1.],
           [ 0.,  0.,  1.,  0., -10.,  0.],
           [ 0., -1.,  0., 32., -0., -10.],
           [ 1.,  0.,  0.,  0., 32., -0.],
           [ 0.,  0.,  0.,  0.,  0.,  1.],
           [ 0.,  0.,  0.,  0., -1.,  0.],
           [ 0.,  0.,  0.,  1.,  0.,  0.]])
    """
    # find the grids (ignore spoints and epoints)
    r = np.shape(uset)[0]
    grid_rows = uset[:, 1] != 0
    # get the q-set and the left-over c-set:
    # NOTE(review): `mksetpv` is not imported/defined in this module view
    # -- confirm it is available at runtime
    qset = mksetpv(uset, "p", "q")
    if any(qset):
        qdof1 = uset[qset, 1] == 1
        qgrids = uset[qset, :1][qdof1]
        if any(qgrids):
            # expand qgrids to include all 6 dof:
            nq = len(qgrids)
            dof = np.arange(1., 7.).reshape(6, 1)
            qdof = dof @ np.ones((1, nq))
            qgrids = np.ones((6, 1)) @ qgrids.T
            qgrids = np.hstack((qgrids.reshape(-1, 1),
                                qdof.reshape(-1, 1)))
            # get partition vector:
            pvq = locate.get_intersection(uset[:, :2].astype(np.int64),
                                          qgrids, 1)[0]
            plain_grids = np.logical_not(locate.find2zo(pvq, r))
            # q-set grids must come out with zeros: drop their rows
            grid_rows = np.logical_and(grid_rows, plain_grids)
    rbmodes = np.zeros((r, 6))
    if not any(grid_rows):
        # nothing but spoints/epoints/q-set: all-zero result
        return rbmodes
    grids = uset[grid_rows]
    ngrids = grids.shape[0] // 6
    # rigid-body modes in basic coordinate system:
    if np.size(refpoint) == 1:
        # refpoint is a node ID; convert to a row index into `grids`
        refpoint = np.nonzero(grids[::6, 0] == refpoint)[0]
    rb = rigid_body_geom(grids[::6, 3:], refpoint)
    # treat as rectangular here; fix cylindrical & spherical below
    rb2 = np.zeros((np.shape(rb)))
    for j in range(ngrids):
        i = 6*j
        # rows i+3..i+5, cols 3: hold the 3x3 basic->local transform
        # for grid j (transposed here for the multiply)
        t = grids[i+3:i+6, 3:].T
        rb2[i:i+3] = t @ rb[i:i+3]
        rb2[i+3:i+6] = t @ rb[i+3:i+6]
    # fix up cylindrical:
    grid_loc = np.arange(0, grids.shape[0], 6)
    cyl = grids[1::6, 4] == 2  # coordinate-system type 2 = cylindrical
    if np.any(cyl):
        grid_loc_cyl = grid_loc[cyl]
        for i in grid_loc_cyl:
            t = grids[i+3:i+6, 3:].T
            loc = grids[i, 3:]
            loc2 = np.dot(t, loc - grids[i+2, 3:])
            if abs(loc2[1]) + abs(loc2[0]) > 1e-8:
                # rotate dof 1&2 (and 4&5) into the R/theta directions
                th = math.atan2(loc2[1], loc2[0])
                c = math.cos(th)
                s = math.sin(th)
                t = np.array([[c, s], [-s, c]])
                rb2[i:i+2] = np.dot(t, rb2[i:i+2])
                rb2[i+3:i+5] = np.dot(t, rb2[i+3:i+5])
    # fix up spherical:
    sph = grids[1::6, 4] == 3  # coordinate-system type 3 = spherical
    if np.any(sph):
        grid_loc_sph = grid_loc[sph]
        for i in grid_loc_sph:
            t = grids[i+3:i+6, 3:].T
            loc = grids[i, 3:]
            loc2 = np.dot(t, loc - grids[i+2, 3:])
            if abs(loc2[1]) + abs(loc2[0]) > 1e-8:
                # azimuth rotation first
                phi = math.atan2(loc2[1], loc2[0])
                c = math.cos(phi)
                s = math.sin(phi)
                t = np.array([[c, s], [-s, c]])
                rb2[i:i+2] = np.dot(t, rb2[i:i+2])
                rb2[i+3:i+5] = np.dot(t, rb2[i+3:i+5])
                loc2[:2] = np.dot(t, loc2[:2])
            # then the elevation rotation (applied unconditionally)
            if abs(loc2[2]) + abs(loc2[0]) > 1e-8:
                th = math.atan2(loc2[0], loc2[2])
            else:
                th = 0
            c = math.cos(th)
            s = math.sin(th)
            t = np.array([[s, 0, c], [c, 0, -s], [0, 1, 0]])
            rb2[i:i+3] = np.dot(t, rb2[i:i+3])
            rb2[i+3:i+6] = np.dot(t, rb2[i+3:i+6])
    # prepare final output:
    rbmodes[grid_rows] = rb2
    return rbmodes
def rigid_body_move(rb, oldref, newref):
    """
    Returns rigid-body modes relative to new reference point.

    Parameters
    ----------
    rb : 6 column ndarray
        Original rigid-body modes; assumed to be n x 6.
    oldref : 3 element array
        Original [x, y, z] reference location in basic coordinates.
    newref : 3 element array
        New [x, y, z] reference location in basic coordinates.

    Returns
    -------
    rbnew : 6 column ndarray
        New rigid-body modes from: rb*rigid_body_geom(oldref, newref).

    Examples
    --------
    >>> import numpy as np
    >>> import n2y
    >>> grids = np.array([[0., 0., 0.], [30., 10., 20.]])
    >>> rb0 = n2y.rigid_body_geom(grids)
    >>> rb1 = n2y.rigid_body_geom(grids, [2., 4., -5.])
    >>> rb1_b = n2y.rigid_body_move(rb0, [0., 0., 0.], [2., 4., -5.])
    >>> np.all(rb1_b == rb1)
    True
    """
    # the 6x6 rb modes of the old reference point, taken relative to the
    # new one, are exactly the change-of-reference transform
    shift = rigid_body_geom(oldref, newref)
    return np.dot(rb, shift)
def rigid_body_coords(rb, verbose=2):
    """
    Return coordinates of each node given rigid-body modes.
    Parameters
    ----------
    rb : 6 column ndarray
        Rigid-body modes. Nodes can be in any mixture of coordinate
        systems. Number of rows is assumed to be (6 x nodes) ... other
        DOF (like SPOINTs) must be partitioned out before calling this
        routine.
    verbose : integer
        If 1, print 1 summary line; and if > 1, print warnings for
        nodes as well.
    Returns
    -------
    tuple: (coords, maxdev, maxerr)
        coords : ndarray
            A 3-column matrix of [x, y, z] locations of each node,
            relative to same location as `rb`.
        maxdev : float
            Maximum absolute error of any deviation from the expected
            pattern.
        maxerr : float
            Maximum percent deviation; this is the maximum deviation
            for a node divided by the maximum x, y or z coordinate
            location for the node.
    Notes
    -----
    The expected pattern for each node in the rigid-body modes is::
        [ 1 0 0    0   Z  -Y
          0 1 0   -Z   0   X
          0 0 1    Y  -X   0
          0 0 0    1   0   0
          0 0 0    0   1   0
          0 0 0    0   0   1 ]
    That pattern shown assumes the node is in the same coordinate
    system as the reference node. If this is not the case, the 3x3
    coordinate transformation matrix (from reference to local) will
    show up in place of the the 3x3 identity matrix shown above. This
    routine will use that 3x3 matrix to convert coordinates to that of
    the reference before checking for the expected pattern. The
    matrix inversion is done in a least squares sense. This all means
    is that the use of local coordinate systems is acceptable for this
    routine. Zero rows (like what could happen for q-set dof) get
    zero coordinates.
    Raises
    ------
    ValueError
        When `rb` is not 6*n x 6.
    Examples
    --------
    >>> import n2y
    >>> import numpy as np
    >>> # generate perfect rigid-body modes to test this routine
    >>> coords = np.array([[0, 0, 0],
    ...                    [1, 2, 3],
    ...                    [4, -5, 25]])
    >>> rb = n2y.rigid_body_geom(coords)
    >>> coords_out, mxdev, mxerr = n2y.rigid_body_coords(rb)
    >>> np.allclose(coords_out, coords)
    True
    >>> np.allclose(0., mxdev)
    True
    >>> np.allclose(0., mxerr)
    True
    Now show example when non-rb modes are passed in:
    >>> import n2y
    >>> import numpy as np
    >>> not_rb = np.dot(np.arange(12).reshape(12, 1),
    ...                 np.arange(6).reshape(1, 6))
    >>> np.set_printoptions(precision=4, suppress=True)
    >>> n2y.rigid_body_coords(not_rb)
    Warning: deviation from standard pattern, node #1 starting at index 0:
      Max deviation = 2.6 units.
      Max % error = 217%.
      Rigid-Body Rotations:
        0.0000     0.0000     0.0000
        0.6000     0.8000     1.0000
        1.2000     1.6000     2.0000
    <BLANKLINE>
    Warning: deviation from standard pattern, node #2 starting at index 6:
      Max deviation = 2.6 units.
      Max % error = 217%.
      Rigid-Body Rotations:
        0.0000     0.0000     0.0000
        0.6000     0.8000     1.0000
        1.2000     1.6000     2.0000
    <BLANKLINE>
    Maximum absolute coordinate location error:  2.6 units
    Maximum % error: 217%.
    (array([[ 1. ,  1.2,  0. ],
           [ 1. ,  1.2,  0. ]]), 2.6000000000000005, 216.66666666666674)
    """
    r, c = np.shape(rb)
    if c != 6:
        raise ValueError("`rb` must have 6 columns")
    if (r // 6)*6 != r:
        raise ValueError("`rb` must have a multiple of 6 rows")
    n = r // 6
    coords = np.zeros((n, 3))
    maxerr = 0
    maxdev = 0
    haderr = 0
    for j in range(n):
        row = j*6
        # T is the 3x3 reference-to-local transform embedded in the
        # translational partition; solve (least squares) for the
        # rotational partition expressed in the reference system:
        T = rb[row:row+3, :3]
        R = linalg.lstsq(T, rb[row:row+3, 3:])[0]
        # the off-diagonal pairs of R should be +/- the coordinates
        # (see the expected pattern in the docstring)
        deltax = R[1, 2]
        deltay = R[2, 0]
        deltaz = R[0, 1]
        deltax2 = -R[2, 1]
        deltay2 = -R[0, 2]
        deltaz2 = -R[1, 0]
        # deviation = worst disagreement between the paired estimates
        # plus any non-zero diagonal term:
        dev = np.max(np.vstack((np.max(np.abs(np.diag(R))),
                                np.abs(deltax-deltax2),
                                np.abs(deltay-deltay2),
                                np.abs(deltaz-deltaz2))))
        coords[j] = [deltax, deltay, deltaz]
        mc = np.max(np.abs(coords[j]))
        # normalize deviation by the node's largest coordinate; guard
        # against a node at (or numerically at) the origin:
        if mc > np.finfo(float).eps:
            err = dev / mc * 100.
        else:
            err = dev / np.finfo(float).eps * 100.
        maxdev = max([maxdev, dev])
        maxerr = max([maxerr, err])
        if verbose > 0 and (dev > mc*1.e-6 or math.isnan(dev)):
            if verbose > 1:
                print("Warning: deviation from standard pattern, "
                      "node #{} starting at index {}:".
                      format(j+1, row))
                print("  Max deviation = {:.3g} units.".format(dev))
                print("  Max % error = {:.3g}%.".format(err))
                print("  Rigid-Body Rotations:")
                for k in range(3):
                    print("    {:10.4f} {:10.4f} {:10.4f}"
                          .format(R[k, 0], R[k, 1], R[k, 2]))
                print("")
            haderr = 1
    if verbose > 0 and haderr:
        print("Maximum absolute coordinate location error: "
              "{:.3g} units".format(maxdev))
        print("Maximum % error: {:.3g}%.".format(maxerr))
    return coords, maxdev, maxerr
# return vector of 6 dof per element in 'dof', negatives for non-used
def expand_dof(dof):
    """
    Expand Nastran DOF specification to 6 entries per input element.

    Parameters
    ----------
    dof : vector of DOF
        Each element in `dof` is either 0 or an integer containing any
        combination of digits 1-6 (Nastran style).

    Returns
    -------
    edof : ndarray
        Expanded version of `dof` with 1 DOF per element. Size =
        len(`dof`)*6 x 1. Unused DOF are negative; a 0 entry puts a 0
        in the first of its six slots.

    See also n2y.mkdofpv, n2y.expand_trim.

    Examples
    --------
    >>> import numpy as np
    >>> import n2y
    >>> np.set_printoptions(linewidth=75)
    >>> n2y.expand_dof(12).T
    array([[ 1,  2, -3, -4, -5, -6]])
    >>> dof = np.array([123456, 2, 345, 0])
    >>> n2y.expand_dof(dof).T
    array([[ 1,  2,  3,  4,  5,  6, -1,  2, -3, -4, -5, -6, -1, -2,  3,  4,  5,
            -6,  0, -1, -1, -1, -1, -1]])
    """
    dof = np.atleast_1d(dof)
    # start with everything marked unused (-1, -2, ... set below)
    edof = np.full((6*len(dof), 1), -1, dtype=np.int64)
    digits = '123456'
    for row, entry in enumerate(dof):
        base = row*6
        if entry == 0:
            edof[base] = 0
            continue
        text = str(entry)
        for k in range(6):
            edof[base + k] = k+1 if digits[k] in text else -(k+1)
    return edof
def expand_trim(dof):
    """
    Expand Nastran DOF specification, keeping only the used DOF.

    Parameters
    ----------
    dof : array
        Vector of DOF to expand (via :func:`expand_dof`) and then trim
        down to keep only the used DOF.

    Returns
    -------
    edof : ndarray
        Expanded version of DOF with up to 6-elements per input
        element.

    Examples
    --------
    >>> import n2y
    >>> dof = [123456, 2, 345, 0]
    >>> n2y.expand_trim(dof).T
    array([[1, 2, 3, 4, 5, 6, 2, 3, 4, 5, 0]])
    """
    full = expand_dof(dof)
    # expand_dof marks unused DOF with negative values; drop those rows
    used = full[:, 0] >= 0
    return full[used]
def mkusetmask(nasset=None):
    r"""
    Get bit-masks for use with the Nastran USET table.

    Parameters
    ----------
    nasset : None or string
        Specifies Nastran set or sets. If a string, can be a single
        set (eg, 'a') or multiple sets combined with the '+' (eg,
        'a+o+m').

    Returns
    -------
    mask : integer or dict
        If `nasset` is None, returns a dictionary of bit-masks that is
        indexed by the lowercase set letter(s). Otherwise, `mask` is
        the bit mask for the specific set(s).

    Notes
    -----
    The sets (and supersets) currently accounted for are::
        Sets              Supersets
         M  -------------------------------------\\
         S  ------------------------------\       > G --\\
         O  -----------------------\       > N --/       \\
         Q  ----------------\       > F --/       \       \\
         R  ---------\       > A --/       \       \       > P
         C  --\       > T --/       \       > FE    > NE  /
         B  ---> L --/       > D   /       /       /
         E  ------------------------/-----/-------/-----/
    User-defined sets: U1, U2, U3, U4, U5, and U6.

    MSC.Nastran sometimes assigns the 2nd bit to the S-set instead of
    the B-set; :func:`op2.rdn2cop2` clears that bit for S-set DOF, so
    here the 2nd bit can safely be treated as belonging to the B-set.

    See Also
    --------
    n2y.mksetpv, op2.read_nas2cam, op2.read_nas2cam_op2, n2y.usetprt.

    Examples
    --------
    >>> import n2y
    >>> n2y.mkusetmask('q')
    4194304
    >>> n2y.mkusetmask('b')
    2097154
    >>> n2y.mkusetmask('q+b')
    6291458
    """
    # primitive set bits:
    m, o, r = 1, 4, 8
    b = (1 << 21) | 2
    s = (1 << 10) | (1 << 9)
    q, c, e = 1 << 22, 1 << 20, 1 << 11
    # supersets built up from the primitives (see diagram above):
    a = q | r | b | c
    l = c | b
    t = l | r
    f = a | o
    n = f | s
    g = n | m
    masks = {
        'm': m, 'b': b, 'o': o, 'r': r, 's': s, 'q': q, 'c': c,
        'e': e, 'a': a, 'l': l, 't': t, 'f': f, 'n': n, 'g': g,
        'p': g | e, 'fe': f | e, 'd': e | a, 'ne': n | e,
        'u1': 1 << 31, 'u2': 1 << 30, 'u3': 1 << 29,
        'u4': 1 << 28, 'u5': 1 << 27, 'u6': 1 << 26,
    }
    if isinstance(nasset, str):
        combined = 0
        for name in nasset.split('+'):
            combined |= masks[name]
        return combined
    return masks
def uset_print(filename, uset, printsets="M,S,O,Q,R,C,B,E,L,T,A,F,N,G",
               form=0, perpage=float('inf')):
    """
    Print Nastran DOF set membership information from USET table.
    Parameters
    ----------
    filename : string or file handle
        Either a name of a file or a file handle as returned by
        open(). Use 1 to write to the screen, 0 to write nothing --
        just get output.
    uset : ndarray
        A 6-column matrix as output by :func:`op2.rdn2cop2`.
    printsets : string
        A comma delimited string specifying which sets to print, see
        description below.
    form : integer
        If `form` == 0, print a set at a time (like sets are grouped
        together). If `form` > 0, print a table showing set
        membership in columns and `form` is used as the minimum field
        width (see more notes below). `form` is ignored if `filename`
        is 0.
    perpage : integer
        Number of lines to print per page. `perpage` is ignored if
        `filename` is 0.
    Returns
    -------
    table : ndarray
        Up to a 27 column matrix::
            [ DOF_number, ID, DOF, sets(up to 24 cols) ]
        Columns 4 and up will have 0's and DOF numbers; the 0's show
        non-membership. The columns will correspond to printsets, in
        the order given below. The rows will be truncated to non-zero
        rows.
    `printsets` is a comma delimited strings that specifies which sets
    to print. It can be input in lower or upper case. Sets that are
    identical are printed together (as G and P often are). The value
    of "*" is equivalent to specifying all sets:
        "M,S,O,Q,R,C,B,E,L,T,A,D,F,FE,N,NE,G,P,U1,U2,U3,U4,U5,U6"
    For example, `printsets` = "R, C, B, A" will print only those sets
    (but not necessarily in that order).
    If `form` is greater than 0, table is printed but, for easier
    visualization, the 0's are not printed being replaced with spaces.
    The non-zero values for each set are the DOF numbers in each set.
    The value of form specifies the minimum width for each of the last
    24 columns of the table. Note that the width will be more than
    form if a set DOF number requires more space.
    The sets (and supersets) currently accounted for are::
        Sets              Supersets
         M  -------------------------------------\\
         S  ------------------------------\       > G --\\
         O  -----------------------\       > N --/       \\
         Q  ----------------\       > F --/       \       \\
         R  ---------\       > A --/       \       \       > P
         C  --\       > T --/       \       > FE    > NE  /
         B  ---> L --/       > D   /       /       /
         E  ------------------------/-----/-------/-----/
    User-defined sets: U1, U2, U3, U4, U5, and U6.
    See Also
    --------
    :func:`op2.read_nas2can_op2`, :func:`n2y.mksetpv`,
    :func:`n2y.rigid_body_geom_uset`, :func:`op2.read_nas2cam`
    Examples
    --------
    >>> import n2y
    >>> import numpy as np
    >>> # first, make a uset table:
    >>> #  node 100 in basic is @ [5, 10, 15]
    >>> #  node 200 in cylindrical coordinate system is @
    >>> #  [r, th, z] = [32, 90, 10]
    >>> cylcoord = np.array([[1, 2, 0], [0, 0, 0], [1, 0, 0],
    ...                      [0, 1, 0]])
    >>> uset = None
    >>> uset = n2y.add_grid(uset, 100, 'b', 0, [5, 10, 15], 0)
    >>> uset = n2y.add_grid(uset, 200, 'c', cylcoord, [32, 90, 10],
    ...                     cylcoord)
    >>> table = n2y.uset_print(1, uset,
    ...                        printsets='r, c')  # doctest: +ELLIPSIS
    R-set
          -None-
    <BLANKLINE>
    C-set
                 -1-        -2-  ...     -6-    ...   -10-
         1=      200-1      200-2 ...    200-6
    <BLANKLINE>
    >>> table = n2y.uset_print(1, uset,
    ...                        printsets='*')  # doctest: +ELLIPSIS
    M-set, S-set, O-set, Q-set, R-set, E-set, U1-set, ... U6-set
          -None-
    <BLANKLINE>
    C-set
                 -1-        -2-  ...     -6-    ...   -10-
         1=      200-1      200-2 ...    200-6
    <BLANKLINE>
    B-set
                 -1-        -2-  ...     -6-    ...   -10-
         1=      100-1      100-2 ...    100-6
    <BLANKLINE>
    L-set, T-set, A-set, D-set, F-set, FE-set, ..., G-set, P-set
                 -1-        -2-  ...    -10-
         1=      100-1      100-2 ...    200-4 =    10
        11=      200-5      200-6
    <BLANKLINE>
    >>> table = n2y.uset_print(1, uset)  # doctest: +ELLIPSIS
    M-set, S-set, O-set, Q-set, R-set, E-set
          -None-
    <BLANKLINE>
    C-set
                 -1-        -2-  ...     -6-    ...   -10-
         1=      200-1      200-2 ...    200-6
    <BLANKLINE>
    B-set
                 -1-        -2-  ...     -6-    ...   -10-
         1=      100-1      100-2 ...    100-6
    <BLANKLINE>
    L-set, T-set, A-set, F-set, N-set, G-set
                 -1-        -2-  ...    -10-
         1=      100-1      100-2 ...    200-4 =    10
        11=      200-5      200-6
    <BLANKLINE>
    >>> print(table)   # doctest: +ELLIPSIS
    [[  1 100   1   0 ...  0   0   1   0   1   1   1   1   1   1]
     [  2 100   2   0 ...  0   0   2   0   2   2   2   2   2   2]
     [  3 100   3   0 ...  0   0   3   0   3   3   3   3   3   3]
     [  4 100   4   0 ...  0   0   4   0   4   4   4   4   4   4]
     [  5 100   5   0 ...  0   0   5   0   5   5   5   5   5   5]
     [  6 100   6   0 ...  0   0   6   0   6   6   6   6   6   6]
     [  7 200   1   0 ...  0   1   0   0   7   7   7   7   7   7]
     [  8 200   2   0 ...  0   2   0   0   8   8   8   8   8   8]
     [  9 200   3   0 ...  0   3   0   0   9   9   9   9   9   9]
     [ 10 200   4   0 ...  0   4   0   0  10  10  10  10  10  10]
     [ 11 200   5   0 ...  0   5   0   0  11  11  11  11  11  11]
     [ 12 200   6   0 ...  0   6   0   0  12  12  12  12  12  12]]
    >>> table = n2y.uset_print(1, uset, form=1)   # doctest: +ELLIPSIS
     DOF #      GRID  DOF ...  R  C  B  E  L  T  A  F  N  G
    ------- --------  --- ... -- -- -- -- -- -- -- -- -- --
          1      100   1  ...        1     1  1  1  1  1  1
          2      100   2  ...        2     2  2  2  2  2  2
          3      100   3  ...        3     3  3  3  3  3  3
          4      100   4  ...        4     4  4  4  4  4  4
          5      100   5  ...        5     5  5  5  5  5  5
          6      100   6  ...        6     6  6  6  6  6  6
          7      200   1  ...     1        7  7  7  7  7  7
          8      200   2  ...     2        8  8  8  8  8  8
          9      200   3  ...     3        9  9  9  9  9  9
         10      200   4  ...     4       10 10 10 10 10 10
         11      200   5  ...     5       11 11 11 11 11 11
         12      200   6  ...     6       12 12 12 12 12 12
    """
    # column of set-membership bitmasks, one row per DOF:
    usetmask = mkusetmask()
    dof = uset[:, 2:3].astype(np.int64)
    table = 0 != np.hstack((dof & usetmask["m"],
                            dof & usetmask["s"],
                            dof & usetmask["o"],
                            dof & usetmask["q"],
                            dof & usetmask["r"],
                            dof & usetmask["c"],
                            dof & usetmask["b"],
                            dof & usetmask["e"],
                            dof & usetmask["l"],
                            dof & usetmask["t"],
                            dof & usetmask["a"],
                            dof & usetmask["d"],
                            dof & usetmask["f"],
                            dof & usetmask["fe"],
                            dof & usetmask["n"],
                            dof & usetmask["ne"],
                            dof & usetmask["g"],
                            dof & usetmask["p"],
                            dof & usetmask["u1"],
                            dof & usetmask["u2"],
                            dof & usetmask["u3"],
                            dof & usetmask["u4"],
                            dof & usetmask["u5"],
                            dof & usetmask["u6"]))
    # replace True's with set membership number: 1 to ?
    table = table.astype(np.int64)
    r, c = np.shape(table)
    n = np.sum(table, 0)
    for i in range(c):
        pv = table[:, i].astype(bool)
        table[pv, i] = 1 + np.arange(n[i])
    allsets = list('MSOQRCBELTADF') + ['FE', 'N', 'NE', 'G', 'P', 'U1',
                                       'U2', 'U3', 'U4', 'U5', 'U6']
    if printsets == '*':
        printsets = allsets
    else:
        printsets = (''.join(printsets.split())).upper().split(',')
    # keep only columns in table that are printed:
    printpv, pv2 = locate.list_intersection(allsets, printsets)
    # make sure printsets is in order of table:
    printsets = [printsets[i] for i in pv2]
    table = table[:, printpv]
    pv = np.any(table, axis=1)
    if np.any(pv):
        # add 3 more cols to table:
        return_table = np.hstack((1+np.arange(r).reshape(r, 1),
                                  uset[:, :2].astype(np.int64), table))
        return_table = return_table[pv]
    else:
        return_table = None
    if filename == 0:
        return return_table
    if isinstance(filename, str):
        f = open(filename, "w")
    elif filename == 1:
        f = sys.stdout
    else:
        f = filename
    nsets = len(printsets)
    if form == 0:
        colheader = ("     "
                     "        -1-        -2-        -3-        -4-"
                     "        -5-        -6-        -7-        -8-"
                     "        -9-       -10-")
        curlines = 0
        printed = np.zeros((nsets), dtype=np.int64)
        def pager():
            # `curlines` is only read here (no rebinding), so no
            # nonlocal declaration is needed
            if curlines >= perpage:
                f.write(chr(12))
                f.write("{} (continued)\n{}\n".format(header,
                                                      colheader))
                return 2
            return curlines
        s = 0
        while s < nsets:  # loop over printing-sets:
            header = printsets[s]+"-set"
            printed[s] = 1
            S = s+1
            # group identical sets into the same header line:
            while S < nsets:
                if np.all(table[:, S] == table[:, s]):
                    header += ", " + printsets[S] + "-set"
                    printed[S] = 1
                S += 1
            # form a modified version of USET for printing this set
            pv = table[:, s].nonzero()[0]
            # set s for next loop:
            s = (printed == 0).nonzero()[0]
            if s.size > 0:
                s = s[0]
            else:
                s = nsets
            if curlines >= perpage-2:
                f.write(chr(12))
                curlines = 0
            if np.any(pv):
                f.write("{}\n{}\n".format(header, colheader))
                curlines += 2
                uset_mod = uset[pv, :2].astype(np.int64)
                full_rows = pv.size // 10
                rem = pv.size - 10*full_rows
                count = 1
                if full_rows:
                    usetfr = uset_mod[:full_rows*10]
                    for j in range(full_rows):
                        curlines = pager()
                        f.write('{:6d}='.format(count))
                        for k in range(10):
                            r = j*10+k
                            f.write(' {:8d}-{:1d}'.format(usetfr[r, 0],
                                                          usetfr[r, 1]))
                        f.write(' ={:6d}\n'.format(count+9))
                        curlines += 1
                        count += 10
                if rem:
                    curlines = pager()
                    uset_rem = uset_mod[-rem:].astype(np.int64)
                    f.write('{:6d}='.format(count))
                    for j in range(rem):
                        f.write(' {:8d}-{:1d}'.format(uset_rem[j, 0],
                                                      uset_rem[j, 1]))
                    f.write("\n")
                    curlines += 1
                f.write("\n")
                curlines += 1
            else:
                f.write("{}\n      -None-\n\n".format(header))
                curlines += 3
        # BUG FIX: was `isinstance(file, str)` -- `file` is not a name
        # in Python 3, so a string `filename` raised NameError here:
        if isinstance(filename, str):
            f.close()
        return return_table
    # width of field depends on number of DOF:
    # NOTE(review): return_table can be None here when no printed set
    # has members; the form > 0 path assumes at least one member.
    mx = np.max(table)
    n = int(math.ceil(math.log10(mx)))
    if form > n:
        n = form
    # format for header line:
    pre = (n-1)//2 + 1
    post = n-pre + 2
    format1 = "{{:{}}}{{:{}}}".format(pre, post)
    if n < 3:
        format2 = "{{:{}}}{{:{}}}".format(pre, post-1)
    else:
        format2 = format1
    s = " "
    headersets = format1.format(printsets[0], s)
    underline = "-"*n
    underlsets = (underline+" ") * nsets
    numformat = ("{{:{}}} ".format(n)) * nsets
    for j in range(1, nsets):
        if len(printsets[j]) == 1:
            headersets += format1.format(printsets[j], s)
        else:
            headersets += format2.format(printsets[j], s)
    # chop off 2 trailing spaces:
    headersets = headersets[:-2]
    underlsets = underlsets[:-2]
    numformat = numformat[:-2]
    header = (" DOF #      GRID  DOF   "+headersets).rstrip()
    underl = "------- --------  ---   "+underlsets
    numformat = "{:7} {:8}  {:3}   "+numformat+"\n"
    r = return_table.shape[0]
    if perpage == np.inf:
        perpage = r + 10   # on Windows, i % inf gives nan
    else:
        perpage -= 2
    for i in range(r):
        if i % perpage == 0:
            if i > 0:
                f.write(chr(12))
            f.write('{}\n{}\n'.format(header, underl))
        string = numformat.format(*return_table[i])
        # blank out the zeros (non-membership) for readability:
        string = string.replace(' 0 ', '   ')
        string = string.replace(' 0\n', '').rstrip() + '\n'
        f.write(string)
    # BUG FIX: was `isinstance(file, str)` (NameError in Python 3):
    if isinstance(filename, str):
        f.close()
    return return_table
def mksetpv(uset, major, minor):
    r"""
    Make a set partition vector from a Nastran USET table.
    Parameters
    ----------
    uset : ndarray
        A 6-column matrix as output by :func:`op2.rdn2cop2`.
    major : integer or string
        An integer bitmask or a set letter or letters (see below).
    minor : integer or string
        An integer bitmask or a set letter or letters.
    Returns
    -------
    pv : 1d ndarray
        A True/False vector for partitioning `minor` from
        `major`. Length = number of DOF in `major`.
    The inputs major and minor can be specified as a combination
    of sets by using the '+' sign. See help in mkusetmask() for more
    information on how to specify the sets.
    The sets (and supersets) currently accounted for are::
        Sets              Supersets
         M  -------------------------------------\\
         S  ------------------------------\       > G --\\
         O  -----------------------\       > N --/       \\
         Q  ----------------\       > F --/       \       \\
         R  ---------\       > A --/       \       \       > P
         C  --\       > T --/       \       > FE    > NE  /
         B  ---> L --/       > D   /       /       /
         E  ------------------------/-----/-------/-----/
    User-defined sets: U1, U2, U3, U4, U5, and U6.
    See also n2y.mkdofpv, n2y.upqsetpv, op2.rdnas2cam, n2y.formulvs,
    n2y.usetprt, n2y.rbgeom_uset, n2y.mkusetmask, op2.rdn2cop2.
    Raises
    ------
    ValueError
        When `minor` is not completely contained in `major`.
    Examples
    --------
    >>> import numpy as np
    >>> import n2y
    >>> # First, make a uset table
    >>> #  node 100 in basic is @ [5, 10, 15]
    >>> #  node 200 in cylindrical is @ [r, th, z] = [32, 90, 10]
    >>> #  z_cyl = x-basic; r_cyl = y-basic
    >>> # Also, put 100 in b-set and 200 in m-set.
    >>> cylcoord = np.array([[1, 2, 0], [0, 0, 0], [1, 0, 0],
    ...                      [0, 1, 0]])
    >>> uset = None
    >>> uset = n2y.addgrid(uset, 100, 'b', 0, [5, 10, 15], 0)
    >>> uset = n2y.addgrid(uset, 200, 'm', cylcoord, [32, 90, 10],
    ...                    cylcoord)
    >>> bset = n2y.mksetpv(uset, 'p', 'b')        # 1:6 are true
    >>> np.set_printoptions(linewidth=75)
    >>> bset
    array([ True,  True,  True,  True,  True,  True, False, False, False,
           False, False, False], dtype=bool)
    >>> mset = n2y.mksetpv(uset, 'p', 'm')        # 7:12 are true
    >>> mset
    array([False, False, False, False, False, False,  True,  True,  True,
            True,  True,  True], dtype=bool)
    >>> rcqset = n2y.mksetpv(uset, 'p', 'r+c+q')  # all false
    >>> rcqset
    array([False, False, False, False, False, False, False, False, False,
           False, False, False], dtype=bool)
    """
    # resolve set letters to integer bitmasks if needed:
    if isinstance(major, str):
        major = mkusetmask(major)
    if isinstance(minor, str):
        minor = mkusetmask(minor)
    uset_set = uset[:, 2].astype(np.int64)
    pvmajor = 0 != (uset_set & major)
    pvminor = 0 != (uset_set & minor)
    if np.any(~pvmajor & pvminor):
        # BUG FIX: the concatenated message was missing a space
        # ("containedin"):
        raise ValueError("`minorset` is not completely contained "
                         "in `majorset`")
    # partition the minor-set membership down to the major-set rows:
    pv = pvminor[pvmajor]
    return pv
def make_dof_partition_vector(uset, nasset, dof):
    """
    Make a DOF partition vector for a particular set from a Nastran
    USET table.
    Parameters
    ----------
    uset : ndarray
        A 6-column matrix as output by :func:`op2.rdn2cop2`. Can have
        only the first two columns.
    nasset : string or integer
        The set(s) to partition the dof out of (eg, 'p' or 'b+q').
        Can also be integer bitmask (see :func:`mkusetmask` for more
        information).
    dof : array
        `dof` can be input in 2 different ways:
         1) 1 column, each row is an ID (grid, spoint, etc). All
            dof associated with the ID that are in the set will be
            included. An error will be generated if any ID is
            missing.
         2) 2 column dof array, each row is: [ID dof]. Here, dof
            specifies which degrees-of-freedom of the ID to find.
            The dof can be input in the same way as Nastran accepts
            it: 0 or any combo of digits 1-6; eg, 123456 for all 6.
            An error is generated if any dof are missing. See
            examples.
    Returns
    -------
    tuple: (pv, outdof)
        pv : vector
            Index vector for partitioning dof out of set; this
            maintains the order of DOF as specified.
        outdof : vector
            The expanded version of the dof input, in order of output.
    Raises
    ------
    ValueError
        When requested `dof` are not found in the `nasset`.
    Examples
    --------
    >>> import numpy as np
    >>> import n2y
    >>> # Want an A-set partition vector for all available a-set dof of
    >>> # grids 100 and 200:
    >>> ids = np.array([[100], [200]])
    >>> uset = None
    >>> uset = n2y.addgrid(uset, 100, 'b', 0, [5, 10, 15], 0)
    >>> uset = n2y.addgrid(uset, 200, 'b', 0, [32, 90, 10], 0)
    >>> n2y.mkdofpv(uset, "a", ids)
    (array([ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11]), array([[100,   1],
           [100,   2],
           [100,   3],
           [100,   4],
           [100,   5],
           [100,   6],
           [200,   1],
           [200,   2],
           [200,   3],
           [200,   4],
           [200,   5],
           [200,   6]]))
    >>> uset = np.vstack((uset, [991, 0, 4194304, 0, 0, 0]))
    >>> # request spoint 991 and dof 123 for grid 100 (in that order):
    >>> ids2 = [[991, 0], [100, 123]]
    >>> n2y.mkdofpv(uset, "a", ids2)
    (array([12,  0,  1,  2]), array([[991,   0],
           [100,   1],
           [100,   2],
           [100,   3]]))
    """
    # encode each (ID, dof) pair as a single integer ID*10+dof so the
    # lookup below can be done with one searchsorted call:
    if nasset == 'p':
        uset_set = (uset[:, 0]*10 + uset[:, 1]).astype(np.int64)
    else:
        setpv = mksetpv(uset, "p", nasset)
        uset_set = (uset[setpv, 0]*10 + uset[setpv, 1]).astype(np.int64)
    dof = np.asarray(dof).astype(np.int64)
    if dof.ndim < 2:
        dof = np.reshape(dof, (-1, 1))
    r, c = dof.shape
    if c == 2:
        # does the second column need to be expanded (ie, 123456 type):
        if any(dof[:, 1] > 6):
            O = np.ones((1, 6), dtype=np.int64)
            # repeat each grid id 6 times to line up with expand_dof:
            gid = np.dot(dof[:, 0:1], O).flatten('C')
            dof2 = np.hstack((gid.reshape(-1, 1),
                              expand_dof(dof[:, 1])))
            # keep only the DOF actually requested (non-negative):
            pv = dof2[:, 1] >= 0
            dof = dof2[pv]
        dof = dof[:, 0]*10 + dof[:, 1]
    else:
        # expand dof to include six dof per grid
        nodes = np.dot(np.ones((6, 1), dtype=np.int64),
                       dof.T).flatten('F')
        dof = np.dot(np.arange(1, 7).reshape(-1, 1),
                     np.ones((1, r), dtype=np.int64)).flatten('F')
        dof = nodes*10 + dof
    # `uset_set` is not necessarily sorted, so search through its
    # sort order (`sorter=i`) and map hits back to original rows:
    i = np.argsort(uset_set)
    pvi = np.searchsorted(uset_set, dof, sorter=i)
    # since searchsorted can return length as index:
    pvi[pvi == i.size] -= 1
    pv = i[pvi]
    # verify every requested DOF was actually found:
    chk = uset_set[pv] != dof
    if np.any(chk):
        print("set '{}' does not contain all of the dof in `dof`."
              " These are missing:".format(nasset))
        ids = (dof[chk] // 10)
        dof = dof[chk] - 10*ids
        missing_dof = np.hstack((ids.reshape(-1, 1),
                                 dof.reshape(-1, 1)))
        print('missing_dof = ', missing_dof)
        raise ValueError('see missing dof message above')
    # decode ID*10+dof back into two columns for the caller:
    ids = dof // 10
    dof = dof - 10*ids
    outdof = np.hstack((ids.reshape(-1, 1), dof.reshape(-1, 1)))
    return pv, outdof
def coord_card_info(uset, cid=None):
    """
    Returns 'coordinate card' data from information in USET table
    Parameters
    ----------
    uset : ndarray
        A 6-column matrix as output by :func:`op2.rdn2cop2`.
    cid : None or integer
        If integer, it is the id of the coordinate system to get data
        for. If None, all coordinate system information is returned.
    Returns
    -------
    ci : list or dictionary
        If `cid` was an integer, the return is a list::
            [name, [[4x3 matrix as shown below]] ]
        The 4x3 matrix is (as described in :func:`addgrid`)::
            [ cid type reference_id ]
            [ Ax   Ay   Az          ]
            [ Bx   By   Bz          ]
            [ Cx   Cy   Cz          ]
        If cid was None, the return is a dictionary of lists for all
        coordinate systems in `uset` (not including 0)::
            {cid1 : [name, ...], cid2 : [...]}.
    `name` is either 'CORD2R' (`type` == 1), 'CORD2C' (`type` ==
    2), or 'CORD2S' (`type` ==3). `ref` is always 0, regardless
    what the original reference coordinate system was. `A`, `B`,
    `C` are the 3-element vectors defining the origin (`A`), the
    Z-axis direction (`B`), and the X-axis direction (`C`).
    Notes
    -----
    The only way to get the basic system (cid = 0) is to request it
    specifically (and `uset` could be anything in this case)::
        c0 = coordcardinfo(uset, 0)
    The return dictionary will be empty if `cid` is None and there are
    no coordinate systems other than 0 in the `uset` table.
    Raises
    ------
    ValueError
        When requested `cid` is not found.
    Examples
    --------
    >>> import numpy as np
    >>> import n2y
    >>> sccoord = np.array([[501, 1, 0], [2345.766, 0, 0],
    ...                     [2345.766, 10, 0], [3000, 0, 0]])
    >>> uset = n2y.addgrid(None, 1001, 'b', sccoord, [0, 0, 0],
    ...                    sccoord)
    >>> np.set_printoptions(precision=4, suppress=True)
    >>> n2y.coordcardinfo(uset)
    {501: ['CORD2R', array([[  501.   ,     1.   ,     0.   ],
           [ 2345.766,     0.   ,     0.   ],
           [ 2345.766,     1.   ,     0.   ],
           [ 2346.766,     0.   ,     0.   ]])]}
    >>> n2y.coordcardinfo(uset, 0)
    ['CORD2R', array([[ 0.,  1.,  0.],
           [ 0.,  0.,  0.],
           [ 0.,  0.,  1.],
           [ 1.,  0.,  0.]])]
    >>> n2y.coordcardinfo(uset, 501)
    ['CORD2R', array([[  501.   ,     1.   ,     0.   ],
           [ 2345.766,     0.   ,     0.   ],
           [ 2345.766,     1.   ,     0.   ],
           [ 2346.766,     0.   ,     0.   ]])]
    >>> # add random-ish cylindrical and spherical systems to test:
    >>> cylcoord = np.array([[601, 2, 501], [10, 20, 30],
    ...                      [100, 20, 30], [10, 1, 1]])
    >>> sphcoord = np.array([[701, 3, 601], [35, 15, -10],
    ...                      [55, 15, -10], [45, 30, 1]])
    >>> uset = n2y.addgrid(uset, 1002, 'b', cylcoord, [2, 90, 5],
    ...                    cylcoord)
    >>> uset = n2y.addgrid(uset, 1003, 'b', sphcoord, [12, 40, 45],
    ...                    sphcoord)
    >>> cyl601 = n2y.coordcardinfo(uset, 601)
    >>> sph701 = n2y.coordcardinfo(uset, 701)
    >>> uset = n2y.addgrid(uset, 2002, 'b', cyl601[1], [2, 90, 5],
    ...                    cyl601[1])
    >>> uset = n2y.addgrid(uset, 2003, 'b', sph701[1], [12, 40, 45],
    ...                    sph701[1])
    >>> np.allclose(uset[6, 3:], uset[18, 3:])
    True
    >>> np.allclose(uset[12, 3:], uset[24, 3:])
    True
    """
    if cid == 0:
        # basic system: identity orientation at the origin
        return ['CORD2R', np.array([[0, 1, 0],
                                    [0., 0., 0.],
                                    [0., 0., 1.],
                                    [1., 0., 0.]])]
    # rows with 2 in column 1 flag the start of a 5-row coordinate
    # info block (same convention as _get_coordinfo_byid):
    pv = (uset[:, 1] == 2).nonzero()[0]
    if pv.size == 0:
        if cid is not None:
            raise ValueError('{} not found ... USET table '
                             'has no grids?'.format(cid))
        return {}
    def getlist(coordinfo):
        # NOTE: closes over `cid` from the enclosing scope -- the
        # function argument in the single-cid branch, the loop
        # variable in the dictionary branch below
        A = coordinfo[1]
        # transpose so T transforms from basic to local:
        T = coordinfo[2:].T
        # B is a point on the Z-axis, C a point on the X-axis:
        B = A+T[2]
        C = A+T[0]
        typ = int(coordinfo[0, 1])
        name = ['CORD2R', 'CORD2C', 'CORD2S'][typ-1]
        return [name, np.vstack(([cid, typ, 0], A, B, C))]
    if cid is not None:
        pv2 = (uset[pv, 3] == cid).nonzero()[0]
        if pv2.size == 0:
            raise ValueError('{} not found in USET table.'.
                             format(cid))
        pv2 = pv2[0]
        r = pv[pv2]
        coordinfo = uset[r:r+5, 3:]
        return getlist(coordinfo)
    # cid is None: collect every coordinate system (> 0) in the table
    CI = {}
    pv2 = (uset[pv, 3] > 0).nonzero()[0]
    if pv2.size == 0:
        return CI
    pv = pv[pv2]
    ids = set(uset[pv, 3].astype(np.int64))
    for cid in ids:
        pv2 = (uset[pv, 3] == cid).nonzero()[0][0]
        r = pv[pv2]
        coordinfo = uset[r:r+5, 3:]
        CI[cid] = getlist(coordinfo)
    return CI
def _get_coordinfo_byid(refid, uset):
    """
    Returns 5x3 coordinate system information for the reference
    coordinate system.
    Parameters
    ----------
    refid : integer
        Coordinate system id.
    uset : ndarray
        A 6-column matrix as output by :func:`op2.rdn2cop2`.
    Returns
    -------
    cordinfo : 2d ndarray
        5x3 coordinate system information for `refid`.
    Raises
    ------
    ValueError
        When `refid` is not found in `uset`.
    See :func:`get_coordinfo` for more information.
    """
    if refid == 0:
        # basic system: identity transform at the origin
        return np.vstack((np.array([[0, 1, 0], [0., 0., 0.]]),
                          np.eye(3)))
    # rows with 2 in column 1 flag the start of a 5-row coordinate
    # info block; column 3 of such a row holds the coordinate id:
    pv = (uset[:, 1] == 2).nonzero()[0]
    matches = (uset[pv, 3] == refid).nonzero()[0]
    # BUG FIX: was a bare `except Exception` around an indexing
    # expression, which converted *any* error (including bad `uset`
    # input) into the "not found" ValueError and could also fall
    # through returning None; test explicitly instead:
    if matches.size == 0:
        raise ValueError('reference coordinate id {} not '
                         'found in `uset`.'.format(refid))
    i = pv[matches[0]]
    return uset[i:i+5, 3:]
def get_coord_info(cord, uset, coordref):
    """
    Function for getting coordinfo as needed by the USET table.
    Called by addgrid.
    Parameters
    ----------
    cord : scalar or 4x3 array
        If scalar, it is a coordinate system id (must be 0 or appear
        in either `uset` or `coordref`). If 4x3 matrix, format is as
        on a Nastran CORD2* card::
            [ id type reference_id ]
            [ Ax   Ay   Az         ]
            [ Bx   By   Bz         ]
            [ Cx   Cy   Cz         ]
        where type is 0 (rectangular), 1 (cylindrical), or 2
        (spherical).
    uset : ndarray
        A 6-column matrix as output by :func:`op2.rdn2cop2`. Not used
        unless needed.
    coordref : dictionary
        Read/write dictionary with the keys being the coordinate
        system id and the values being the 5x3 matrix returned below.
        For speed reasons, this routine will look in `coordref` before
        `uset` for a coordinate system. Can be empty.
    Raises
    ------
    ValueError
        When requested `cord` is a scalar but is not found in either
        `uset` or `coordref`.
    Returns
    -------
    cordout : 5x3 matrix
        Coordinate information in a 5x3 matrix::
            [id  type 0]  # output coord. sys. id and type
            [xo  yo  zo]  # origin of coord. system
            [     T    ]  # 3x3 transformation to basic
        Note that T is for the coordinate system, not a grid
        (unless type = 0 which means rectangular)
    If neither `uset` nor `coordref` have a needed coordinate system,
    this routine will error out.
    See also n2y.addgrid, op2.rdn2cop2.
    """
    if np.size(cord) == 1:
        # scalar id: check the cache first, fall back to the table
        try:
            return coordref[cord]
        except KeyError:
            ci = _get_coordinfo_byid(cord, uset)
            coordref[cord] = ci
            return ci
    cid_type = cord[0, :2].astype(np.int64)
    # resolve the reference system (cached when possible):
    try:
        refinfo = coordref[cord[0, 2]]
    except KeyError:
        refinfo = _get_coordinfo_byid(cord[0, 2], uset)
        coordref[cord[0, 2]] = refinfo
    # A = origin, B = point on Z-axis, C = point in XZ-plane, all
    # expressed in the reference system:
    a = cord[1]
    b = cord[2]
    c = cord[3]
    a2r = math.pi/180.
    if refinfo[0, 1] == 2:  # cylindrical
        # convert [R, theta(deg), z] to rectangular:
        a = np.hstack((a[0]*math.cos(a[1]*a2r),
                       a[0]*math.sin(a[1]*a2r),
                       a[2]))
        b = np.hstack((b[0]*math.cos(b[1]*a2r),
                       b[0]*math.sin(b[1]*a2r),
                       b[2]))
        c = np.hstack((c[0]*math.cos(c[1]*a2r),
                       c[0]*math.sin(c[1]*a2r),
                       c[2]))
    if refinfo[0, 1] == 3:  # spherical
        # convert [R, theta(deg), phi(deg)] to rectangular:
        s = math.sin(a[1]*a2r)
        a = a[0] * np.hstack((s*math.cos(a[2]*a2r),
                              s*math.sin(a[2]*a2r),
                              math.cos(a[1]*a2r)))
        s = math.sin(b[1]*a2r)
        b = b[0] * np.hstack((s*math.cos(b[2]*a2r),
                              s*math.sin(b[2]*a2r),
                              math.cos(b[1]*a2r)))
        s = math.sin(c[1]*a2r)
        c = c[0] * np.hstack((s*math.cos(c[2]*a2r),
                              s*math.sin(c[2]*a2r),
                              math.cos(c[1]*a2r)))
    # build the orthonormal axes of the new system: z along A->B,
    # y perpendicular to the AB/AC plane, x completing the triad
    ab = b-a
    ac = c-a
    z = ab/linalg.norm(ab)
    y = np.cross(z, ac)
    y = y/linalg.norm(y)
    x = np.cross(y, z)
    x = x/linalg.norm(x)
    # express origin and axes in basic via the reference's transform:
    Tg = refinfo[2:]
    location = refinfo[1] + np.dot(Tg, a)
    T = np.dot(Tg, np.vstack((x, y, z)).T)
    row1 = np.hstack((cid_type, 0))
    coordinfo = np.vstack((row1, location, T))
    # cache for subsequent lookups:
    coordref[cid_type[0]] = coordinfo
    return coordinfo
def build_coords(cords):
    """
    Builds the coordinate system dictionary from array of coordinate
    card information.
    Parameters
    ----------
    cords : 2d array-like
        2d array, n x 12::
            [cid, ctype, refcid, a1, a2, a3, b1, b2, b3, c1, c2, c3]
        where:
            ctype = 1 for rectangular
            ctype = 2 for cylindrical
            ctype = 3 for spherical
    Returns
    -------
    coordref : dictionary
        Dictionary with the keys being the coordinate system id
        (`cid`) and the values being the 5x3 matrix::
            [cid  ctype 0]  # output coord. sys. id and type
            [xo   yo   zo]  # origin of coord. system
            [      T     ]  # 3x3 transformation to basic
        Note that T is for the coordinate system, not a grid
        (unless type = 0 which means rectangular)
    This routine loops over the coordinate systems according to
    reference cid order.
    Raises
    ------
    RuntimeError
        When a reference cid is not found.
        When non-equal duplicate coordinate systems are found. (Equal
        duplicates are quietly ignored).
    """
    # resolve coordinate systems, and store them in a dictionary:
    cords = np.atleast_1d(cords)
    coordref = {}
    if np.size(cords, 0) > 0:
        j = np.argsort(cords[:, 0])
        cords = cords[j, :]
        cids = cords[:, 0]
        # after sorting, duplicate cids are adjacent:
        duprows = np.nonzero(np.diff(cids) == 0)[0]
        if duprows.size > 0:
            delrows = []
            for i in duprows:
                # identical duplicates are dropped; conflicting ones
                # are an error:
                if np.all(cords[i] == cords[i+1]):
                    delrows.append(i+1)
                else:
                    raise RuntimeError('duplicate but unequal '
                                       'coordinate systems detected.'
                                       ' cid = {}'.format(cids[i]))
            cords = np.delete(cords, delrows, axis=0)
            cids = cords[:, 0]
        # make a uset table for the cord cards ...
        # but have to do it in order:
        # breadth-first resolution: start from systems referencing
        # basic (refcid 0), then systems referencing those, etc.
        ref_ids = 0
        n = np.size(cords, 0)
        selected = np.zeros(n, dtype=np.int64)
        loop = 1
        while np.any(selected == 0):
            pv = locate.findvals(cords[:, 2], ref_ids)
            if pv.size == 0:
                # some refcid never became resolvable -> bad input
                print('Need these coordinate cards:', ref_ids)
                raise RuntimeError('Could not resolve coordinate '
                                   'systems. See message above for '
                                   'missing ids.')
            selected[pv] = loop
            loop += 1
            ref_ids = cords[pv, 0]
        # process in dependency order so every reference system is
        # already in `coordref` when needed:
        J = np.argsort(selected)
        for j in range(n):
            cs = np.reshape(cords[J[j], :], (4, 3))  # , order='C')
            add_grid_to_uset(None, j+1, 'b', 0, [0, 0, 0], cs, coordref)
    return coordref
def get_coords(uset, gid, csys, coordref=None):
    r"""
    Get coordinates of a grid or location in a specified coordinate
    system.

    Parameters
    ----------
    uset : ndarray
        A 6-column matrix as output by :func:`op2.rdn2cop2`.
    gid : integer or 3 element vector
        If integer, it is a grid id in `uset`. Otherwise, it is a 3
        element vector: [x, y, z] specifiy location in basic.
    csys : integer or 4x3 matrix
        Specifies coordinate system to get coordinates of `gid` in.
        If integer, it is the id of the coordinate system which must
        be defined in either `uset` or `coordref` (unless it is 0).
        If a 4x3 matrix, it completely defines the coordinate system::
            [ cid type reference_id ]
            [ Ax   Ay   Az          ]
            [ Bx   By   Bz          ]
            [ Cx   Cy   Cz          ]
        See help on :func:`addgrid` for more information on the 4x3.
    coordref : dictionary or None
        If None, this input is ignored. Otherwise, it is a read/write
        dictionary with the keys being the coordinate system id and
        the values being the 5x3 matrix returned below. For speed
        reasons, this routine will look in `coordref` before `uset`
        for a coordinate system. Can be empty.

    Returns
    -------
    coords : 3-element ndarray of location in `csys`:
        - Rectangular: [x, y, z]
        - Cylindrical: [R, theta, z] (theta is in deg)
        - Spherical: [R, theta, phi] (theta and phi are in deg)

    Coordinate conversions from global to basic are (where
    [xo; yo; zo] is the coordinate system location in basic and T is
    the coordinate transform to basic):

    Rectangular (type = 1)::
        [xb; yb; zb] = T*[x; y; z] + [xo; yo; zo]

    .. math::
        \left\{
          \begin{array}{c} x_b \\ y_b \\ z_b \end{array}
        \right\}
        = \textbf{T}
        \left\{
          \begin{array}{c} x \\ y \\ z \end{array}
        \right\}
        +
        \left\{
          \begin{array}{c} x_o \\ y_o \\ z_o \end{array}
        \right\}

    Cylindrical (type = 2)::
        # c = cos(theta); s = sin(theta)
        [xb; yb; zb] = T*[R c; R s; z] + [xo; yo; zo]

    .. math::
        \left\{
          \begin{array}{c} x_b \\ y_b \\ z_b \end{array}
        \right\}
        = \textbf{T}
        \left\{
          \begin{array}{c} R \cos \theta \\ R \sin \theta \\ z
          \end{array}
        \right\}
        +
        \left\{
          \begin{array}{c} x_o \\ y_o \\ z_o \end{array}
        \right\}

    Spherical (type = 3)::
        # s1 = sin(theta); s2 = sin(phi)
        [xb; yb; zb] = T*[R s1 c2; R s1 s2; R c1] + [xo; yo; zo]

    .. math::
        \left\{
          \begin{array}{c} x_b \\ y_b \\ z_b \end{array}
        \right\}
        = \textbf{T}
        \left\{
          \begin{array}{c}
          R \sin \theta \cos \phi \\
          R \sin \theta \sin \phi \\
          R \cos \theta
          \end{array}
        \right\}
        +
        \left\{
          \begin{array}{c} x_o \\ y_o \\ z_o \end{array}
        \right\}

    This routine does the inverse of those equations, as follows:

    Rectangular (type = 1)::
        [x; y; z] = T'*([xb; yb; zb] - [xo; yo; zo])

    .. math::
        \left\{
          \begin{array}{c} x \\ y \\ z \end{array}
        \right\}
        = \textbf{T}^{\rm T}
        \left\{
          \begin{array}{c} x_b - x_o \\ y_b - y_o \\ z_b - z_o
          \end{array}
        \right\}

    Cylindrical (type = 2)::
        [x; y; z] = T'*([xb; yb; zb] - [xo; yo; zo])
        R = rss(x, y)
        theta = atan2(y, x)

    .. math::
        \left\{
          \begin{array}{c} x \\ y \\ z \end{array}
        \right\}
        = \textbf{T}^{\rm T}
        \left\{
          \begin{array}{c} x_b - x_o \\ y_b - y_o \\ z_b - z_o
          \end{array}
        \right\}

        R = \sqrt{x^2 + y^2}

        \theta = \mathrm{atan2}(y, x)

    Spherical (type = 3)::
        [x; y; z] = T'*([xb; yb; zb] - [xo; yo; zo])
        R = rss(x, y, z)
        phi = atan2(y, x)
        if abs(sin(phi)) > abs(cos(phi)):
            theta = atan2(y/sin(phi), z)
        else:
            theta = atan2(x/cos(phi), z)

    .. math::
        \left\{
          \begin{array}{c} x \\ y \\ z \end{array}
        \right\}
        = \textbf{T}^{\rm T}
        \left\{
          \begin{array}{c} x_b - x_o \\ y_b - y_o \\ z_b - z_o
          \end{array}
        \right\}

        R = \sqrt{x^2 + y^2 + z^2}

        \phi = \mathrm{atan2}(y, x)

        \theta =
        \begin{cases}
        \mathrm{atan2}(y/(\sin \phi), z),
        &\text{if }
        \left|{\sin \phi}\right| > \left|{\cos \phi}\right| \\
        \mathrm{atan2}(x/(\cos \phi), z),
        &\text{otherwise}
        \end{cases}

    See Also
    --------
    :func:`op2.rdn2cop2`, :func:`nastran.bulk2uset`,
    :func:`coordcardinfo`, :func:`nastran.wtcoordcards`,
    :func:`rbgeom_uset`.

    Examples
    --------
    >>> import n2y
    >>> import numpy as np
    >>> # node 100 in basic is @ [5, 10, 15]
    >>> # node 200 in cylindrical coordinate system is @
    >>> # [r, th, z] = [32, 90, 10]
    >>> cylcoord = np.array([[1, 2, 0], [0, 0, 0], [1, 0, 0],
    ...                     [0, 1, 0]])
    >>> sphcoord = np.array([[2, 3, 0], [0, 0, 0], [0, 1, 0],
    ...                     [0, 0, 1]])
    >>> uset = None
    >>> uset = n2y.addgrid(uset, 100, 'b', 0, [5, 10, 15], 0)
    >>> uset = n2y.addgrid(uset, 200, 'b', cylcoord,
    ...                    [32, 90, 10], cylcoord)
    >>> uset = n2y.addgrid(uset, 300, 'b', sphcoord,
    ...                    [50, 90, 90], sphcoord)
    >>> np.set_printoptions(precision=2, suppress=True)
    >>> # get coordinates of node 200 in basic:
    >>> n2y.getcoords(uset, 200, 0)
    array([ 10.,   0.,  32.])
    >>> # get coordinates of node 200 in cylindrical (cid 1):
    >>> n2y.getcoords(uset, 200, 1)
    array([ 32.,  90.,  10.])
    >>> # get coordinates of node 200 in spherical (cid 2):
    >>> r = np.hypot(10., 32.)
    >>> th = 90.
    >>> phi = math.atan2(10., 32.)*180/math.pi
    >>> n2y.getcoords(uset, 200, 2) - np.array([r, th, phi])
    array([ 0.,  0.,  0.])
    """
    # resolve the point's basic coordinates: either look the grid id
    # up in the uset table or take the [x, y, z] vector directly:
    if np.size(gid) == 1:
        pv = make_dof_partition_vector(uset, 'p', gid)[0][0]
        xyz_basic = uset[pv, 3:]
    else:
        xyz_basic = np.asarray(gid).flatten()
    # basic output requested -- nothing to transform:
    if np.size(csys) == 1 and csys == 0:
        return xyz_basic
    # get input "coordinfo" [ cid type 0; location(1x3); T(3x3) ]:
    if coordref is None:
        coordref = {}
    coordinfo = get_coord_info(csys, uset, coordref)
    xyz_coord = coordinfo[1]
    T = coordinfo[2:]  # transform to basic for coordinate system
    # g is the point in the target system's rectangular frame:
    g = np.dot(T.T, xyz_basic-xyz_coord)
    ctype = coordinfo[0, 1].astype(np.int64)
    if ctype == 1:
        return g
    if ctype == 2:
        # cylindrical: [R, theta (deg), z]
        R = math.hypot(g[0], g[1])
        theta = math.atan2(g[1], g[0])
        return np.array([R, theta*180/math.pi, g[2]])
    # spherical: [R, theta (deg), phi (deg)]; pick the better-
    # conditioned formula for theta depending on phi:
    R = linalg.norm(g)
    phi = math.atan2(g[1], g[0])
    s = math.sin(phi)
    c = math.cos(phi)
    if abs(s) > abs(c):
        theta = math.atan2(g[1]/s, g[2])
    else:
        theta = math.atan2(g[0]/c, g[2])
    return np.array([R, theta*180/math.pi, phi*180/math.pi])
def get_loc_a_basic(coordinfo, a):
    """
    Return the basic-frame location of point "a"; called by
    :func:`addgrid`.

    `coordinfo` is the 5x3 matrix [cid type 0; origin(1x3); T(3x3)]
    and `a` is the point expressed in that coordinate system:
    [x, y, z], [R, theta, z] or [R, theta, phi] (angles in degrees).
    """
    # transformation from global to basic and system origin:
    to_basic = coordinfo[2:]
    origin = coordinfo[1]
    ctype = coordinfo[0, 1]
    if ctype == 1:
        # rectangular: coordinates are already Cartesian
        return origin + np.dot(to_basic, a)
    if ctype == 2:
        # cylindrical: convert [R, theta, z] to Cartesian
        theta = math.radians(a[1])
        cart = np.array([a[0]*math.cos(theta),
                         a[0]*math.sin(theta),
                         a[2]])
    else:
        # spherical: convert [R, theta, phi] to Cartesian
        theta = math.radians(a[1])
        phi = math.radians(a[2])
        st = math.sin(theta)
        cart = a[0]*np.array([st*math.cos(phi),
                              st*math.sin(phi),
                              math.cos(theta)])
    return origin + np.dot(to_basic, cart)
def add_grid_to_uset(uset, gid, nasset, coordin, xyz, coordout, coordref=None):
    """
    Add a grid to a USET table.

    Parameters
    ----------
    uset : ndarray or None
        A 6-column matrix as output by :func:`op2.rdn2cop2`; can be
        None.
    gid : integer
        Grid id, must be unique.
    nasset : string
        The set to put the grid in (eg "m"); must be one of these
        letters: m, s, o, q, r, c, b, e. Can also be a 6-character
        string of set letters, one for each dof.
    coordin : integer or 4x3 matrix
        If integer, it is the id of the input coordinate system which
        is defined in uset (or coordref). If a 4x3 matrix, it
        defines the input coordinate system (see below). Note the id
        0 is the basic coordinate system and is always available.
    xyz : three element vector
        Defines grid location in `coordin` coordinates::
            rectangular: [X, Y, Z]
            cylindrical: [R, Theta, Z]
            spherical:   [R, Theta, Phi]
            - angles are specified in degrees
    coordout: integer or 4x3 matrix
        Same format as `coordin`. Defines the output coordinate
        system of the grid (see description below for more
        information).
    coordref : dictionary or None
        If None, this input is ignored. Otherwise, it is a read/write
        dictionary (which can be empty) with the keys being the
        coordinate system id and the values being the 5x3 matrix::
            [cid type 0]  # output coord. sys. id and type
            [xo  yo  zo]  # origin of coord. system
            [    T     ]  # 3x3 transformation to basic
        Note that T is for the coordinate system, not a grid
        (unless type = 0 which means rectangular)
        For example, to create a `coordref` with coordinate system
        104, you can do this::
            coordref = {}
            addgrid(None, 1, "b", 0, [0, 0, 0], crd104, coordref)

    Returns
    -------
    Returns updated version of `uset` and also updates the `coordref`
    dictionary (if it is a dictionary).

    To define a coordinate system, coordin or coordout must be 4x3
    size matrices containing the same information that would be on a
    CORD2R, CORD2C, or CORD2S entry::
        [ cid type reference_id ]
        [ Ax   Ay   Az          ]
        [ Bx   By   Bz          ]
        [ Cx   Cy   Cz          ]
    where 'cid' is the id of the new coordinate system (must be
    unique), 'type' is defined as::
        1 - rectangular
        2 - cylindrical
        3 - spherical
    and the locations of A, B, and C are given in the coordinate
    system indicated by 'reference_id'.

    Notes
    -----
    In the demo below, the uset matrix is expanded each call.
    For a large number of nodes, it is more efficient to allocate the
    matrix first and then fill in every 6 rows. For example, if there
    are n nodes::
        uset = np.zeros((n*6, 6))
        coordref = {}
        for i in range(n):
            j = i*6
            uset[j:j+6] = n2y.addgrid(None, ids[i], 'b', cdin[i],
                                      xyz[i], cdout[i], coordref)

    See Also
    --------
    nastran.bulk2uset, n2y.rbgeom_uset, n2y.formrbe3,
    op2.rdnas2cam, op2.rdn2cop2, n2y.usetprt.

    Raises
    ------
    ValueError
        If the grid id `gid` is already in `uset` or if a referenced
        coordinate system is not found in `uset` or `coordref`.

    Examples
    --------
    >>> import n2y
    >>> # node 100 in basic is @ [5, 10, 15]
    >>> # node 200 in cylindrical coordinate system is @
    >>> # [r, th, z] = [32, 90, 10]
    >>> cylcoord = np.array([[1, 2, 0], [0, 0, 0], [1, 0, 0],
    ...                     [0, 1, 0]])
    >>> uset = None
    >>> uset = n2y.add_grid(uset, 100, 'b', 0, [5, 10, 15], 0)
    >>> uset = n2y.add_grid(uset, 200, 'b', cylcoord,
    ...                     [32, 90, 10], cylcoord)
    >>> uset.astype(np.int64)
    array([[    100,       1, 2097154,       5,      10,      15],
           [    100,       2, 2097154,       0,       1,       0],
           [    100,       3, 2097154,       0,       0,       0],
           [    100,       4, 2097154,       1,       0,       0],
           [    100,       5, 2097154,       0,       1,       0],
           [    100,       6, 2097154,       0,       0,       1],
           [    200,       1, 2097154,      10,       0,      32],
           [    200,       2, 2097154,       1,       2,       0],
           [    200,       3, 2097154,       0,       0,       0],
           [    200,       4, 2097154,       0,       0,       1],
           [    200,       5, 2097154,       1,       0,       0],
           [    200,       6, 2097154,       0,       1,       0]])
    """
    # grid ids must be unique:
    if uset is not None and np.any(uset[:, 0] == gid):
        raise ValueError("grid {} already in `uset` table.".
                         format(gid))
    # get input "coordinfo" [ cid type 0; location(1x3); T(3x3) ]:
    # ensure coordref exists and always contains the basic system (0):
    if coordref is None:
        coordref = {0: np.vstack((np.array([[0, 1, 0], [0., 0., 0.]]),
                                  np.eye(3)))}
    else:
        try:
            coordref[0]
        except KeyError:
            coordref[0] = np.vstack((np.array([[0, 1, 0],
                                               [0., 0., 0.]]),
                                     np.eye(3)))
    coordinfo = get_coord_info(coordin, uset, coordref)
    # prepare set(s): one mask per dof, either per-dof letters or one
    # letter applied to all six dof:
    if len(nasset) == 6:
        sets = np.array([[mkusetmask(nasset[0])],
                         [mkusetmask(nasset[1])],
                         [mkusetmask(nasset[2])],
                         [mkusetmask(nasset[3])],
                         [mkusetmask(nasset[4])],
                         [mkusetmask(nasset[5])]], dtype=np.int64)
    else:
        sets = mkusetmask(nasset) * np.ones((6, 1), dtype=np.int64)
    # get location of point in basic:
    xyz = get_loc_a_basic(coordinfo, xyz)
    # get output "coordinfo" [ id type 0; location(1x3); T(3x3) ]:
    if np.any(coordout != coordin):
        coordinfo = get_coord_info(coordout, uset, coordref)
    # prepare uset entry for grid: 6 rows of
    # [gid, dof (1-6), set mask, xyz/coordinfo columns]:
    last3 = np.vstack((xyz[None], coordinfo))
    usetid = np.hstack((gid*np.ones((6, 1)),
                        np.arange(1, 7).reshape(-1, 1),
                        sets, last3))
    if uset is not None:
        # insert uset entry in numeric order
        j = np.searchsorted(uset[:, 0], gid)
        if j == uset.shape[0]:
            usetid = np.vstack((uset, usetid))
        else:
            usetid = np.vstack((uset[:j], usetid, uset[j:]))
    return usetid
def _solve(a, b):
"""This is :func:`scipy.linalg.solve` but with a matrix condition
check on `a`. Call by :func:`formrbe3`."""
c = np.linalg.cond(a)
if c > 1. / np.finfo(float).eps:
warnings.warn('matrix is poorly conditioned (cond={0:.3e}). '
'Solution will likely be inaccurate.'.format(c),
RuntimeWarning)
return linalg.solve(a, b)
def form_rbe3(uset, GRID_dep, DOF_dep, Ind_List, UM_List=None):
    """
    Form a least squares interpolation matrix, like RBE3 in Nastran.

    Parameters
    ----------
    uset : ndarray
        A 6-column matrix as output by :func:`op2.rdn2cop2`.
    GRID_dep : integer
        Id of dependent grid.
    DOF_dep : integer
        Contains all or a subset of the digits 123456 giving the
        dependent component DOF.
    Ind_List : list
        [ DOF_Ind1, GRIDS_Ind1, DOF_Ind2, GRIDS_Ind2, ... ], where::
            DOF_Ind1   : 1 or 2 element vector containing the
                         component DOF (ie, 123456) of the nodes in
                         GRIDS_Ind1 and, optionally, the weighting
                         factor for these DOF. If not input, the
                         weighting factor defaults to 1.0.
            GRIDS_Ind1 : list of node ids corresponding to DOF_Ind1
            ...
        eg:  [ [123, 1.2], [95, 195, 1000], 123456, 95]
    UM_List : None or list
        [ GRID_MSET1, DOF_MSET1, GRID_MSET2, DOF_MSET2, ... ] where::
            GRID_MSET1 : first grid in the M-set
            DOF_MSET1  : DOF of first grid in M-set (integer subset
                         of 123456). No weighting factors are
                         allowed here.
            GRID_MSET2 : second grid in the M-set
            DOF_MSET2  : DOF of second grid in M-set
            ...
        The `UM_List` option changes what is dependent and what is
        independent. The M-set DOF will become the dependent DOF
        instead of `GRID_dep`, `DOF_dep` (though it can include these
        DOF). The total number of M-set DOF must equal the original
        amount defined in `GRID_dep`, `DOF_dep` (max of 6). All M-set
        DOF must be within the the set of previously entered DOF
        (either dependent or independent).

    Returns
    -------
    rbe3 : ndarray
        The interpolation matrix. Size is # dependent DOF rows by #
        independent DOF columns. The order of rows and columns
        corresponds to the order the DOF occur in the USET table
        `uset`.

    Notes
    -----
    The approach used is:
        - Use :func:`rbgeom_uset` to form the rigid-body modes based on
          geometry and relative to `GRID_dep`.
        - Partition the rows of the rigid-body modes down to the
          independent DOF.
        - Use least squares approach to 'invert' the rigid-body modes.
        - Transform the result to global coordinates.
        - Partition the rows to the desired dependent DOF.
    If the UM_List option is used, these additional steps are done:
        - Partition current rbe3 matrix into four parts to separate:
            - dependent DOF into M-set and non-M-set
            - independent DOF into M-set and non-M-set
        - Solve for both parts of the M-set and merge.
        - Reorder if necessary.
    Simplifications are made if the M-set is completely contained
    within either the dependent or independent set.

    When the `UM_List` option is used, unless the M-set is equal to
    the dependent set (which would be the same as not including the
    `UM_List` input), there will be a matrix inversion. This matrix
    must be non-singular. If it is close to singular (or singular),
    this routine will print a warning message and, if singular,
    scipy.linalg will raise the LinAlgError exception. In this case,
    choose a different, non-singular set for the M-set. This is
    similar to choosing DOF for the SUPORT card in Nastran.

    Raises
    ------
    ValueError
        When the `UM_List` input is used but the size does not match
        the `GRID_dep` and `DOF_dep` size.

    See also addgrid, rbgeom_uset.

    Examples
    --------
    >>> import n2y
    >>> # First, make a uset table using all basic coords to simplify
    >>> # visual inspection:
    >>> # node 100 in basic is @ [ 1, 0, 0]
    >>> # node 200 in basic is @ [ 0, 1, 0]
    >>> # node 300 in basic is @ [-1, 0, 0]
    >>> # node 400 in basic is @ [ 0, -1, 0]
    >>> # node 500 in basic is @ [ 0, 0, 0]
    >>> uset = None
    >>> uset = n2y.addgrid(uset, 100, 'b', 0, [1, 0, 0], 0)
    >>> uset = n2y.addgrid(uset, 200, 'b', 0, [0, 1, 0], 0)
    >>> uset = n2y.addgrid(uset, 300, 'b', 0, [-1, 0, 0], 0)
    >>> uset = n2y.addgrid(uset, 400, 'b', 0, [0, -1, 0], 0)
    >>> uset = n2y.addgrid(uset, 500, 'b', 0, [0, 0, 0], 0)
    >>> #
    >>> # Define the motion of grid 500 to be average of translational
    >>> # motion of grids: 100, 200, 300, and 400.
    >>> rbe3 = n2y.formrbe3(uset, 500, 123456, [123, [100, 200, 300, 400]])
    >>> print(rbe3+0)
    [[ 0.25  0.    0.    0.25  0.    0.    0.25  0.    0.    0.25  0.    0.  ]
     [ 0.    0.25  0.    0.    0.25  0.    0.    0.25  0.    0.    0.25  0.  ]
     [ 0.    0.    0.25  0.    0.    0.25  0.    0.    0.25  0.    0.    0.25]
     [ 0.    0.    0.    0.    0.    0.5   0.    0.    0.    0.    0.   -0.5 ]
     [ 0.    0.   -0.5   0.    0.    0.    0.    0.    0.5   0.    0.    0.  ]
     [ 0.    0.25  0.   -0.25  0.    0.    0.   -0.25  0.    0.25  0.    0.  ]]
    >>> #
    >>> # Example showing UM_List option:
    >>> rbe3um = n2y.formrbe3(uset, 500, 123456, [123, [100, 200, 300, 400]],
    ...                       [100, 12, 200, 3, 300, 23, 400, 3])
    >>> print(rbe3um+0)
    [[ 0.  -1.   0.  -1.  -1.   0.   4.   0.   0.   0.   0.   0. ]
     [ 0.   0.5 -0.5  0.  -0.5 -0.5  0.   2.   0.   0.   0.   2. ]
     [-1.   0.   0.   0.   0.   0.   0.   0.   2.   1.  -1.   0. ]
     [ 0.  -0.5 -0.5  0.   0.5 -0.5  0.   2.   0.   0.   0.  -2. ]
     [ 1.   0.   0.   0.   0.   0.   0.   0.   0.   0.   2.   0. ]
     [-1.   0.   0.   0.   0.   0.   0.   0.   2.  -1.  -1.   0. ]]
    >>> #
    >>> # Example showing UM_List option including some dependent dof:
    >>> rbe3um2 = n2y.formrbe3(uset, 500, 123456, [123, [100, 200, 300, 400]],
    ...                        [100, 12, 200, 3, 300, 3, 500, 23])
    >>> print(rbe3um2+0)
    [[ 0.   -1.    0.   -1.    0.   -1.    0.    0.    4.    0.    0.    0.  ]
     [ 0.    1.    0.    0.    1.   -1.    0.    0.    0.    0.    0.    4.  ]
     [ 0.    0.    0.    0.    0.    0.    0.    1.    0.    2.    0.    0.  ]
     [ 1.    0.    0.    0.    0.    0.    0.    0.    0.    0.    2.    0.  ]
     [ 0.    0.25  0.25  0.    0.5  -0.25  0.25  0.    0.    0.    0.    1.  ]
     [ 0.5   0.    0.    0.    0.    0.    0.    0.5   0.    0.5   0.5   0.  ]]
    """
    # form dependent DOF table: [gid, dof] pairs for GRID_dep
    ddof = expand_trim(DOF_dep)
    ddof = np.hstack((GRID_dep+0*ddof, ddof))
    # form independent DOF table (with per-DOF weighting factors):
    idof = np.array([], dtype=np.int64).reshape(0, 2)
    wtdof = np.array([])
    usetdof = uset[:, :2].astype(np.int64)
    for j in range(0, len(Ind_List), 2):
        DOF_ind = np.atleast_1d(Ind_List[j])
        GRIDS_ind = np.atleast_1d(Ind_List[j+1])
        # optional second element of DOF_ind is the weighting factor:
        if len(DOF_ind) == 2:
            wtcur = DOF_ind[1]
            DOF_ind = DOF_ind[0]
        else:
            wtcur = 1.
        idofcur = expand_trim(DOF_ind)
        lend = len(idofcur)
        leng = len(GRIDS_ind)
        # form all (grid, dof) combinations for this group:
        grids = np.dot(np.ones((lend, 1), dtype=np.int64),
                       GRIDS_ind.reshape(1, -1)).T
        dof = np.dot(idofcur, np.ones((1, leng), dtype=np.int64)).T
        wtdof = np.hstack((wtdof, wtcur*np.ones((lend*leng),
                                                dtype=np.int64)))
        idof = np.vstack((idof, np.hstack((grids.reshape(-1, 1),
                                           dof.reshape(-1, 1)))))
    # Sort idof according to uset:
    pv = locate.get_intersection(idof, usetdof, 2)[0]
    idof = idof[pv]
    wtdof = wtdof[pv]
    if UM_List is not None:
        # expand UM_List into an M-set (grid, dof) table:
        mdof = np.array([], dtype=np.int64).reshape(0, 2)
        for j in range(0, len(UM_List), 2):
            GRID_MSET = np.atleast_1d(UM_List[j])
            DOF_MSET = np.atleast_1d(UM_List[j+1])
            mdofcur = expand_trim(DOF_MSET)
            grids = np.dot(np.ones((len(mdofcur), 1), dtype=np.int64),
                           GRID_MSET.reshape(1, -1))
            mdof = np.vstack((mdof, np.hstack((grids, mdofcur))))
        if np.size(mdof, 0) != np.size(ddof, 0):
            raise ValueError("incorrect size of M-set DOF ({}): "
                             "must equal size of Dep DOF ({})."
                             "".format(np.size(mdof, 0),
                                       np.size(ddof, 0)))
        # The rest of the code uses 'mdof' to sort rows of the output
        # matrix. We could leave it as input, or sort it according to
        # the uset table. For now, sort it according to uset:
        pv = locate.get_intersection(mdof, usetdof, 2)[0]
        mdof = mdof[pv]
    # partition uset table down to needed dof only:
    npids = np.vstack((ddof[0, :1], idof[:, :1]))
    ids = sorted(list(set(npids[:, 0])))
    alldof = make_dof_partition_vector(uset, "p", ids)[0]
    alldof = locate.find2zo(alldof, np.size(uset, 0))
    uset = uset[alldof]
    # form partition vectors:
    ipv = make_dof_partition_vector(uset, "p", idof)[0]
    # need transformation from basic to global for dependent grid
    pv = make_dof_partition_vector(uset, "p", GRID_dep)[0]
    # need to scale rotation weights by characteristic length:
    rot = idof[:, 1] > 3
    if np.any(rot):
        # get characterstic length of rbe3: average distance of the
        # independent grids from the dependent grid location
        deploc = uset[pv[0], 3:]
        n = np.size(uset, 0) // 6
        delta = uset[::6, 3:] - np.dot(np.ones((n, 1)),
                                       np.reshape(deploc, (1, 3)))
        Lc = np.sum(np.sqrt(np.sum(delta * delta, axis=1))) / (n-1)
        if Lc > 1.e-12:
            wtdof[rot] = wtdof[rot]*(Lc*Lc)
    # form rigid-body modes relative to GRID_dep
    rbb = rigid_body_geom_uset(uset, GRID_dep)
    T = rbb[pv]
    rb = rbb[ipv]
    # weighted least-squares "inverse" of the rigid-body modes:
    rbw = rb.T * wtdof
    rbwrb = rbw @ rb
    rbe3 = _solve(rbwrb, rbw)
    # transform to global and keep only the requested dependent DOF:
    rbe3 = np.dot(T, rbe3)[ddof[:, 1]-1]
    if UM_List is None:
        return rbe3
    # find m-set dof that belong to current dependent set:
    dpv_m = locate.get_intersection(ddof, mdof, 2)[0]
    # this works when the m-set is a subset of the independent set:
    if not np.any(dpv_m):
        mpv = make_dof_partition_vector(uset[ipv], "p", mdof)[0]
        rbe3_um = rbe3[:, mpv]
        notmpv = locate.flippv(mpv, len(ipv))
        rhs = np.hstack((np.eye(len(mpv)), -rbe3[:, notmpv]))
        rbe3 = _solve(rbe3_um, rhs)
        # rearrange columns to uset order:
        curdof = np.vstack((ddof, idof[notmpv]))
        pv = locate.get_intersection(curdof, usetdof, 2)[0]
        return rbe3[:, pv]
    # some dependent retained, so find m-set dof that belong to current
    # independent set:
    ipv_m = locate.get_intersection(idof, mdof, 2)[0]
    if not np.any(ipv_m):
        # already done, except reordering:
        rbe3 = rbe3[dpv_m]
        # rearrange columns to uset order:
        pv = locate.get_intersection(idof, usetdof, 2)[0]
        return rbe3[:, pv]
    # To include UM option:
    #   Note: if the M-set is entirely within either the dependent
    #   set or the independent set, simplifications are made (see
    #   above). The steps here assume the M-set is composed of DOF
    #   from both the dependent and independent sets.
    #
    #   1. partition rbe3 to four parts, A, B, C & D:
    #      - before rearranging to have m-set on left:
    #
    #        Dm     | A, B |  Im
    #        Dn  =  | C, D |  In
    #
    #   2. solve for Im and Dm in terms of Dn and In:
    #
    #        Dm     | A inv(C),  -A inv(C) D + B |  Dn
    #        Im  =  |   inv(C),      -inv(C) D   |  In
    #
    #   Matrix C must be square and non-singular. The resulting
    #   matrix is reordered as described in the help section.
    # partition rbe3 -- taking care NOT to rearrange columns
    nd, ni = np.shape(rbe3)
    dpv_mzo = locate.find2zo(dpv_m, nd)
    notdpv_m = np.logical_not(dpv_mzo)
    ipv_mzo = locate.find2zo(ipv_m, ni)
    notipv_m = np.logical_not(ipv_mzo)
    A = rbe3[np.ix_(dpv_mzo, ipv_mzo)]
    B = rbe3[np.ix_(dpv_mzo, notipv_m)]
    C = rbe3[np.ix_(notdpv_m, ipv_mzo)]   # must be square
    D = rbe3[np.ix_(notdpv_m, notipv_m)]
    n = np.size(C, 0)
    # m-set rows from independent part
    E = _solve(C, np.hstack((np.eye(n), -D)))
    r = np.size(A, 0)
    c = np.size(C, 1)
    # m-set rows from dependent part
    F = np.dot(A, E) + np.hstack((np.zeros((r, c)), B))
    rbe3a = np.vstack((F, E))
    didof = np.vstack((ddof[dpv_mzo], idof[ipv_mzo]))
    pv = locate.get_intersection(didof, mdof, 2)[0]
    rbe3 = rbe3a[pv]
    # rearrange columns to uset order:
    curdof = np.vstack((ddof[notdpv_m], idof[notipv_m]))
    pv = locate.get_intersection(curdof, usetdof, 2)[0]
    return rbe3[:, pv]
def _findse(nas, se):
"""
Find row in nas['selist'] the superelement `se`.
Parameters
----------
nas : dictionary
This is the nas2cam dictionary: ``nas = op2.rdnas2cam()``
se : integer
The id of the superelement.
Returns
-------
r : index
Rows index to where `se` is.
"""
r = np.nonzero(nas['selist'][:, 0] == se)[0]
if r.size == 0:
print('selist = ', nas['selist'])
raise ValueError("superelement {} not found!"
" See selist echo above".format(se))
return r[0]
def upstream_aset_partition_vector(nas, seup):
    """
    Form upstream A-set partition vector for a downstream
    superelement.

    Parameters
    ----------
    nas : dictionary
        This is the nas2cam dictionary:  ``nas = op2.rdnas2cam()``
    seup : integer
        The id of the upstream superelement.

    Returns
    -------
    pv : array
        An index partition vector for partitioning the upstream A-set
        degrees of freedom of superelement SEUP from the P-set of the
        downstream superelement. This partition vector is not a 0-1
        type because the A-set DOF order may be different downstream
        than from upstream (from reordering done on a CSUPER entry).

    See also :func:`mksetpv`, :func:`rdnas2cam`, :func:`formulvs`.

    Example usage::
        # External superelement 100 is upstream of the residual. On
        # the CSUPER entry, the A-set of 100 were assigned new ids and
        # the order was changed. Form the ULVS matrix:
        import n2y
        import op2
        nas = op2.rdnas2cam('nas2cam')
        pv = n2y.upasetpv(nas, 100)
        ulvs100 = nas['phg'][0][pv]  # this will reorder as needed
    """
    # locate the downstream superelement and its uset table:
    r = _findse(nas, seup)
    sedn = nas['selist'][r, 1]
    usetdn = nas['uset'][sedn]
    dnids = nas['dnids'][seup]
    maps = nas['maps'][seup]
    # number of rows in pv should equal size of upstream a-set
    pv = locate.findvals(usetdn[:, 0], dnids)
    if len(pv) < len(dnids):
        # must be an external se, but non-csuper type (the extseout,
        # seconct, etc, type); map dnids through upids first:
        upids = nas['upids'][sedn]
        pv = locate.findvals(upids, dnids)
        ids = usetdn[usetdn[:, 1] <= 1, 0]
        # number of rows should equal size of upstream a-set
        pv = locate.findvals(usetdn[:, 0], ids[pv])
        if len(pv) < len(dnids):
            raise ValueError('not all upstream DOF could'
                             ' be found in downstream')
    if len(maps) > 0:
        # apply the CSUPER reordering (maps) by inverting it:
        if not np.all(maps[:, 1] == 1):
            raise ValueError('column 2 of MAPS for {} is not all 1.'
                             ' Stopping.'.format(seup))
        # definition of maps:  dn = up(maps) ... want up = dn(maps2)
        # example:
        #  maps  = [ 2, 0, 1 ]
        #  maps2 = [ pos_of_1st pos_of_2nd pos_of_3rd ] = [ 1, 2, 0 ]
        maps2 = np.argsort(maps[:, 0])
        pv = pv[maps2]
    return pv
def _proc_mset(nas, se, dof):
    """
    Private utility routine to get m-set information for
    :func:`formtran`.

    Returns: (hasm, m, pvdofm, gm)
    """
    # see if any of the DOF are in the m-set
    hasm = 0
    pvdofm = gm = None
    uset = nas['uset'][se]
    mset_rows = np.nonzero(mksetpv(uset, "g", "m"))[0]
    if mset_rows.size > 0:
        mset_dof = uset[mset_rows, :2].astype(np.int64)
        pvdofm = locate.get_intersection(mset_dof, dof)[0]
        if pvdofm.size > 0:
            # m-set DOF are present in the request; keep only those
            # rows and the matching rows of gm
            hasm = 1
            mset_rows = mset_rows[pvdofm]
            gm = nas['gm'][se][pvdofm]
    return hasm, mset_rows, pvdofm, gm
def _formtran_0(nas, dof, gset):
    """
    Utility routine called by :func:`formtran` when se == 0. See that
    routine for more information.
    """
    uset = nas['uset'][0]
    pvdof, dof = make_dof_partition_vector(uset, "g", dof)
    if gset:
        # g-set recovery: just a row-selection of the identity
        ngset = sum(mksetpv(uset, 'p', 'g'))
        tran = np.zeros((len(pvdof), ngset))
        tran[:, pvdof] = np.eye(len(pvdof))
        return tran, dof
    # if phg is available, recovery is a direct row partition:
    if 'phg' in nas and 0 in nas['phg']:
        return nas['phg'][0][pvdof], dof
    if 'pha' not in nas or 0 not in nas['pha']:
        raise RuntimeError("neither nas['phg'][0] nor "
                           "nas['pha'][0] are available.")
    # only pha is available: requested DOF must not be in the o-set
    o = np.nonzero(mksetpv(uset, "g", "o"))[0]
    if o.size > 0:
        v = locate.get_intersection(uset[o, :2].astype(np.int64),
                                    dof)[0]
        if v.size > 0:
            raise RuntimeError("some of the DOF of SE 0 go to the"
                               " O-set. Routine not set up for"
                               " this.")
    # collect requested DOF by set membership (a, then m, then s):
    a = np.nonzero(mksetpv(uset, "g", "a"))[0]
    pvdofa = locate.get_intersection(uset[a, :2].astype(np.int64),
                                     dof)[0]
    if pvdofa.size > 0:
        a = a[pvdofa]
        sets = a
    else:
        a = []
        sets = np.zeros(0, np.int64)
    hasm, m, pvdofm, gm = _proc_mset(nas, 0, dof)
    if hasm:
        o_n = mksetpv(uset, "n", "o")
        if np.any(o_n):
            if np.any(gm[:, o_n]):
                raise RuntimeError('M-set for residual is dependent'
                                   ' on O-set (through GM). '
                                   'Routine not set up for this.')
        sets = np.hstack((sets, m))
    # see if any of the DOF are in the s-set
    hass = 0
    s = np.nonzero(mksetpv(uset, "g", "s"))[0]
    if s.size > 0:
        pvdofs = locate.get_intersection(uset[s, :2].astype(np.int64),
                                         dof)[0]
        if pvdofs.size > 0:
            hass = 1
            s = s[pvdofs]
            sets = np.hstack((sets, s))
    # sanity check: every requested DOF must have been found:
    fulldof = uset[sets, :2].astype(np.int64)
    pv, pv2 = locate.get_intersection(fulldof, dof, 2)
    if len(pv2) != len(pvdof):
        notpv2 = locate.flippv(pv2, len(pvdof))
        print('missing_dof = ', dof[notpv2])
        raise RuntimeError("bug in _formtran_0() since dof in "
                           "recovery set does not contain all of the "
                           "dof in DOF. See missing_dof echo above.")
    # sets = [ a, m, s ] -- assemble the rows block by block:
    cols = nas['pha'][0].shape[1]
    tran = np.zeros((len(pv), cols))
    R = len(a)
    tran[:R] = nas['pha'][0][pvdofa]
    if hasm:
        a_n = np.nonzero(mksetpv(uset, "n", "a"))[0]
        tran[R:R+len(m)] = np.dot(gm[:, a_n], nas['pha'][0])
        R += len(m)
    if hass:
        # s-set DOF are constrained: rows stay zero
        cu = tran.shape[1]
        tran[R:R+len(s)] = np.zeros((len(s), cu))
    # order DOF as requested:
    tran = tran[pv]
    return tran, dof
def formtran(nas, se, dof, gset=False):
    """
    Make a transformation matrix from A-set DOF to specified DOF within
    the same SE.

    Parameters
    ----------
    nas : dictionary
        This is the nas2cam dictionary:  ``nas = op2.rdnas2cam()``
    se : integer
        The id of the superelement.
    dof : 1d or 2d array
        One or two column matrix: [ids] or [ids, dofs]; if one column,
        the second column is internally set to 123456 for each id
    gset : bool; optional
        If true, and `sedn` == 0, transform from g-set instead of
        modal DOF. See below.

    Returns
    -------
    tuple: (Tran, outdof)
        Tran : ndarray
            Transformation from the A-set DOF of superelement `se` to
            the specified DOF (`dof`) of the same superelement. The
            transformation is as follows::
                if sedn > 0:
                    {DOF} = Tran * {T & Q}
                if sedn == 0:
                    {DOF} = Tran * {modal}  (for gset == False)
                    {DOF} = Tran * {G-Set}  (for gset == True)
        outdof : ndarray
            The expanded version of the `dof` input as returned by
            :func:`mkdofpv`::
                [id1, dof1; id1, dof2; ... id2, dof1; ...]

    This routine is the workhorse of other routines such as
    :func:`formdrm` and :func:`formulvs`.

    See also :func:`formdrm`, :func:`formulvs`, :func:`mkdofpv`.

    Example usage::
        # Want data recovery matrix from t & q dof to grids 3001 and
        # 3002 of se 300:
        import n2y
        import op2
        nas = op2.rdnas2cam('nas2cam')
        drm = n2y.formtran(nas, 300, [3001, 3002])
        # or, equivalently:
        drm = n2y.formdrm(nas, 300, 300, [3001, 3002])
    """
    # residual structure is handled separately:
    if se == 0:
        return _formtran_0(nas, dof, gset)
    uset = nas['uset'][se]
    pvdof, dof = make_dof_partition_vector(uset, "g", dof)
    t_a = np.nonzero(mksetpv(uset, "a", "t"))[0]
    q_a = np.nonzero(mksetpv(uset, "a", "q"))[0]
    # if all dof are in a-set we can quit quickly:
    a = mksetpv(uset, "g", "a")
    if np.all(a[pvdof]):
        pvdofa = make_dof_partition_vector(uset, "a", dof)[0]
        tran = np.eye(sum(a))
        tran = tran[pvdofa]
        return tran, dof
    # collect requested DOF by set membership (t, o, m, q, s):
    sets = np.zeros(0, np.int64)
    t = np.nonzero(mksetpv(uset, "g", "t"))[0]
    pvdoft = locate.get_intersection(uset[t, :2].astype(np.int64),
                                     dof)[0]
    hast = 0
    if pvdoft.size > 0:
        hast = 1
        t = t[pvdoft]
        sets = np.hstack((sets, t))
    o = np.nonzero(mksetpv(uset, "g", "o"))[0]
    pvdofo = locate.get_intersection(uset[o, :2].astype(np.int64),
                                     dof)[0]
    haso = 0
    if pvdofo.size > 0:
        haso = 1
        o = o[pvdofo]
        sets = np.hstack((sets, o))
    # fetch goq (o-set from q-set), defaulting to zeros if missing:
    if 'goq' in nas and se in nas['goq']:
        goq = nas['goq'][se]
    else:
        q1 = sum(mksetpv(uset, "g", "q"))
        if q1 > 0:
            warnings.warn("nas['goq'][{}] not found, but q-set do"
                          " exist. Assuming it is all zeros. "
                          "This can happen when q-set DOF are "
                          "defined but modes are not calculated.".
                          format(se), RuntimeWarning)
            o1 = sum(mksetpv(uset, "g", "o"))
            goq = np.zeros((o1, q1))
        else:
            goq = np.array([[]])
    # fetch got (o-set from t-set), defaulting to zeros if missing:
    if 'got' in nas and se in nas['got']:
        got = nas['got'][se]
    else:
        warnings.warn("nas['got'][{}] not found. Assuming it is "
                      "all zeros. This can happen for a Benfield"
                      "-Hruda collector superelement since all "
                      "b-set (really upstream q-set) are not "
                      "connected to other DOF in the stiffness.".
                      format(se), RuntimeWarning)
        o1 = sum(mksetpv(uset, "g", "o"))
        t1 = sum(mksetpv(uset, "g", "t"))
        got = np.zeros((o1, t1))
    ct = got.shape[1]
    cq = goq.shape[1]
    hasm, m, pvdofm, gm = _proc_mset(nas, se, dof)
    if hasm:
        t_n = np.nonzero(mksetpv(uset, "n", "t"))[0]
        o_n = np.nonzero(mksetpv(uset, "n", "o"))[0]
        q_n = np.nonzero(mksetpv(uset, "n", "q"))[0]
        sets = np.hstack((sets, m))
    # see if any of the DOF are in the q-set
    q = np.nonzero(mksetpv(uset, "g", "q"))[0]
    hasq = 0
    if q.size > 0:
        pvdofq = locate.get_intersection(uset[q, :2].astype(np.int64),
                                         dof)[0]
        if pvdofq.size > 0:
            hasq = 1
            q = q[pvdofq]
            sets = np.hstack((sets, q))
    # see if any of the DOF are in the s-set
    hass = 0
    s = np.nonzero(mksetpv(uset, "g", "s"))[0]
    if s.size > 0:
        pvdofs = locate.get_intersection(uset[s, :2].astype(np.int64),
                                         dof)[0]
        if pvdofs.size > 0:
            hass = 1
            s = s[pvdofs]
            sets = np.hstack((sets, s))
    # sanity check: every requested DOF must have been found:
    fulldof = uset[sets, :2].astype(np.int64)
    pv, pv2 = locate.get_intersection(fulldof, dof, 2)
    if len(pv2) != len(pvdof):
        notpv2 = locate.flippv(pv2, len(pvdof))
        print('missing_dof = ', dof[notpv2])
        raise RuntimeError("bug in formtran() since dof in recovery"
                           " set does not contain all of the dof in"
                           " DOF. See missing_dof echo above.")
    # sets = [ t, o, m, q, s ] -- assemble the rows block by block:
    tran = np.zeros((len(pv), ct+cq))
    R = 0
    if hast:
        I = np.eye(ct)
        R = len(t)
        tran[:R, t_a] = I[pvdoft]
    if haso:
        # o-set recovered through got (and goq if q-set exists):
        tran[R:R+len(o), t_a] = got[pvdofo]
        if cq:
            tran[R:R+len(o), q_a] = goq[pvdofo]
        R += len(o)
    if hasm:
        # m-set recovered through gm; o-set dependence is chained
        # through got/goq:
        ulvsm = np.zeros((gm.shape[0], ct+cq))
        gmo = gm[:, o_n]
        v = np.nonzero(np.any(gmo, 0))[0]
        if v.size > 0:
            gmo = gmo[:, v]
            ulvsm[:, t_a] = gm[:, t_n] + gmo @ got[v]
            if cq:
                ulvsm[:, q_a] = gmo @ goq[v]
        else:
            ulvsm[:, t_a] = gm[:, t_n]
        if cq:
            # m-set dependent on q-set (via MPC maybe)
            ulvsm[:, q_a] += gm[:, q_n]
        tran[R:R+len(m)] = ulvsm
        R += len(m)
    if hasq:
        I = np.eye(cq)
        tran[R:R+len(q), q_a] = I[pvdofq]
        R += len(q)
    if hass:
        # s-set DOF are constrained: rows stay zero
        cu = tran.shape[1]
        tran[R:R+len(s)] = np.zeros((len(s), cu))
    # order DOF as requested:
    tran = tran[pv]
    return tran, dof
def formulvs(nas, seup, sedn=0, keepcset=True, shortcut=True,
             gset=False):
    """
    Form ULVS for an upstream SE relative to a given downstream SE.

    Parameters
    ----------
    nas : dictionary
        This is the nas2cam dictionary: ``nas = op2.rdnas2cam()``
    seup : integer
        The id of the upstream superelement.
    sedn : integer; optional
        The id of the downstream superelement.
    keepcset : bool; optional
        If true, keeps any C-set rows/columns in the result. This is
        useful when the C-set are real (that is, NOT used for 'left-
        over' DOF after defining the Q-set). Set `keepcset=False` to
        delete C-set.
    shortcut : bool; optional
        If true, use the ULVS already in `nas` if it's there.
    gset : bool; optional
        If true, and `sedn` == 0, transform from g-set instead of
        modal DOF. See below.

    Returns
    -------
    ULVS : 2d numpy ndarray or 1
        Transformation from either the modal or physical DOF of the
        downstream superelement sedn to the T and Q-set DOF of the
        upstream superelement seup::

            if sedn > 0:
                {upstream T & Q} = ULVS * {downstream T & Q}
            if sedn == 0:
                {upstream T & Q} = ULVS * {modal}  (for gset == False)
                {upstream T & Q} = ULVS * {G-Set}  (for gset == True)

        Returns 1 if seup == sedn.

    This routine starts from seup and works down to sedn, forming the
    appropriate ULVS at each level (by calling formtran()) and
    multiplying them together to form the total ULVS from sedn DOF to
    seup T & Q-set DOF.

    See also :func:`formdrm`, :func:`formtran`.
    """
    # work from up to down:
    r = _findse(nas, seup)
    sedown = nas['selist'][r, 1]
    if seup in [sedown, sedn]:
        return 1.
    # shortcut only applies to the standard modal (sedn=0, not g-set) case
    if (shortcut and sedn == 0 and not gset and
            'ulvs' in nas and seup in nas['ulvs']):
        return nas['ulvs'][seup]
    ulvs = 1.
    while 1:
        usetup = nas['uset'][seup]
        usetdn = nas['uset'][sedown]
        tqup = upstream_aset_partition_vector(nas, seup)
        ulvs1, unused_outdof = formtran(nas, sedown, usetdn[tqup, :2], gset)
        # get rid of c-set if required
        if not keepcset:
            noncrows = np.logical_not(mksetpv(usetup, "a", "c"))
            if sedown != 0:
                nonccols = np.logical_not(mksetpv(usetdn, "a", "c"))
                ulvs1 = ulvs1[np.ix_(noncrows, nonccols)]
            else:
                ulvs1 = ulvs1[noncrows]
        # bug fix: ``ulvs = ulvs @ ulvs1`` raised on the first pass
        # because ``ulvs`` starts as the Python float 1.0 and the ``@``
        # (matmul) operator does not accept scalar operands; np.dot()
        # treats a scalar as a plain multiply, so the first product is
        # simply ``ulvs1`` and subsequent products are true matmuls.
        ulvs = np.dot(ulvs, ulvs1)
        if sedown == sedn:
            return ulvs
        # step one level further downstream
        seup = sedown
        r = _findse(nas, seup)
        sedown = nas['selist'][r, 1]
def formdrm(nas, seup, dof, sedn=0, gset=False):
    """
    Form a displacement data recovery matrix for specified dof.

    Parameters
    ----------
    nas : dictionary
        This is the nas2cam dictionary: ``nas = op2.rdnas2cam()``
    seup : integer
        The id of the upstream superelement.
    dof : 1d or 2d array
        One or two column matrix: [ids] or [ids, dofs]; if one column,
        the second column is internally set to 123456 for each id
    sedn : integer
        The id of the downstream superelement.
    gset : bool; optional
        If true, and `sedn` == 0, transform from g-set instead of
        modal DOF.

    Returns
    -------
    tuple: (DRM, outdof)
        DRM : ndarray
            Transformation from downstream DOF to the specified
            upstream DOF (`dof`)::

                if sedn > 0:
                    {upstream DOF} = DRM * {downstream T & Q}
                if sedn == 0:
                    {upstream DOF} = DRM * {modal}  (for gset == False)
                    {upstream DOF} = DRM * {G-Set}  (for gset == True)
        outdof : ndarray
            The expanded version of the `dof` input as returned by
            :func:`mkdofpv`.

    Built from :func:`formtran` (recovery at seup) chained with
    :func:`formulvs` (seup down to sedn).
    """
    drm, outdof = formtran(nas, seup, dof, gset=gset)
    ulvs = formulvs(nas, seup, sedn, keepcset=True,
                    shortcut=True, gset=gset)
    if np.size(ulvs) > 1:
        # formtran may keep null c-set columns that formulvs dropped;
        # trim trailing all-zero columns so the shapes conform
        ncols = drm.shape[1]
        nrows = ulvs.shape[0]
        if nrows < ncols and not np.any(drm[:, nrows:]):
            drm = drm[:, :nrows]
    return np.dot(drm, ulvs), outdof
def AddULVS(nas, *ses):
    """
    Add ULVS matrices to the nas (nas2cam) record.

    Parameters
    ----------
    nas : dictionary
        This is the nas2cam dictionary: ``nas = op2.rdnas2cam()``
    *ses : list
        Remaining args are the superelement ids for which to compute a
        ULVS via :func:`formulvs`.
    """
    # ensure the cache dict exists, then fill in only the missing SEs
    ulvs_cache = nas.setdefault('ulvs', {})
    for se in ses:
        if se not in ulvs_cache:
            ulvs_cache[se] = formulvs(nas, se)
if __name__ == '__main__': # pragma: no cover
import doctest
doctest.testmod()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,653
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/op2/op2_interface/op2_scalar.py
|
#pylint: disable=R0913
"""
Defines the sub-OP2 class. This should never be called outside of the OP2 class.
- OP2_Scalar(debug=False, log=None, debug_file=None)
**Methods**
- set_subcases(subcases=None)
- set_transient_times(times)
- read_op2(op2_filename=None, combine=False)
- set_additional_generalized_tables_to_read(tables)
- set_additional_result_tables_to_read(tables)
- set_additional_matrices_to_read(matrices)
**Attributes**
- total_effective_mass_matrix
- effective_mass_matrix
- rigid_body_mass_matrix
- modal_effective_mass_fraction
- modal_participation_factors
- modal_effective_mass
- modal_effective_weight
- set_as_msc()
- set_as_optistruct()
**Private Methods**
- _get_table_mapper()
- _not_available(data, ndata)
- _table_crasher(data, ndata)
- _table_passer(data, ndata)
- _validate_op2_filename(op2_filename)
- _create_binary_debug()
- _make_tables()
- _read_tables(table_name)
- _skip_table(table_name)
- _read_table_name(rewind=False, stop_on_failure=True)
- _update_generalized_tables(tables)
- _read_cmodext()
- _read_cmodext_helper(marker_orig, debug=False)
- _read_geom_table()
- _finish()
"""
import os
from struct import Struct, unpack
from collections import defaultdict
from typing import List, Tuple, Dict, Set, Union, Optional, Any
from numpy import array
import numpy as np
import cpylog
if cpylog.__version__ >= '1.5.0': # pragma: no cover
#import warnings
#warnings.warn('run "pip install cpylog>=1.5.0"')
from cpylog import get_logger2, log_exc
else: # pragma: no cover
from cpylog import get_logger2
def log_exc(*args, **kwargs):
pass
from pyNastran import is_release, __version__
from pyNastran.f06.errors import FatalError
from pyNastran.op2.errors import EmptyRecordError
from pyNastran.op2.op2_interface.op2_reader import OP2Reader, reshape_bytes_block
from pyNastran.bdf.cards.params import PARAM
#============================
from pyNastran.op2.op2_interface.msc_tables import MSC_RESULT_TABLES, MSC_MATRIX_TABLES, MSC_GEOM_TABLES
from pyNastran.op2.op2_interface.nx_tables import NX_RESULT_TABLES, NX_MATRIX_TABLES, NX_GEOM_TABLES
from pyNastran.op2.op2_interface.op2_common import OP2Common
from pyNastran.op2.fortran_format import FortranFormat
from pyNastran.utils import is_binary_file
"""
ftp://161.24.15.247/Nastran2011/seminar/SEC04-DMAP_MODULES.pdf
Datablock Type Description
EFMFSMS Matrix 6 x 1 Total Effective mass matrix
EFMASSS Matrix 6 x 6 Effective mass matrix
RBMASS Matrix 6 x 6 Rigid body mass matrix
EFMFACS Matrix 6 X N Modal effective mass fraction matrix
MPFACS Matrix 6 x N Modal participation factor matrix
MEFMASS Matrix 6 x N Modal effective mass matrix
MEFWTS Matrix 6 x N Modal effective weight matrix
RAFGEN Matrix N x M Generalized force matrix
RADEFMP Matrix N X U2 Effective inertia loads
BHH Matrix N x N Viscous damping matrix
K4HH Matrix N x N Structural damping matrix
RADAMPZ Matrix N x N equivalent viscous damping ratios
RADAMPG Matrix N X N equivalent structural damping ratio
LAMA LAMA Eigenvalue summary table
OGPWG OGPWG Mass properties output
OQMG1 OQMG Modal MPC forces
RANCONS ORGY1 Constraint mode element strain energy table
RANEATC ORGY1 Attachment mode element strain energy table
RAGCONS OGPFB Constraint mode grid point force table
RAGEATC OGPFB Attachment mode grid point force table
RAPCONS OES Constraint mode ply stress table
RAPEATC OES Attachment mode ply stress table
RASCONS OES Constraint mode element stress table
RAECONS OES Constraint mode element strain table
RASEATC OES Attachment mode element stress table
RAEEATC OES Attachment mode element strain table
OES1C OES Modal Element Stress Table
OES1X OES Modal Element Stress Table
OSTR1C OES Modal Element Strain Table
OSTR1X OSTR Modal Element Strain Table
RAQCONS OUG Constraint mode MPC force table
RADCONS OUG Constraint mode displacement table
RADEFFM OUG Effective inertia displacement table
RAQEATC OUG Attachment mode MPC force table
RADEATC OUG Attachment mode displacement table
OUGV1 OUG Eigenvector Table
RAFCONS OEF Constraint mode element force table
RAFEATC OEF Attachment mode element force table
OEF1X OEF Modal Element Force Table
OGPFB1 OGPFB Modal Grid Point Force Table
ONRGY1 ONRGY1 Modal Element Strain Energy Table
ONRGY2 ONRGY1
#--------------------
RADCONS - Displacement Constraint Mode
RADDATC - Displacement Distributed Attachment Mode
RADNATC - Displacement Nodal Attachment Mode
RADEATC - Displacement Equivalent Inertia Attachment Mode
RADEFFM - Displacement Effective Inertia Mode
RAECONS - Strain Constraint Mode
RAEDATC - Strain Distributed Attachment Mode
RAENATC - Strain Nodal Attachment Mode
RAEEATC - Strain Equivalent Inertia Attachment Mode
RAFCONS - Element Force Constraint Mode
RAFDATC - Element Force Distributed Attachment Mode
RAFNATC - Element Force Nodal Attachment Mode
RAFEATC - Element Force Equivalent Inertia Attachment Mode
RALDATC - Load Vector Used to Compute the Distributed Attachment M
RANCONS - Strain Energy Constraint Mode
RANDATC - Strain Energy Distributed Attachment Mode
RANNATC - Strain Energy Nodal Attachment Mode
RANEATC - Strain Energy Equivalent Inertia Attachment Mode
RAQCONS - Ply Strains Constraint Mode
RAQDATC - Ply Strains Distributed Attachment Mode
RAQNATC - Ply Strains Nodal Attachment Mode
RAQEATC - Ply Strains Equivalent Inertia Attachment Mode
RARCONS - Reaction Force Constraint Mode
RARDATC - Reaction Force Distributed Attachment Mode
RARNATC - Reaction Force Nodal Attachment Mode
RAREATC - Reaction Force Equivalent Inertia Attachment Mode
RASCONS - Stress Constraint Mode
RASDATC - Stress Distributed Attachment Mode
RASNATC - Stress Nodal Attachment Mode
RASEATC - Stress Equivalent Inertia Attachment Mode
RAPCONS - Ply Stresses Constraint Mode
RAPDATC - Ply Stresses Distributed Attachment Mode
RAPNATC - Ply Stresses Nodal Attachment Mode
RAPEATC - Ply Stresses Equivalent Inertia Attachment Mode
RAGCONS - Grid Point Forces Constraint Mode
RAGDATC - Grid Point Forces Distributed Attachment Mode
RAGNATC - Grid Point Forces Nodal Attachment Mode
RAGEATC - Grid Point Forces Equivalent Inertia Attachment Mode
RADEFMP - Displacement PHA^T * Effective Inertia Mode
RADAMPZ - Viscous Damping Ratio Matrix
RADAMPG - Structural Damping Ratio Matrix
RAFGEN - Generalized Forces Matrix
BHH - Modal Viscous Damping Matrix
K4HH - Modal Structural Damping Matrix
"""
GEOM_TABLES = MSC_GEOM_TABLES + NX_GEOM_TABLES
AUTODESK_MATRIX_TABLES = [
#b'BELM',
b'KELM',
#b'MELM',
] # type: List[bytes]
# this will be split later
TEST_MATRIX_TABLES = [b'ATB', b'BTA', b'MYDOF']
RESULT_TABLES = NX_RESULT_TABLES + MSC_RESULT_TABLES
MATRIX_TABLES = NX_MATRIX_TABLES + MSC_MATRIX_TABLES + AUTODESK_MATRIX_TABLES + TEST_MATRIX_TABLES + [b'MEFF']
#GEOM_TABLES = MSC_GEOM_TABLES
#RESULT_TABLES = MSC_RESULT_TABLES
#MATRIX_TABLES = MSC_MATRIX_TABLES
# TODO: these are weird...
# RPOSTS1, MAXRATI, RESCOMP, PDRMSG
INT_PARAMS_1 = {
b'OMODES', b'LGSTRN', b'ADJFRQ', b'BSHDMP', b'BSHDMP4',
b'POST', b'OPPHIPA', b'OPPHIPB', b'GRDPNT', b'RPOSTS1', b'BAILOUT',
b'COUPMASS', b'CURV', b'INREL', b'MAXRATI', b'OG',
b'S1AM', b'S1M', b'DDRMM', b'MAXIT', b'PLTMSG', b'LGDISP', b'NLDISP',
b'OUNIT2K', b'OUNIT2M', b'RESCOMP', b'PDRMSG', b'LMODES', b'USETPRT',
b'NOCOMPS', b'OPTEXIT', b'RSOPT', b'GUSTAERO', b'MPTUNIT',
b'USETSEL', b'NASPRT', b'DESPCH', b'DESPCH1', b'COMPARE', b'DBNBLKS', b'NEWSEQ', b'OLDSEQ',
b'METHCMRS', b'NOFISR', b'KGGCPCH', b'ERROR', b'DBCDIAG', b'GPECT', b'LSTRN',
b'DBDROPT', b'SEOP2CV', b'IRES', b'SNORMPRT', b'DBDRNL', b'VMOPT',
b'OSWPPT', b'KDAMP', b'KDAMPFL', b'MATNL', b'MPCX', b'GEOMPLT', b'NOELOP',
b'NOGPF', b'PROUT', b'SUPER', b'LGDIS', b'EST', b'SEP1XOVR',
b'FRSEID', b'HRSEID', b'LRSEID', b'MODACC', b'XFLAG', b'TSTATIC',
b'NASPDV', b'RMXCRT', b'RMXTRN', b'DBCLEAN', b'LANGLE', b'SEMAPPRT',
b'FIXEDB', b'AMGOK', b'ASING', b'CNSTRT', b'CURVPLOT', b'CYCIO',
b'CYCSEQ', b'DBDICT', b'DBINIT', b'DBSET1', b'DBSET2', b'DBSET3', b'DBSET4',
b'DBSORT', b'DOPT', b'FACTOR', b'ALTSHAPE', b'MODTRK', b'IFTM', b'INRLM',
b'KINDEX', b'KMIN', b'KMAX', b'LARGEDB', b'LOADINC', b'LOADING', b'LOOP',
b'LOOPID', b'MODEL', b'MOREK', b'NEWDYN', b'NFECI', b'NINTPTS',
b'NLAYERS', b'NOELOF', b'NOMSGSTR', b'NONCUP', b'NUMOUT', b'NUMOUT1', b'NUMOUT2',
b'OPGTKG', b'OPPHIB', b'OUTOPT', b'PKRSP', b'RSPECTRA', b'RSPRINT',
b'S1G', b'SCRSPEC', b'SEMAPOPT', b'SEQOUT', b'SESEF', b'SKPAMG', b'SKPAMP',
b'SLOOPID', b'SOLID', b'SPCGEN', b'SRTELTYP', b'SRTOPT', b'START', b'SUBID',
b'SUBSKP', b'TABID', b'TESTNEG', b'BDMNCON', b'FRUMIN',
# not defined in qrg...
b'NT', b'PNCHDB', b'DLOAD', b'NLOAD', b'NOAP', b'NOCMPFLD', b'NODATA',
b'NODJE', b'NOMECH', b'NOSDR1', b'NOSHADE', b'NOSORT1', b'NOTRED',
b'NSEGS', b'OLDELM', b'OPADOF', b'OUTPUT', b'P1', b'P2', b'P3', b'PCHRESP',
b'PLOT', b'PLOTSUP', b'PRTPCH', b'RADLIN', b'RESDUAL', b'S1', b'SDATA',
b'SEFINAL', b'SEMAP1', b'SKPLOAD', b'SKPMTRX', b'SOLID1', b'SSG3',
b'PEDGEP', b'ACMSPROC', b'ACMSSEID', b'ACOUS', b'ACOUSTIC', b'ADJFLG',
b'ADJLDF', b'AEDBCP', b'AESRNDM', b'ARCSIGNS', b'ATVUSE', b'BADMESH', b'BCHNG',
b'BCTABLE', b'ROTCSV', b'ROTGPF', b'BEARDMP', b'BEARFORC', b'OP2FMT',
b'LRDISP',
# ???
b'CHKSEC', b'CMSMETH', b'CNTNSUB', b'CNTSTPS', b'CONCHG', b'CP',
b'DBDRPRJ', b'DBDRVER', b'DDAMRUN', b'DESCONX', b'DESEIG', b'DESFINAL',
b'DESMAX', b'DESSOLAP', b'DIAGOPT',
b'DOBUCKL', b'DOF123', b'DOMODES', b'DOSTATIC', b'DOTRIP', b'DRESP', b'DSGNOPTX',
b'DYNAMICX', b'EBULK', b'EIGNFREQ', b'ELOOPID',
b'FDEPCB', b'FLUIDMP', b'FLUIDSE', b'FMODE', b'FREQDEP', b'FREQDEPS',
b'GENEL', b'GEOMFLAG', b'GEOMU', b'GKCHNG', b'GLUSET', b'GMCONV', b'GNSTART',
b'GOODVER', b'GOPH2', b'GRIDFMP', b'GRIDMP', b'HNNLK', b'ICTASET', b'IFPCHNG',
b'INEP', b'INP2FMT', b'INP4FMT', b'INREL0', b'ITAPE', b'ITOITCNT',
b'ITOMXITR', b'ITONDVAR', b'ITONGHBR', b'ITONOBJF', b'ITOOPITR', b'ITOPALG',
b'ITOPALLR', b'ITOPDIAG', b'ITOPOPT', b'ITOSIMP',
b'IUNIT', b'K4CHNG', b'KCHNG', b'KREDX', b'LANGLES',
b'LBEARING', b'LDSTI1', b'LMDYN', b'LMODESFL', b'LMSTAT', b'LNUMROT',
b'LOADGENX', b'LOADREDX', b'LOADU', b'LODCHG', b'LROTOR', b'LTOPOPT',
b'LUSET', b'LUSETD', b'LUSETS', b'LUSETX', b'MATGENX',
b'MAXITER', b'MAXSEIDX', b'MBDIFB', b'MBDIFO', b'MBDLMN',
b'MCHNG', b'MDOF', b'MDTRKFLG', b'MELPG', b'MGRID', b'MLTIMSTR', b'MODESX',
b'MODETRAK', b'MPIFRHD', b'MPNFLG', b'MREDX', b'MSCOP2', b'NACEXTRA',
b'NCNOFFST', b'NDISOFP', b'NDVAR', b'NEWSET', b'NGELS', b'NJ', b'NK',
b'NLBEAR', b'NLCBFOR', b'NMLOOP', b'NMSOL', b'NOA', b'NOASET', b'NOCOMP',
b'NOFASET', b'NOFGSET', b'NOGENL', b'NOGEOM3', b'NOK4GG', b'NOK4JJ',
b'NOKGGX', b'NOKJJX', b'NOLSET', b'NOMGG', b'NOMGGX', b'NOMJJX', b'NOQSET',
b'NORADMAT', b'NORBM', b'NOSE', b'NOSIMP', b'NOSSET', b'NOUE', b'NOUP',
b'NOYSET', b'NOZSET', b'NQSET', b'NR1OFFST', b'NR2OFFST', b'NR3OFFST',
b'NROTORS', b'NSE', b'NSKIP0', b'NSOL', b'NSOLF', b'NUMPAN', b'NX',
b'O2E', b'OADPMAX', b'OALTSHP', b'ODESMAX', b'ODSFLG', b'OMAXR',
b'OP2SE', b'OP4FMT', b'OP4SE', b'OPGEOM', b'OPTIFCS',
b'OPTII231', b'OPTII408', b'OPTII411', b'OPTII420', b'OPTIIDMP', b'OPTISNS',
b'OTAPE', b'OUNIT1', b'OUNIT2', b'OUNIT2R', b'OUTFMP', b'OUTSMP', b'PANELMP',
b'PBCONT', b'PCHNG', b'PKLLR', b'POSTU', b'PRTMAT', b'PSLGDVX',
b'PSLOAD', b'PSORT', b'PVALINIT', b'PVALLAST', b'PVALLIST', b'PYCHNG',
b'REFOPT', b'RESLTOPT', b'RESPSENX',
b'RMXPANEL', b'ROTPRES', b'ROTPRT', b'RPDFRD', b'RVCHG', b'RVCHG1',
b'RVCHG2', b'S1AG', b'SAVERSTL', b'SDSRFLAG', b'SEBULK',
b'SEDMP231', b'SEDMP265', b'SEDMP408', b'SEDMP411', b'SEDMP445', b'SEDMPFLG',
b'SELDPRS', b'SKIPSE', b'SNDSEIDX', b'SOLFINAL',
b'SOLNLX', b'SOLNX', b'SOLVSUB', b'SPLINE', b'STOP0', b'STRUCTMP', b'SWEXIST',
b'TORSIN', b'UACCEL', b'UNIQIDS', b'VUELJUMP', b'VUENEXT',
b'VUGJUMP', b'VUGNEXT', b'WRTMAT',
b'XNTIPS', b'XRESLTOP', b'XSEMEDIA', b'XSEUNIT', b'XTIPSCOL',
b'XYUNIT', b'XZCOLLCT', b'Z2XSING',
b'ZUZRI1', b'ZUZRI2', b'ZUZRI3', b'ZUZRI4', b'ZUZRI5', b'ZUZRI6', b'ZUZRI7', b'ZUZRI8', b'ZUZRI9', b'ZUZRI10',
b'ZUZRL1', b'ZUZRL2', b'ZUZRL3', b'ZUZRL4', b'ZUZRL5', b'ZUZRL6', b'ZUZRL7', b'ZUZRL8', b'ZUZRL9', b'ZUZRL10',
b'DBCPAE', b'DBCPATH',
b'EXTBEMI', b'EXTBEMO', b'EXTDRUNT', b'EXTUNIT',
b'UZROLD',
b'HIRES'
# no
#b'SEPS', b'SMALLQ', b'FEPS',
}
# PARAM names (as bytes, matching the raw OP2 record) whose value is a
# single float.
FLOAT_PARAMS_1 = {
    b'EPPRT',
    b'WTMASS', b'SNORM', b'PATVER', b'MAXRATIO', b'EPSHT',
    b'SIGMA', b'TABS', b'AUNITS', b'BOLTFACT', b'LMSCAL',
    # bug fix: 'DSZERO' was a str in this set of bytes names; a bytes
    # PARAM key could never match it
    b'DSZERO', b'G', b'GFL', b'LFREQ', b'HFREQ', b'ADPCON',
    b'W3', b'W4', b'W3FL', b'W4FL', b'PREFDB',
    b'EPZERO', b'DSZERO', b'TINY', b'TOLRSC',
    b'FRSPD', b'HRSPD', b'LRSPD', b'MTRFMAX', b'ROTCMRF', b'MTRRMAX',
    b'LAMLIM', b'BIGER', b'BIGER1', b'BIGER2', b'CLOSE',
    b'EPSBIG', b'EPSMALC', b'EPSMALU', b'KDIAG', b'MACH', b'VREF',
    b'STIME', b'TESTSE', b'LFREQFL', b'Q', b'ADPCONS', b'AFNORM', b'AFZERO',
    b'GE', b'MASSDENS',
    # should this be FLOAT_PARAMS_1???
    b'HFREQFL',
    # not defined
    b'CNTSCL',
    b'PRPA', b'PRPHIVZ', b'PRPJ', b'PRRULV', b'RMAX', b'ARF', b'BOV',
    b'ARS', # b'BSHDAMP',
    b'EPSRC',
    # floats - not verified
    b'THRSHOLD', b'SEPS', b'SMALLQ', b'FEPS',
    b'DSNOKD',
    # or integer (not string)
    b'CONFAC',
    b'DFREQ', b'DFRSPCF', b'DSTSPCF', b'DTRSPCF',
    b'DUCTFMAX',
    b'EXTDONE',
    b'FZERO', b'LMFACT', b'MPCZERO',
    b'RESVPGF', b'RESVRAT', b'SWPANGLE', b'UPFAC',
    b'ITODENS',
    b'ITOPCONV',
    b'ITORMAS',
    b'ITOSIMP1',
    b'ITOSIMP2',
    b'MAXRPM',
    b'OBJIN',
    b'PITIME',
    b'RGBEAMA', b'RGBEAME', b'RGLCRIT', b'RGSPRGK',
    b'VOL', b'VOLS',
    b'WGT', b'WGTS',
    b'XSMALLQ',
    b'XUPFAC',
    b'ZUZRR1', b'ZUZRR2', b'ZUZRR3', b'ZUZRR4', b'ZUZRR5', b'ZUZRR6', b'ZUZRR7', b'ZUZRR8', b'ZUZRR9', b'ZUZRR10',
    b'K6ROT',
    # models/msc/units_mass_spring_damper.op2
    b'RBTR',
}
FLOAT_PARAMS_2 = {
b'BETA', b'CB1', b'CB2', b'CK1', b'CK2', b'CK3', b'CK41', b'CK42',
b'CM1', b'CM2',
b'G1', b'G2', b'G3', b'G4', b'G5', b'G6', b'G7', b'G8', b'G9', b'G10',
b'G11', b'G12', b'G13', b'G14', b'G15', b'G16', b'G17', b'G18', b'G19',
b'ALPHA1', b'ALPHA2',
b'CA1', b'CA2',
b'CP1', b'CP2',
b'LOADFACS',
b'ZUZRC1', b'ZUZRC2', b'ZUZRC3', b'ZUZRC4', b'ZUZRC5', b'ZUZRC6', b'ZUZRC7', b'ZUZRC8', b'ZUZRC9', b'ZUZRC10',
# should this be FLOAT_PARAMS_1???
#b'EPPRT',
}
INT_PARAMS_2 = {
b'LOADFACS',
b'ZUZRC1', b'ZUZRC2', b'ZUZRC3', b'ZUZRC4', b'ZUZRC5', b'ZUZRC6', b'ZUZRC7', b'ZUZRC8', b'ZUZRC9', b'ZUZRC10',
}
#DOUBLE_PARAMS_1 = [] # b'Q'
STR_PARAMS_1 = {
b'POSTEXT', b'PRTMAXIM', b'AUTOSPC', b'OGEOM', b'PRGPST',
b'RESVEC', b'RESVINER', b'ALTRED', b'OGPS', b'OIBULK', b'OMACHPR',
b'UNITSYS', b'F56', b'OUGCORD', b'OGEM', b'EXTSEOUT',
b'CDIF', b'SUPAERO', b'RSCON', b'AUTOMPC', b'DBCCONV',
b'AUTOSPRT', b'PBRPROP', b'OMID', b'HEATSTAT', b'SECOMB', b'ELEMITER',
b'ELITASPC', b'DBCONV', b'SHLDAMP', b'COMPMATT', b'SPCSTR', b'ASCOUP',
b'PRTRESLT', b'SRCOMPS', b'CHECKOUT', b'SEMAP', b'AESMETH', b'RESVALT',
b'ROTSYNC', b'SYNCDAMP', b'PRGPOST', b'WMODAL', b'SDAMPUP',
b'COLPHEXA', b'CHKOUT', b'CTYPE', b'DBNAME', b'VUHEXA', b'VUPENTA', b'VUTETRA',
b'MESH', b'OPTION', b'PRINT', b'SENAME', b'MECHFIX', b'RMXTRAN', b'FLEXINV',
b'ADSTAT', b'ACOUT', b'ACSYM', b'ACTYPE', b'ADBX', b'AUTOSEEL',
b'RDSPARSE',
b'SPARSEDR',
b'BSHDAMP',
b'CORROPT',
b'DBACOUS',
b'DBALLNOQ',
b'DBALLX',
b'DBAPI',
b'DBAPP',
b'DBCNT',
b'DBCOVWRT',
b'DBDNOPT',
b'DBDNR', b'DBDNR1', b'DBDNX', b'DBEXT', b'DBGOA', b'DBMAP',
b'DBOFP2X', b'DBOFPX', b'DBRCVX', b'DBSCRR', b'DBUPOPT', b'DBUPR',
b'DBUPX', b'DBXSEDR', b'DBXSEDRR', b'DBZUZR', b'DSOR', b'DSOX',
b'DVGRDN', b'DYNSPCF', b'EQVSCR', b'EXTDROUT',
b'FLEXINCR', b'FTL', b'GDAMPF', b'GEOCENT', b'IFPSCR', b'IFPSOPT',
b'IFPX', b'IFPXOPT', b'MASTER', b'MODEOUT',
b'NXVER', b'OAPP', b'OCMP', b'OEE', b'OEEX', b'OEF', b'OEFX', b'OEPT',
b'OES', b'OESE', b'OESX', b'OGPF', b'OMPT', b'OPG', b'OPTIM', b'OQG',
b'OUG', b'OUMU', b'OUTSCR', b'PANAME', b'QSETREM', b'RESVSE', b'RESVSLI',
b'RESVSO', b'RSATT', b'SAVEOFP', b'SAVERST', b'SCRATCH', b'SDRPOPT',
b'SECOMB0', b'SELRNG', b'SERST', b'SOFTEXIT', b'SOLAPPI', b'SOLTYPI',
b'TDB0', b'TDBX', b'UPDTBSH',
b'USETSTR1', b'USETSTR2', b'USETSTR3', b'USETSTR4',
b'VMOPTSET', b'VUBEAM', b'VUQUAD4', b'VUTRIA3', b'WRN', b'XAUTOSPT',
b'XRESVECA', b'XRESVECO', b'XRESVIRA', b'XRESVIRO',
b'ZUZRCL1', b'ZUZRCL2', b'ZUZRCL3', b'ZUZRCL4', b'ZUZRCL5', b'ZUZRCL6', b'ZUZRCL7', b'ZUZRCL8', b'ZUZRCL9', b'ZUZRCL10',
b'ZUZRCH1', b'ZUZRCH2', b'ZUZRCH3', b'ZUZRCH4', b'ZUZRCH5', b'ZUZRCH6', b'ZUZRCH7', b'ZUZRCH8', b'ZUZRCH9', b'ZUZRCH10',
b'APPI', b'APPF',
# part of param, checkout
b'PRTBGPDT', b'PRTCSTM', b'PRTEQXIN', b'PRTGPDT',
b'PRTGPL', b'PRTGPTT', b'PRTMGG', b'PRTPG',
# superelements
b'EXTOUT', b'SESDAMP',
# TODO: remove these as they're in the matrix test and are user
# defined PARAMs; arguably all official examples should just work
# TODO: add an option for custom PARAMs
b'ADB', b'AEDB', b'MREDUC', b'OUTDRM', b'OUTFORM', b'REDMETH', b'DEBUG',
b'AEDBX', b'AERO', b'AUTOSUP0', b'AXIOPT',
}
def _check_unique_sets(*sets: List[Set[str]]):
"""verifies that the sets are unique"""
for i, seti in enumerate(sets):
for unused_j, setj in enumerate(sets[i+1:]):
intersectioni = seti.intersection(setj)
assert len(intersectioni) == 0, intersectioni
_check_unique_sets(INT_PARAMS_1, FLOAT_PARAMS_1, FLOAT_PARAMS_2, STR_PARAMS_1)
class OP2_Scalar(OP2Common, FortranFormat):
"""Defines an interface for the Nastran OP2 file."""
@property
def total_effective_mass_matrix(self):
    """6x1 total effective mass matrix (the EFMFSMS matrix; per the
    table in the module docstring); KeyError if not in ``self.matrices``"""
    return self.matrices['EFMFSMS']
@property
def effective_mass_matrix(self):
    """6x6 effective mass matrix (the EFMASSS matrix);
    KeyError if not in ``self.matrices``"""
    return self.matrices['EFMASSS']
@property
def rigid_body_mass_matrix(self):
    """6x6 rigid body mass matrix (the RBMASS matrix);
    KeyError if not in ``self.matrices``"""
    return self.matrices['RBMASS']
@property
def modal_effective_mass_fraction(self):
    """6xnmodes modal effective mass fraction matrix (EFMFACS);
    KeyError if not in ``self.matrices``"""
    return self.matrices['EFMFACS']#.dataframe
@property
def modal_participation_factors(self):
    """6xnmodes modal participation factor matrix (MPFACS);
    KeyError if not in ``self.matrices``"""
    return self.matrices['MPFACS']#.dataframe
@property
def modal_effective_mass(self):
    """6xnmodes modal effective mass matrix (MEFMASS);
    KeyError if not in ``self.matrices``"""
    return self.matrices['MEFMASS']#.dataframe
@property
def modal_effective_weight(self):
    """6xnmodes modal effective weight matrix (MEFWTS);
    KeyError if not in ``self.matrices``"""
    return self.matrices['MEFWTS']#.dataframe
@property
def monitor1(self):
    """deprecated accessor (v1.4): use ``op2.op2_results.monitor1``"""
    self.deprecated('op2.monitor1', 'op2.op2_results.monitor1', '1.4')
    return self.op2_results.monitor1
@monitor1.setter
def monitor1(self, monitor1):
    """deprecated setter (v1.4): use ``op2.op2_results.monitor1``"""
    self.deprecated('op2.monitor1', 'op2.op2_results.monitor1', '1.4')
    self.op2_results.monitor1 = monitor1
@property
def monitor3(self):
    """deprecated accessor (v1.4): use ``op2.op2_results.monitor3``"""
    self.deprecated('op2.monitor3', 'op2.op2_results.monitor3', '1.4')
    return self.op2_results.monitor3
@monitor3.setter
def monitor3(self, monitor3):
    """deprecated setter (v1.4): use ``op2.op2_results.monitor3``"""
    self.deprecated('op2.monitor3', 'op2.op2_results.monitor3', '1.4')
    self.op2_results.monitor3 = monitor3
@property
def matrix_tables(self):
    """module-level list of all known matrix table names (bytes)"""
    return MATRIX_TABLES
def set_as_nx(self):
    """Flag the OP2 as NX Nastran format (clears all other format flags)."""
    self.is_nx = True
    self.is_msc = False
    self.is_autodesk = False
    self.is_nasa95 = False
    self.is_optistruct = False
    self._nastran_format = 'nx'
def set_as_msc(self):
    """Flag the OP2 as MSC Nastran format (clears all other format flags)."""
    self.is_nx = False
    self.is_msc = True
    self.is_autodesk = False
    self.is_nasa95 = False
    self.is_optistruct = False
    self._nastran_format = 'msc'
def set_as_autodesk(self):
    """Flag the OP2 as Autodesk Nastran format (clears all other format flags)."""
    self.is_nx = False
    self.is_msc = False
    self.is_autodesk = True
    self.is_nasa95 = False
    self.is_optistruct = False
    self._nastran_format = 'autodesk'
def set_as_nasa95(self):
    """Flag the OP2 as NASA-95 Nastran format and swap in the
    NASA-95-specific record readers."""
    self.is_nx = False
    self.is_msc = False
    self.is_autodesk = False
    self.is_optistruct = False
    self.is_nasa95 = True
    self._nastran_format = 'nasa95'
    # NASA-95 OES/OEF records differ from the modern layouts, so the
    # generic load readers are rebound to their _nasa95 variants
    self.reader_oes._read_oes1_loads = self.reader_oes._read_oes1_loads_nasa95
    self.reader_oef._read_oef1_loads = self.reader_oef._read_oef1_loads_nasa95
    # the CQUAD4 geometry record also has a nasa95-specific layout;
    # guarded because reader_geom2 may not exist on this object
    if hasattr(self, 'reader_geom2') and hasattr(self.reader_geom2, '_read_cquad4_nasa95'):
        self.reader_geom2.geom2_map[(5408, 54, 261)] = ['CQUAD4', self.reader_geom2._read_cquad4_nasa95]
def set_as_optistruct(self):
    """Flag the OP2 as OptiStruct format (clears all other format flags)."""
    self.is_nx = False
    self.is_msc = False
    self.is_autodesk = False
    self.is_nasa95 = False
    self.is_optistruct = True
    self._nastran_format = 'optistruct'
def __init__(self, debug=False, log=None, debug_file=None):
    """
    Initializes the OP2_Scalar object

    Parameters
    ----------
    debug : bool/None; default=False
        used to set the logger if no logger is passed in
        True:  logs debug/info/warning/error messages
        False: logs info/warning/error messages
        None:  logs warning/error messages
    log : Log()
        a logging object to write debug messages to
        (.. seealso:: import logging)
    debug_file : str; default=None (No debug)
        sets the filename that will be written to
    """
    assert debug is None or isinstance(debug, bool), 'debug=%r' % debug
    self.log = get_logger2(log, debug=debug, encoding='utf-8')
    self._count = 0
    # filenames are filled in later by the read/write methods
    self.op2_filename = None
    self.bdf_filename = None
    self.f06_filename = None
    self.des_filename = None
    self.h5_filename = None
    self._encoding = 'utf8'
    #: should a MATPOOL "symmetric" matrix be stored as symmetric
    #: it takes double the RAM, but is easier to use
    self.apply_symmetry = True
    # initialize both parent classes (multiple inheritance)
    OP2Common.__init__(self)
    FortranFormat.__init__(self)
    self.is_vectorized = False
    # close the OP2 file object after reading unless told otherwise
    self._close_op2 = True
    self.result_names = set()
    self.grid_point_weight = {}
    self.words = []
    self.debug = debug
    self._last_comment = None
    #self.debug = True
    #self.debug = False
    #debug_file = None
    if debug_file is None:
        self.debug_file = None
    else:
        assert isinstance(debug_file, str), debug_file
        self.debug_file = debug_file
    self.op2_reader = OP2Reader(self)
def set_subcases(self, subcases=None):
    """
    Allows you to read only the subcases in the list of isubcases

    Parameters
    ----------
    subcases : List[int, ...] / int; default=None->all subcases
        list of [subcase1_ID,subcase2_ID]
    """
    if subcases is None or subcases == []:
        # no filter requested -> read every subcase in the OP2
        self.is_all_subcases = True
        self.valid_subcases = []
    else:
        # only the listed subcases will be kept
        self.is_all_subcases = False
        requested = [subcases] if isinstance(subcases, int) else subcases
        self.valid_subcases = set(requested)
    self.log.debug(f'set_subcases - subcases = {self.valid_subcases}')
def set_transient_times(self, times):  # TODO this name sucks...
    """
    Takes a dictionary of list of times in a transient case and
    gets the output closest to those times.

    Parameters
    ----------
    times : dict[isubcase, List[float]]
        maps each subcase id to the times of interest

    Examples
    --------
    >>> times = {subcase_id_1: [time1, time2],
                 subcase_id_2: [time3, time4]}

    .. warning:: I'm not sure this still works...
    """
    expected_times = {}
    for isubcase, etimes in times.items():
        # bug fix: this previously did ``etimes = list(times)``, which
        # replaced this subcase's time list with the dict's KEYS
        expected_times[isubcase] = array(sorted(etimes))
    self.expected_times = expected_times
def _get_table_mapper(self):
"""gets the dictionary of function3 / function4"""
# MSC table mapper
reader_onr = self.reader_onr
reader_opg = self.reader_opg
reader_oes = self.reader_oes
reader_oef = self.reader_oef
reader_oug = self.reader_oug
reader_oqg = self.reader_oqg
reader_ogpf = self.reader_ogpf
table_mapper = {
# -----------------------------------------------------------
# geometry
b'GEOM1' : [self._table_passer, self._table_passer], # GEOM1-Geometry-related bulk data
b'GEOM2' : [self._table_passer, self._table_passer], # GEOM2-element connectivity and SPOINT-related data
b'GEOM3' : [self._table_passer, self._table_passer], # GEOM3-Static and thermal loads
b'GEOM4' : [self._table_passer, self._table_passer], # GEOM4-constraints, DOF membership entries, MPC, and R-type element data
# superelements
b'GEOM1S' : [self._table_passer, self._table_passer], # GEOMx + superelement
b'GEOM2S' : [self._table_passer, self._table_passer],
b'GEOM3S' : [self._table_passer, self._table_passer],
b'GEOM4S' : [self._table_passer, self._table_passer],
b'GEOM1VU' : [self._table_passer, self._table_passer],
b'GEOM2VU' : [self._table_passer, self._table_passer],
b'BGPDTVU' : [self._table_passer, self._table_passer],
b'GEOM1N' : [self._table_passer, self._table_passer],
b'GEOM2N' : [self._table_passer, self._table_passer],
b'GEOM3N' : [self._table_passer, self._table_passer],
b'GEOM4N' : [self._table_passer, self._table_passer],
b'GEOM1OLD' : [self._table_passer, self._table_passer],
b'GEOM2OLD' : [self._table_passer, self._table_passer],
b'GEOM3OLD' : [self._table_passer, self._table_passer],
b'GEOM4OLD' : [self._table_passer, self._table_passer],
b'EPT' : [self._table_passer, self._table_passer], # elements
b'EPTS' : [self._table_passer, self._table_passer], # elements - superelements
b'EPTOLD' : [self._table_passer, self._table_passer],
b'MPT' : [self._table_passer, self._table_passer], # materials
b'MPTS' : [self._table_passer, self._table_passer], # materials - superelements
b'DYNAMIC' : [self._table_passer, self._table_passer],
b'DYNAMICS' : [self._table_passer, self._table_passer],
b'DIT' : [self._table_passer, self._table_passer],
b'DITS' : [self._table_passer, self._table_passer],
# this comment may refer to CSTM?
#F:\work\pyNastran\examples\Dropbox\pyNastran\bdf\cards\test\test_mass_01.op2
#F:\work\pyNastran\examples\matpool\gpsc1.op2
b'AXIC': [self._table_passer, self._table_passer],
# EDT - aero cards
# element deformation, aerodynamics, p-element, divergence analysis,
# and iterative solver input (includes SET1 entries)
b'EDT' : [self._table_passer, self._table_passer],
b'EDTS' : [self._table_passer, self._table_passer],
# contact/glue
b'CONTACT' : [self._table_passer, self._table_passer],
b'CONTACTS' : [self._table_passer, self._table_passer],
b'EDOM' : [self._table_passer, self._table_passer], # optimization
b'VIEWTB' : [self._table_passer, self._table_passer], # view elements
# =========================end geom passers=========================
# per NX
b'OESVM1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # isat_random
b'OESVM1C' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # isat_random
b'OSTRVM1' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # isat_random
b'OSTRVM1C' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # isat_random
b'OSTRVM2' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4],
b'OESVM2' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4], # big random
b'OES2C' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4],
b'OSTR2' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4], # TODO: disable
b'OSTR2C' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4],
#b'OES2C' : [self._table_passer, self._table_passer], # stress
#b'OSTR2' : [self._table_passer, self._table_passer], # TODO: enable
#b'OSTR2C' : [self._table_passer, self._table_passer],
b'OTEMP1' : [reader_oug._read_otemp1_3, reader_oug._read_otemp1_4],
# --------------------------------------------------------------------------
# MSC TABLES
# common tables
# unorganized
b'RADCONS': [reader_oug._read_oug1_3, reader_oug._read_oug_4], # Displacement Constraint Mode (OUG)
b'RADEFFM': [reader_oug._read_oug1_3, reader_oug._read_oug_4], # Displacement Effective Inertia Mode (OUG)
b'RADEATC': [reader_oug._read_oug1_3, reader_oug._read_oug_4], # Displacement Equivalent Inertia Attachment mode (OUG)
# broken - isat_launch_100hz.op2 - wrong numwide
# spc forces
b'RAQCONS': [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4], # Constraint mode MPC force table (OQG)
b'RAQEATC': [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4], # Attachment mode MPC force table (OQG)
#b'RAQCONS': [self._table_passer, self._table_passer], # temporary
#b'RAQEATC': [self._table_passer, self._table_passer], # temporary
# element forces
b'RAFCONS': [reader_oef._read_oef1_3, reader_oef._read_oef1_4], # Element Force Constraint Mode (OEF)
b'RAFEATC': [reader_oef._read_oef1_3, reader_oef._read_oef1_4], # Element Force Equivalent Inertia Attachment mode (OEF)
#b'RAFCONS': [self._table_passer, self._table_passer], # temporary
#b'RAFEATC': [self._table_passer, self._table_passer], # temporary
# grid point forces
b'RAGCONS': [reader_ogpf._read_ogpf1_3, reader_ogpf._read_ogpf1_4], # Grid Point Forces Constraint Mode (OGPFB)
b'RAGEATC': [reader_ogpf._read_ogpf1_3, reader_ogpf._read_ogpf1_4], # Grid Point Forces Equivalent Inertia Attachment mode (OEF)
#b'RAGCONS': [self._table_passer, self._table_passer], # Grid Point Forces Constraint Mode (OGPFB)
#b'RAGEATC': [self._table_passer, self._table_passer], # Grid Point Forces Equivalent Inertia Attachment mode (OEF)
# stress
b'RAPCONS': [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # Constraint mode ply stress table (OES)
b'RAPEATC': [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # Attachment mode ply stress table (OES)
#b'RAPCONS': [self._table_passer, self._table_passer], # Constraint mode ply stress table (OES)
#b'RAPEATC': [self._table_passer, self._table_passer], # Attachment mode ply stress table (OES)
# stress
b'RASCONS': [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # Stress Constraint Mode (OES)
b'RASEATC': [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # Stress Equivalent Inertia Attachment mode (OES)
#b'RASCONS': [self._table_passer, self._table_passer], # temporary
#b'RASEATC': [self._table_passer, self._table_passer], # temporary
# strain
b'RAEEATC': [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # Strain Equivalent Inertia Attachment mode (OES)
b'RAECONS': [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # Strain Constraint Mode (OSTR)
#b'RAEEATC': [self._table_passer, self._table_passer], # temporary
#b'RAECONS': [self._table_passer, self._table_passer], # temporary
# strain energy
b'RANEATC' : [reader_onr._read_onr1_3, reader_onr._read_onr1_4], # Strain Energy Equivalent Inertia Attachment mode (ORGY1)
b'RANCONS': [reader_onr._read_onr1_3, reader_onr._read_onr1_4], # Constraint mode element strain energy table (ORGY1)
#b'RANEATC': [self._table_passer, self._table_passer], # Strain Energy Equivalent Inertia Attachment mode (ORGY1)
#b'RANCONS': [self._table_passer, self._table_passer], # Constraint mode element strain energy table (ORGY1)
#b'TOL': [self._table_passer, self._table_passer],
b'MATPOOL': [self._table_passer, self._table_passer], # DMIG bulk data entries
b'RSOUGV1': [self._table_passer, self._table_passer],
b'RESOES1': [self._table_passer, self._table_passer],
b'RESEF1' : [self._table_passer, self._table_passer],
b'DESCYC' : [self._table_passer, self._table_passer],
#b'AEMONPT' : [self._read_aemonpt_3, self._read_aemonpt_4],
#=======================
# OEF
# element forces
#b'OEFITSTN' : [self._table_passer, self._table_passer], # works
b'OEFITSTN' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4],
b'OEFIT' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4], # failure indices
b'OEF1X' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4], # element forces at intermediate stations
b'OEF1' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4], # element forces or heat flux
b'HOEF1' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4], # element heat flux
b'DOEF1' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4], # scaled response spectra - forces
# off force
b'OEF2' : [reader_oef._read_oef2_3, reader_oef._read_oef2_4], # element forces or heat flux
#=======================
# OQG
# spc forces
# OQG1/OQGV1 - spc forces in the nodal frame
# OQP1 - scaled response spectra - spc-forces
b'OQG1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4],
b'OQG2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4],
b'OQGV1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4],
b'OQGV2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4],
b'OQP1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4],
b'OQP2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4],
# SPC/MPC tables depending on table_code
# SPC - NX/MSC
# MPC - MSC
b'OQGATO1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4],
b'OQGCRM1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4],
b'OQGPSD1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4],
b'OQGRMS1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4],
b'OQGNO1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4],
b'OQGATO2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4],
b'OQGCRM2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4],
b'OQGPSD2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4],
b'OQGRMS2' : [self._table_passer, self._table_passer], # buggy on isat random
b'OQGNO2' : [self._table_passer, self._table_passer], # buggy on isat random
#b'OQGRMS2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4], # buggy on isat random
#b'OQGNO2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4], # buggy on isat random
b'PSDF' : [self._read_psdf_3, self._read_psdf_4], # MSC NASA/goesr
#=======================
# MPC Forces
# these are NX tables
# OQGM1 - mpc forces in the nodal frame
b'OQMG1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_mpc_forces],
b'OQMATO1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_mpc_ato],
b'OQMCRM1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_mpc_crm],
b'OQMPSD1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_mpc_psd],
b'OQMRMS1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_mpc_rms],
b'OQMNO1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_mpc_no],
b'OQMG2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_mpc_forces], # big random
b'OQMATO2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_mpc_ato],
b'OQMCRM2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_mpc_crm],
b'OQMPSD2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_mpc_psd],
b'OQMRMS2' : [self._table_passer, self._table_passer], # buggy on isat random
b'OQMNO2' : [self._table_passer, self._table_passer], # buggy on isat random
#b'OQMRMS2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_mpc_rms], # buggy on isat random
#b'OQMNO2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_mpc_no], # buggy on isat random
#=======================
# OPG
# applied loads
b'OPG1' : [reader_opg._read_opg1_3, self.reader_opg._read_opg1_4], # applied loads in the nodal frame
b'OPGV1' : [reader_opg._read_opg1_3, self.reader_opg._read_opg1_4], # solution set applied loads?
b'OPNL1' : [reader_opg._read_opg1_3, self.reader_opg._read_opg1_4], # nonlinear loads
b'OCRPG' : [reader_opg._read_opg1_3, self.reader_opg._read_opg1_4], # post-buckling loads
b'OPG2' : [reader_opg._read_opg2_3, reader_opg._read_opg1_4], # applied loads in the nodal frame
b'OPNL2' : [reader_opg._read_opg2_3, reader_opg._read_opg1_4], # nonlinear loads
b'OPGATO1' : [reader_opg._read_opg1_3, reader_opg._read_opg1_4],
b'OPGCRM1' : [reader_opg._read_opg1_3, reader_opg._read_opg1_4],
b'OPGPSD1' : [reader_opg._read_opg1_3, reader_opg._read_opg1_4],
b'OPGRMS1' : [reader_opg._read_opg1_3, reader_opg._read_opg1_4],
b'OPGNO1' : [reader_opg._read_opg1_3, reader_opg._read_opg1_4],
b'OPGATO2' : [reader_opg._read_opg2_3, reader_opg._read_opg1_4],
b'OPGCRM2' : [reader_opg._read_opg2_3, reader_opg._read_opg1_4],
b'OPGPSD2' : [reader_opg._read_opg2_3, reader_opg._read_opg1_4],
#b'OPGRMS2' : [self._table_passer, self._table_passer],
#b'OPGNO2' : [self._table_passer, self._table_passer],
b'OPGRMS2' : [reader_opg._read_opg2_3, reader_opg._read_opg1_4],
b'OPGNO2' : [reader_opg._read_opg2_3, reader_opg._read_opg1_4],
#=======================
# OGPFB1
# grid point forces
b'OGPFB1' : [reader_ogpf._read_ogpf1_3, reader_ogpf._read_ogpf1_4], # grid point forces
#b'OGPFB2' : [reader_ogpf._read_ogpf1_3, reader_ogpf._read_ogpf1_4], # grid point forces
#=======================
# ONR/OEE
# strain energy density
b'ONRGY' : [reader_onr._read_onr1_3, reader_onr._read_onr1_4],
b'ONRGY1' : [reader_onr._read_onr1_3, reader_onr._read_onr1_4], # strain energy density
b'ONRGY2': [reader_onr._read_onr2_3, reader_onr._read_onr1_4],
#b'ONRGY2': [self._table_passer, self._table_passer],
#===========================================================
# OES
# stress
# OES1C - Table of composite element stresses or strains in SORT1 format
# OESRT - Table of composite element ply strength ratio. Output by SDRCOMP
b'OES1X1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # stress - nonlinear elements
b'OES1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # stress - linear only
b'OES1X' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # element stresses at intermediate stations & nonlinear stresses
b'OES1C' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # stress - composite
b'OESCP' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # stress - nonlinear???
b'OESRT' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # ply strength ratio
# strain
b'OSTR1' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # strain - autodesk/9zk6b5uuo.op2
b'OSTR1X' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # strain - isotropic
b'OSTR1C' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # strain - composite
b'OESTRCP' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4],
b'OSTR1PL' : [self._table_passer, self._table_passer], # ????
b'OSTR1THC' : [self._table_passer, self._table_passer], # ????
b'OSTR1CR' : [self._table_passer, self._table_passer], # ????
#b'OEFIIP'
# special nonlinear tables
# OESNLBR - Slideline stresses
# OESNLXD - Nonlinear transient stresses
# OESNLXR - Nonlinear stress
# Table of nonlinear element stresses in SORT1 format and appended for all subcases
b'OESNLXR' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # nonlinear stresses
b'OESNLXD' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # nonlinear transient stresses
b'OESNLBR' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESNL1X' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESNL2' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4],
b'OESNLXR2' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4],
b'OESNLBR2' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4],
#b'OESNLXR2' : [self._table_passer, self._table_passer],
#b'OESNLBR2' : [self._table_passer, self._table_passer],
# off stress
b'OES2' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4], # stress - linear only - disabled; need better tests
#b'OES2' : [self._table_passer, self._table_passer], # stress - linear only - disabled; need better tests
b'OESPSD2C' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4], # isat_random (nx)
b'OSTPSD2C' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4], # isat_random (nx)
#=======================
# off strain
b'OSTRATO1' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4],
b'OSTRCRM1' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4],
b'OSTRPSD1' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4],
b'OSTRRMS1' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # isat_random
b'OSTRNO1' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # isat_random
b'OSTRATO2' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4],
b'OSTRCRM2' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4],
b'OSTRPSD2' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4],
b'OSTRRMS2' : [self._table_passer, self._table_passer], # buggy on isat random
b'OSTRNO2' : [self._table_passer, self._table_passer], # buggy on isat random
#b'OSTRRMS2' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4], # buggy on isat random
#b'OSTRNO2' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4], # buggy on isat random
b'OSTRMS1C' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # isat_random
b'OSTNO1C' : [reader_oes._read_oes1_3, reader_oes._read_ostr1_4], # isat_random
#=======================
# OUG
# displacement/velocity/acceleration/eigenvector/temperature
b'OUG1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # displacements in nodal frame
b'OVG1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # velocity in nodal frame
b'OAG1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # accelerations in nodal frame
b'OUG1F' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # acoustic displacements in ? frame
b'OUGV1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # displacements in nodal frame
b'BOUGV1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # OUG1 on the boundary???
b'BOUGF1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # OUG1 on the boundary???
b'OUGV1PAT': [reader_oug._read_oug1_3, reader_oug._read_oug_4], # OUG1 + coord ID
b'OUPV1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # scaled response spectra - displacement
b'TOUGV1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # grid point temperature
b'ROUGV1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # relative OUG
b'OPHSA' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # Displacement output table in SORT1
b'OUXY1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # Displacements in SORT1 format for h-set or d-set.
b'OUGPC1' : [reader_oug._read_ougpc1_3, reader_oug._read_ougpc_4], # panel contributions
b'OUGPC2' : [reader_oug._read_ougpc2_3, reader_oug._read_ougpc_4], # panel contributions
b'OUGF1' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # Acoustic pressures at microphone points in SORT1 format
b'OUGF2' : [reader_oug._read_oug2_3, reader_oug._read_oug_4], # Acoustic pressures at microphone points in SORT1 format
b'OUGV2' : [reader_oug._read_oug2_3, reader_oug._read_oug_4], # displacements in nodal frame
b'ROUGV2' : [reader_oug._read_oug2_3, reader_oug._read_oug_4], # relative OUG
b'OUXY2' : [reader_oug._read_oug2_3, reader_oug._read_oug_4], # Displacements in SORT2 format for h-set or d-set.
# modal contribution
b'OUGMC1' : [reader_oug._read_oug1_3, reader_oug._read_ougmc_4],
b'OQGMC1' : [reader_oqg._read_oqg1_3, reader_oug._read_ougmc_4],
b'OESMC1' : [reader_oes._read_oes1_3, reader_oes._read_oesmc_4],
b'OSTRMC1' : [reader_oes._read_oes1_3, reader_oes._read_oesmc_4],
#F:\work\pyNastran\examples\Dropbox\move_tpl\sbuckl2a.op2
b'OCRUG' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # post-buckling displacement
b'OPHIG' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # eigenvectors in basic coordinate system
b'BOPHIG' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # eigenvectors in basic coordinate system
b'BOPHIGF' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # Eigenvectors in the basic coordinate system for the fluid portion of the model.
b'BOPHIGS' : [reader_oug._read_oug1_3, reader_oug._read_oug_4], # Eigenvectors in the basic coordinate system for the structural portion of the model.
b'BOPG1' : [reader_opg._read_opg1_3, reader_opg._read_opg1_4], # applied loads in basic coordinate system
b'OUGATO1' : [reader_oug._read_oug1_3, reader_oug._read_oug_ato],
b'OUGCRM1' : [reader_oug._read_oug1_3, reader_oug._read_oug_crm],
b'OUGPSD1' : [reader_oug._read_oug1_3, reader_oug._read_oug_psd],
b'OUGRMS1' : [reader_oug._read_oug1_3, reader_oug._read_oug_rms],
b'OUGNO1' : [reader_oug._read_oug1_3, reader_oug._read_oug_no],
b'OUGATO2' : [reader_oug._read_oug2_3, reader_oug._read_oug_ato],
b'OUGCRM2' : [reader_oug._read_oug2_3, reader_oug._read_oug_crm],
b'OUGPSD2' : [reader_oug._read_oug2_3, reader_oug._read_oug_psd],
b'OUGRMS2' : [self._table_passer, self._table_passer], # buggy on isat random
b'OUGNO2' : [self._table_passer, self._table_passer], # buggy on isat random
#b'OUGRMS2' : [reader_oug._read_oug2_3, reader_oug._read_oug_rms], # buggy on isat random
#b'OUGNO2' : [reader_oug._read_oug2_3, reader_oug._read_oug_no], # buggy on isat random
#=======================
# extreme values of the respective table
b'OUGV1MX' : [self._table_passer, self._table_passer],
b'OEF1MX' : [self._table_passer, self._table_passer],
b'OES1MX' : [self._table_passer, self._table_passer],
#=======================
# contact
b'OQGCF1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4], # Contact force at grid point.
b'OQGCF2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4], # Contact force at grid point.
b'OSPDS1' : [reader_oqg._read_opsds1_3, reader_oqg._read_opsds1_4], # Final separation distance.
b'OSPDS2' : [self._nx_table_passer, self._table_passer],
b'OSPDSI1' : [reader_oqg._read_opsdi1_3, reader_oqg._read_opsdi1_4], # Initial separation distance.
b'OSPDSI2' : [self._nx_table_passer, self._table_passer], # Output contact separation distance results.
#b'OBC1' : [self._read_obc1_3, self._read_obc1_4],
#b'OBC2' : [self._nx_table_passer, self._table_passer], # Contact pressures and tractions at grid points.
#b'OSLIDE1'
b'OPRPSD2' : [self._nx_table_passer, self._table_passer],
b'OPRATO2' : [self._nx_table_passer, self._table_passer],
b'OPRNO1' : [self._nx_table_passer, self._table_passer],
b'OPRCRM2' : [self._nx_table_passer, self._table_passer],
b'OCPSDFC' : [self._nx_table_passer, self._table_passer],
b'OCCORFC' : [self._nx_table_passer, self._table_passer],
# Glue normal and tangential tractions at grid point in basic coordinate system
b'OBG1' : [self._nx_table_passer, self._table_passer],
b'OBG2' : [self._nx_table_passer, self._table_passer],
b'OQGGF1' : [reader_oqg._read_oqg1_3, reader_oqg._read_oqg_4], # Glue forces at grid point in basic coordinate system
b'OQGGF2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_4],
# Table of Euler Angles for transformation from material to basic coordinate system
# in the undeformed configuration
b'TRMBU' : [self._nx_table_passer, self._table_passer],
b'TRMBD' : [self._nx_table_passer, self._table_passer],
#=======================
# OGPWG
# grid point weight
b'OGPWG' : [self.reader_ogpwg._read_ogpwg_3, self.reader_ogpwg._read_ogpwg_4], # grid point weight
b'OGPWGM' : [self.reader_ogpwg._read_ogpwg_3, self.reader_ogpwg._read_ogpwg_4], # modal? grid point weight
#=======================
# OGS
# grid point stresses
b'OGS1' : [self.reader_ogs._read_ogs1_3, self.reader_ogs._read_ogs1_4], # grid point stresses
#b'OGS2' : [self._read_ogs1_3, self._read_ogs1_4], # grid point stresses
b'OGSTR1' : [self.reader_ogs._read_ogstr1_3, self.reader_ogs._read_ogstr1_4], # grid point strains
#=======================
# eigenvalues
b'BLAMA' : [self.reader_lama._read_buckling_eigenvalue_3, self.reader_lama._read_buckling_eigenvalue_4], # buckling eigenvalues
b'CLAMA' : [self.reader_lama._read_complex_eigenvalue_3, self.reader_lama._read_complex_eigenvalue_4], # complex eigenvalues
b'LAMA' : [self.reader_lama._read_real_eigenvalue_3, self.reader_lama._read_real_eigenvalue_4], # eigenvalues
b'LAMAS' : [self.reader_lama._read_real_eigenvalue_3, self.reader_lama._read_real_eigenvalue_4], # eigenvalues-structure
b'LAMAF' : [self.reader_lama._read_real_eigenvalue_3, self.reader_lama._read_real_eigenvalue_4], # eigenvalues-fluid
# ===passers===
#b'EQEXIN': [self._table_passer, self._table_passer],
#b'EQEXINS': [self._table_passer, self._table_passer],
b'GPDT' : [self._table_passer, self._table_passer], # grid points?
b'BGPDT' : [self._table_passer, self._table_passer], # basic grid point defintion table
b'BGPDTS' : [self._table_passer, self._table_passer],
b'BGPDTOLD' : [self._table_passer, self._table_passer],
b'PVT' : [self._read_pvto_3, self._read_pvto_4], # PVT - Parameter Variable Table
b'PVTS' : [self._read_pvto_3, self._read_pvto_4], # ???
b'PVT0' : [self._read_pvto_3, self._read_pvto_4], # user parameter value table
b'TOLD' : [self._table_passer, self._table_passer],
#b'CASECC' : [self._table_passer, self._table_passer], # case control deck
#b'XCASECC' : [self._table_passer, self._table_passer], # ???
b'STDISP' : [self._table_passer, self._table_passer], # matrix?
b'AEDISP' : [self._table_passer, self._table_passer], # matrix?
#b'TOLB2' : [self._table_passer, self._table_passer], # matrix?
b'FOL' : [self._table_passer, self._table_passer],
b'PERF' : [self._table_passer, self._table_passer],
# DSCMCOL - Correlation table for normalized design sensitivity coefficient matrix.
# Output by DSTAP2.
# DBCOPT - Design optimization history table for
b'OEKE1' : [self._table_passer, self._table_passer],
#b'DSCMCOL' : [self._table_passer, self._table_passer],
#b'DBCOPT' : [self._table_passer, self._table_passer],
#b'FRL0': [self._table_passer, self._table_passer], # frequency response list
#==================================
# modal participation factors
# OFMPF2M Table of fluid mode participation factors by normal mode.
b'OFMPF2M' : [self._read_mpf_3, self._read_mpf_4],
# OLMPF2M Load mode participation factors by normal mode.
b'OLMPF2M' : [self._read_mpf_3, self._read_mpf_4],
# OPMPF2M Panel mode participation factors by normal mode.
b'OPMPF2M' : [self._read_mpf_3, self._read_mpf_4],
# OPMPF2M Panel mode participation factors by normal mode.
b'OSMPF2M' : [self._read_mpf_3, self._read_mpf_4],
# OGMPF2M Grid mode participation factors by normal mode.
b'OGPMPF2M' : [self._read_mpf_3, self._read_mpf_4],
#OFMPF2E Table of fluid mode participation factors by excitation frequencies.
#OSMPF2E Table of structure mode participation factors by excitation frequencies.
#OPMPF2E Table of panel mode participation factors by excitation frequencies.
#OLMPF2E Table of load mode participation factors by excitation frequencies.
#OGMPF2E Table of grid mode participation factors by excitation frequencies.
# velocity
b'OVGATO1' : [reader_oug._read_oug1_3, reader_oug._read_oug_ato],
b'OVGCRM1' : [reader_oug._read_oug1_3, reader_oug._read_oug_crm],
b'OVGPSD1' : [reader_oug._read_oug1_3, reader_oug._read_oug_psd],
b'OVGRMS1' : [reader_oug._read_oug1_3, reader_oug._read_oug_rms],
b'OVGNO1' : [reader_oug._read_oug1_3, reader_oug._read_oug_no],
b'OVGATO2' : [reader_oug._read_oug2_3, reader_oug._read_oug_ato],
b'OVGCRM2' : [reader_oug._read_oug2_3, reader_oug._read_oug_crm],
b'OVGPSD2' : [reader_oug._read_oug2_3, reader_oug._read_oug_psd],
#b'OVGRMS2' : [self._table_passer, self._table_passer],
#b'OVGNO2' : [self._table_passer, self._table_passer],
b'OVGRMS2' : [reader_oug._read_oug2_3, reader_oug._read_oug_rms],
b'OVGNO2' : [reader_oug._read_oug2_3, reader_oug._read_oug_no],
#==================================
#b'GPL': [self._table_passer, self._table_passer],
#b'OMM2' : [self._table_passer, self._table_passer], # max/min table - kinda useless
b'ERRORN' : [self._table_passer, self._table_passer], # p-element error summary table
#==================================
b'OUG2T' : [self._table_passer, self._table_passer],
# acceleration
b'OAGATO1' : [reader_oug._read_oug1_3, reader_oug._read_oug_ato],
b'OAGCRM1' : [reader_oug._read_oug1_3, reader_oug._read_oug_crm],
b'OAGPSD1' : [reader_oug._read_oug1_3, reader_oug._read_oug_psd],
b'OAGRMS1' : [reader_oug._read_oug1_3, reader_oug._read_oug_rms],
b'OAGNO1' : [reader_oug._read_oug1_3, reader_oug._read_oug_no],
b'OAGATO2' : [reader_oug._read_oug2_3, reader_oug._read_oug_ato],
b'OAGCRM2' : [reader_oug._read_oug2_3, reader_oug._read_oug_crm],
b'OAGPSD2' : [reader_oug._read_oug2_3, reader_oug._read_oug_psd],
#b'OAGRMS2' : [self._table_passer, self._table_passer],
#b'OAGNO2' : [self._table_passer, self._table_passer],
b'OAGRMS2' : [reader_oug._read_oug2_3, reader_oug._read_oug_rms],
b'OAGNO2' : [reader_oug._read_oug2_3, reader_oug._read_oug_no],
# stress
b'OESATO1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESCRM1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESPSD1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESRMS1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESNO1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
# OESXRM1C : Composite element RMS stresses in SORT1 format for random analysis that includes von Mises stress output.
b'OESXRMS1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESXRM1C' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESXNO1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESXNO1C' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
b'OESATO2' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4],
b'OESCRM2' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4],
b'OESPSD2' : [reader_oes._read_oes2_3, reader_oes._read_oes2_4],
#b'OESRMS2' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # buggy on isat random
#b'OESNO2' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # buggy on isat random
b'OESRMS2' : [self._table_passer, self._table_passer], # buggy on isat random
b'OESNO2' : [self._table_passer, self._table_passer], # buggy on isat random
# force
b'OEFATO1' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4],
b'OEFCRM1' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4],
b'OEFPSD1' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4],
b'OEFRMS1' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4],
b'OEFNO1' : [reader_oef._read_oef1_3, reader_oef._read_oef1_4],
b'OEFATO2' : [reader_oef._read_oef2_3, reader_oef._read_oef2_4],
b'OEFCRM2' : [reader_oef._read_oef2_3, reader_oef._read_oef2_4],
b'OEFPSD2' : [reader_oef._read_oef2_3, reader_oef._read_oef2_4],
#b'OEFRMS2' : [reader_oef._read_oef2_3, reader_oef._read_oef2_4], # buggy on isat random
# nx cohesive zone
b'ODAMGCZT' : [self._nx_table_passer, self._table_passer],
b'ODAMGCZR' : [self._nx_table_passer, self._table_passer],
b'ODAMGCZD' : [self._nx_table_passer, self._table_passer],
# Normalized Mass Density
b'ONMD' : [self.reader_onmd._read_onmd_3, self.reader_onmd._read_onmd_4],
#====================================================================
# NASA95
b'OESC1' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4],
}
if self.is_nx and 0:
table_mapper2 = {
#b'OUGRMS2' : [self._table_passer, self._table_passer], # buggy on isat random
#b'OUGNO2' : [self._table_passer, self._table_passer], # buggy on isat random
b'OUGRMS2' : [reader_oug._read_oug2_3, reader_oug._read_oug_rms], # buggy on isat random
b'OUGNO2' : [reader_oug._read_oug2_3, reader_oug._read_oug_no], # buggy on isat random
#b'OQMRMS2' : [self._table_passer, self._table_passer], # buggy on isat random
#b'OQMNO2' : [self._table_passer, self._table_passer], # buggy on isat random
b'OQMRMS2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_mpc_rms], # buggy on isat random
b'OQMNO2' : [reader_oqg._read_oqg2_3, reader_oqg._read_oqg_mpc_no], # buggy on isat random
#b'OSTRRMS2' : [self._table_passer, self._table_passer], # buggy on isat random
#b'OSTRNO2' : [self._table_passer, self._table_passer], # buggy on isat random
b'OSTRRMS2' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4], # buggy on isat random
b'OSTRNO2' : [reader_oes._read_oes2_3, reader_oes._read_ostr2_4], # buggy on isat random
b'OESRMS2' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # buggy on isat random
b'OESNO2' : [reader_oes._read_oes1_3, reader_oes._read_oes1_4], # buggy on isat random
#b'OESRMS2' : [self._table_passer, self._table_passer], # buggy on isat random
#b'OESNO2' : [self._table_passer, self._table_passer], # buggy on isat random
b'OEFNO2' : [reader_oef._read_oef2_3, reader_oef._read_oef2_4],
#b'OEFNO2' : [self._table_passer, self._table_passer], # buggy on isat_random_steve2.op2
}
for key, value in table_mapper2.items():
table_mapper[key] = value
#table_mapper.update(table_mapper2)
return table_mapper
def _read_mpf_3(self, data, ndata: int) -> int:
"""reads table 3 (the header table)
OFMPF2E Table of fluid mode participation factors by excitation frequencies.
OFMPF2M Table of fluid mode participation factors by normal mode.
OSMPF2E Table of structure mode participation factors by excitation frequencies.
OSMPF2M Table of structure mode participation factors by normal mode.
OPMPF2E Table of panel mode participation factors by excitation frequencies.
OPMPF2M Table of panel mode participation factors by normal mode.
OLMPF2E Table of load mode participation factors by excitation frequencies.
OLMPF2M Table of load mode participation factors by normal mode.
OGMPF2E Table of grid mode participation factors by excitation frequencies.
OGMPF2M Table of grid mode participation factors by normal mode.
"""
#self._set_times_dtype()
self.nonlinear_factor = np.nan
self.is_table_1 = True
self.is_table_2 = False
unused_three = self.parse_approach_code(data)
self.words = [
'approach_code', 'table_code', '???', 'isubcase',
'???', '???', '???', 'random_code',
'format_code', 'num_wide', '???', '???',
'acoustic_flag', '???', '???', '???',
'???', '???', '???', '???',
'???', '???', 'thermal', '???',
'???', 'Title', 'subtitle', 'label']
## random code
self.random_code = self.add_data_parameter(data, 'random_code', b'i', 8, False)
## format code
self.format_code = self.add_data_parameter(data, 'format_code', b'i', 9, False)
## number of words per entry in record
self.num_wide = self.add_data_parameter(data, 'num_wide', b'i', 10, False)
## acoustic pressure flag
self.acoustic_flag = self.add_data_parameter(data, 'acoustic_flag', b'i', 13, False)
## thermal flag; 1 for heat transfer, 0 otherwise
self.thermal = self.add_data_parameter(data, 'thermal', b'i', 23, False)
#if self.analysis_code == 1: # statics / displacement / heat flux
## load set number
#self.lsdvmn = self.add_data_parameter(data, 'lsdvmn', b'i', 5, False)
#self.data_names = self.apply_data_code_value('data_names', ['lsdvmn'])
#self.setNullNonlinearFactor()
#elif self.analysis_code == 2: # real eigenvalues
## mode number
#self.mode = self.add_data_parameter(data, 'mode', b'i', 5)
## eigenvalue
#self.eign = self.add_data_parameter(data, 'eign', b'f', 6, False)
## mode or cycle .. todo:: confused on the type - F1???
#self.mode_cycle = self.add_data_parameter(data, 'mode_cycle', b'i', 7, False)
#self.update_mode_cycle('mode_cycle')
#self.data_names = self.apply_data_code_value('data_names', ['mode', 'eign', 'mode_cycle'])
#elif self.analysis_code == 3: # differential stiffness
#self.lsdvmn = self.get_values(data, b'i', 5) ## load set number
#self.data_code['lsdvmn'] = self.lsdvmn
#elif self.analysis_code == 4: # differential stiffness
#self.lsdvmn = self.get_values(data, b'i', 5) ## load set number
if self.analysis_code == 5: # frequency
# frequency
self.node_id = self.add_data_parameter(data, 'node_id', b'i', 5, fix_device_code=True)
self.data_names = self.apply_data_code_value('data_names', ['node_id'])
#self.freq = self.add_data_parameter(data, 'freq', b'f', 5)
#self.data_names = self.apply_data_code_value('data_names', ['freq'])
#elif self.analysis_code == 6: # transient
## time step
#self.dt = self.add_data_parameter(data, 'dt', b'f', 5)
#self.data_names = self.apply_data_code_value('data_names', ['dt'])
#elif self.analysis_code == 7: # pre-buckling
## load set number
#self.lsdvmn = self.add_data_parameter(data, 'lsdvmn', b'i', 5)
#self.data_names = self.apply_data_code_value('data_names', ['lsdvmn'])
#elif self.analysis_code == 8: # post-buckling
## load set number
#self.lsdvmn = self.add_data_parameter(data, 'lsdvmn', b'i', 5)
## real eigenvalue
#self.eigr = self.add_data_parameter(data, 'eigr', b'f', 6, False)
#self.data_names = self.apply_data_code_value('data_names', ['lsdvmn', 'eigr'])
#elif self.analysis_code == 9: # complex eigenvalues
## mode number
#self.mode = self.add_data_parameter(data, 'mode', b'i', 5)
## real eigenvalue
#self.eigr = self.add_data_parameter(data, 'eigr', b'f', 6, False)
## imaginary eigenvalue
#self.eigi = self.add_data_parameter(data, 'eigi', b'f', 7, False)
#self.data_names = self.apply_data_code_value('data_names', ['mode', 'eigr', 'eigi'])
#elif self.analysis_code == 10: # nonlinear statics
## load step
#self.lftsfq = self.add_data_parameter(data, 'lftsfq', b'f', 5)
#self.data_names = self.apply_data_code_value('data_names', ['lftsfq'])
#elif self.analysis_code == 11: # old geometric nonlinear statics
## load set number
#self.lsdvmn = self.add_data_parameter(data, 'lsdvmn', b'i', 5)
#self.data_names = self.apply_data_code_value('data_names', ['lsdvmn'])
#elif self.analysis_code == 12: # contran ? (may appear as aCode=6) --> straight from DMAP...grrr...
## load set number
#self.lsdvmn = self.add_data_parameter(data, 'lsdvmn', b'i', 5)
#self.data_names = self.apply_data_code_value('data_names', ['lsdvmn'])
else:
msg = f'invalid analysis_code...analysis_code={self.analysis_code}\ndata={self.data_code}'
raise RuntimeError(msg)
#print self.code_information()
#
self.fix_format_code()
if self.num_wide == 8:
self.format_code = 1
self.data_code['format_code'] = 1
else:
#self.fix_format_code()
if self.format_code == 1:
self.format_code = 2
self.data_code['format_code'] = 2
assert self.format_code in [2, 3], self.code_information()
self._parse_thermal_code()
if self.is_debug_file:
self.binary_debug.write(' approach_code = %r\n' % self.approach_code)
self.binary_debug.write(' tCode = %r\n' % self.tCode)
self.binary_debug.write(' isubcase = %r\n' % self.isubcase)
self._read_title(data)
self._write_debug_bits()
def _read_mpf_4(self, data: bytes, ndata: int):
"""unused"""
if self.read_mode == 1: # or self.table_name_str not in ['OFMPF2M']:
return ndata
#print(self.table_name_str, ndata, self.num_wide) # 176
#self.show_ndata(100, types='ifs')
structi = Struct('fiff')
nelements = ndata // 16
ndev = ndata % 16
assert ndev == 0, ndev
for i in range(nelements):
datai = data[i*16 : (i+1)*16]
freq, dunno_int, mag, phase = structi.unpack(datai)
assert dunno_int == 2, str(self.node_id, freq, dunno_int, mag, phase)
#print(self.node_id, freq, dunno_int, mag, phase)
#print()
if self.isubtable == -4:
self.log.warning('%s results were read, but not saved' % self.table_name_str)
return ndata
def _read_pvto_3(self, data: bytes, ndata: int):
"""unused"""
raise RuntimeError(self.read_mode)
def _read_pvto_4(self, data: bytes, ndata: int) -> int:
    """
    Reads PARAM cards from the PVT0 table-4 record.

    data = (
        AUTOSPC, 3, YES,
        GRDPNT, 1, 0,
        K6ROT, 2, 100.0,
        OUGCORD, 3, GLOBAL,
        POST, 1, -1,
        POST, 1, -2,
        POSTEXT, 3, YES,
        K6ROT, 2, 100.0,
        OIBULK, 3, YES,
        OMACHPR, 3, YES,
        POSTEXT, 3, YES,
        UNITSYS, 3, MN-MM)

    Returns
    -------
    ndata2 : int
        bytes/words consumed by the helper (or ndata on a parse failure)
    """
    # the PARAMs were already stored on the first pass; nothing to do on pass 2
    if self.read_mode == 2:
        return ndata
    # remember the file position so a failed parse can rewind and skip
    iloc = self.f.tell()
    try:
        ndata2 = self._read_pvto_4_helper(data, ndata)
    except (NotImplementedError, AssertionError) as error:
        #raise # only for testing
        # dev builds under test treat unsupported PARAMs as hard errors
        if 'dev' in __version__ and self.IS_TESTING:
            raise # only for testing
        # otherwise: log it, rewind the file, and skip the record
        self.log.error(str(error))
        log_exc(self.log)
        self.f.seek(iloc)
        ndata2 = ndata
    # PARAM,NXVER only exists in NX Nastran decks, so flip the flavor flag
    if 'NXVER' in self.params and not self.is_nx:
        self.set_as_nx()
        self.log.debug('found PARAM,NXVER -> setting as NX')
    return ndata2
def _read_pvto_4_helper(self, data: bytes, ndata: int) -> int:
    """
    Reads PARAM cards out of the raw PVT0 record.

    Each PARAM is laid out as:
      - words 0-1 : the 8-character parameter name
      - word  2   : a type flag
      - word  3+  : the value(s); the width depends on the flag

    flag meanings (per the DMAP notes below):
      1=int, 2=real, 3=character(2 words)/real, 5=complex, 7=logical

    Raises
    ------
    NotImplementedError : on an unrecognized flag
    AssertionError : on a value that fails validation
    """
    # word size in bytes: 4 (size=4) or 8 (size=8)
    xword = (4 * self.factor)
    nvalues = ndata // xword
    assert ndata % xword == 0, ndata
    # pre-build the unpackers for the active word size
    if self.size == 4:
        structi = self.struct_i
        structf = Struct(b'f')
        structs8 = self.struct_8s
        struct2f = Struct(b'ff')
    else:
        structi = self.struct_q
        structf = Struct(b'd')
        structs8 = self.struct_16s
        struct2f = Struct(b'dd')
    i = 0  # cursor, in words
    while i < nvalues:
        istart = i*xword
        # words 0-1: the parameter name (8 chars; 64-bit files pack it wide)
        if self.size == 4:
            word = data[istart:(i+2)*xword].rstrip()
        elif self.size == 8:
            bword = data[istart:(i+2)*xword]
            word = reshape_bytes_block(bword).rstrip()
        else:
            raise RuntimeError(self.size)
        key = word.decode('latin1')
        # word 2: the type flag
        flag_data = data[(i+2)*xword:(i+3)*xword]
        flag = structi.unpack(flag_data)[0]
        # DMAP-ish reference for the flag:
        #   =1 INT I*, =2 REAL RS*, =3 BCD(2) CHAR4*, =4 REALDBL RD,
        #   =5 CMPLXS CS*, =6 CMPLXD CD, =7 LOGICAL LOGIC*
        if flag == 1: # int
            assert self.size in [4, 8], (key, self.size, flag)
            slot = data[(i+3)*xword:(i+4)*xword]
            i += 4
            value, = structi.unpack(slot)
            values = [value]
        elif flag == 2: # float
            assert self.size in [4, 8], (key, self.size, flag)
            slot = data[(i+3)*xword:(i+4)*xword]
            value, = structf.unpack(slot)
            values = [value]
            i += 4
        elif flag == 3: # float / string
            assert self.size in [4, 8], (key, self.size, flag)
            # 2 words: try to decode as an 8-char string first; if it fails
            # validation, fall back to interpreting the tail as a float
            slot = data[(i+3)*xword:(i+5)*xword]
            try:
                bvalue, = structs8.unpack(slot)
                if self.size == 8:
                    bvalue = reshape_bytes_block(bvalue)
                value = bvalue.decode('latin1').rstrip()
                if value:
                    # NXVER looks like '10.1'; UNITSYS like 'MN-MM'
                    if word == b'NXVER':
                        assert value.replace('.', '').isalnum(), f'{key} = {value!r}'
                    elif word == b'UNITSYS':
                        assert value.replace('-', '').isalnum(), f'{key} = {value!r}'
                    else:
                        assert value.isalnum(), f'{key} = {value!r}'
            except AssertionError:
                value, = structf.unpack(slot[4:])
            values = [value]
            if isinstance(value, str):
                assert word in STR_PARAMS_1, f'word={word}'
            else:
                assert word in FLOAT_PARAMS_1, f'float/str; word={word} value={value}'
            i += 5
        elif flag == 5: # CMPLXS CS - FLOAT_PARAMS_2
            assert self.size in [4, 8], (key, self.size, flag)
            slot = data[(i+3)*xword:(i+5)*xword]
            values = struct2f.unpack(slot)
            values = list(values)
            assert word in FLOAT_PARAMS_2, f'word={word}'
            i += 5
        elif flag == 7: # logical/int
            assert self.size in [4, 8], (key, self.size, flag)
            slot = data[(i+3)*xword:(i+4)*xword]
            value, = structi.unpack(slot)
            values = [value]
            i += 4
        else:
            self.show_data(data[i*xword:], types='ifsqd', endian=None, force=False)
            self.log.error('%r' % word)
            raise NotImplementedError(f'{word!r} is not a supported PARAM; flag={flag}')
        param = PARAM(key, values, comment='')
        self.params[key] = param
        del key, values
    # NOTE(review): this returns the WORD count, not a byte count; the caller
    # (_read_pvto_4) passes the value straight through -- confirm the units
    # expected upstream
    return nvalues
def _old_pvto(self, word: bytes, data: bytes, i: int, xword: int,
              struct2i, struct2f, structs8) -> Tuple[int, Any]:  # pragma: no cover
    """
    Legacy PARAM parser that dispatches on hard-coded parameter-name sets
    (INT_PARAMS_1, FLOAT_PARAMS_1, ...) instead of the in-record type flag.
    Kept for reference; superseded by _read_pvto_4_helper.

    Returns
    -------
    i : int
        the updated word cursor
    value : varies
        the parsed parameter value
    """
    if word in INT_PARAMS_1:
        # name(2 words) + 1 int value
        slot = data[(i+2)*xword:(i+4)*xword]
        value = struct2i.unpack(slot)[1]
        i += 4
    elif word in FLOAT_PARAMS_1:
        slot = data[(i+2)*xword:(i+4)*xword]
        value = struct2f.unpack(slot)[1]
        i += 4
    elif word in FLOAT_PARAMS_2:
        # 2-float (complex) value
        slot = data[(i+3)*xword:(i+5)*xword]
        value = struct2f.unpack(slot)
        i += 5
    elif word in INT_PARAMS_2:
        slot = data[(i+3)*xword:(i+5)*xword]
        value = struct2i.unpack(slot)
        i += 5
    elif word in STR_PARAMS_1:
        # 8-character string value
        i += 3
        slot = data[i*xword:(i+2)*xword]
        bvalue = structs8.unpack(slot)[0]
        if self.size == 8:
            bvalue = reshape_bytes_block(bvalue)
        value = bvalue.decode('ascii').rstrip()
        i += 2
    else:
        # dump the surrounding bytes to help debug the unknown PARAM
        if self.size == 4:
            self.show_data(data[i*xword+12:i*4+i*4+12], types='ifs')
            self.show_data(data[i*xword+8:(i+4)*4], types='ifs')
        else:
            self.show_data(data[i*xword+24:i*8+i*8+24], types='sdq')
            self.show_data(data[i*xword+16:(i+4)*8], types='sdq')
        self.log.error('%r' % word)
        raise NotImplementedError(f'{word!r} is not a supported PARAM')
    return i, value
def _not_available(self, data: bytes, ndata: int):
"""testing function"""
if ndata > 0:
raise RuntimeError('this should never be called...'
'table_name={self.table_name!r} len(data)={ndata}')
def _table_crasher(self, data: bytes, ndata: int):
"""auto-table crasher"""
if self.is_debug_file:
self.binary_debug.write(f' crashing table = {self.table_name}\n')
raise NotImplementedError(self.table_name)
return ndata
def _nx_table_passer(self, data, ndata: int):
"""auto-table skipper"""
self.to_nx(f' because table_name={self.table_name} was found')
self._table_passer(data, ndata)
def _table_passer(self, data, ndata: int):
    """auto-table skipper; consumes the record without storing anything"""
    table_name = self.table_name
    if self.is_debug_file:
        self.binary_debug.write(f'  skipping table = {table_name}\n')
    in_subtable = self.isubtable > -4
    if in_subtable and table_name not in GEOM_TABLES:
        self.log.warning(f'    skipping table: {self.table_name_str}')
    if in_subtable and not is_release:
        # geometry tables may be skipped when geometry isn't requested;
        # anything else is a hard error in dev builds
        skippable_geom = table_name in GEOM_TABLES and not self.make_geom
        if not skippable_geom:
            print(f'dont skip table {self.table_name_str!r}')
            raise RuntimeError(f'dont skip table {self.table_name_str!r}')
    return ndata
def _validate_op2_filename(self, op2_filename: Optional[str]) -> str:
"""
Pops a GUI if the op2_filename hasn't been set.
Parameters
----------
op2_filename : str
the filename to check (None -> gui)
Returns
-------
op2_filename : str
a valid file string
"""
if op2_filename is None:
from pyNastran.utils.gui_io import load_file_dialog
wildcard_wx = "Nastran OP2 (*.op2)|*.op2|" \
"All files (*.*)|*.*"
wildcard_qt = "Nastran OP2 (*.op2);;All files (*)"
title = 'Please select a OP2 to load'
op2_filename, unused_wildcard_level = load_file_dialog(
title, wildcard_wx, wildcard_qt, dirname='')
assert op2_filename is not None, op2_filename
return op2_filename
def _create_binary_debug(self):
    """Instantiates the ``self.binary_debug`` variable/file"""
    # close and drop any debug file left over from a previous read
    old_debug = getattr(self, 'binary_debug', None)
    if old_debug is not None:
        old_debug.close()
        del self.binary_debug
    self.is_debug_file, self.binary_debug = create_binary_debug(
        self.op2_filename, self.debug_file, self.log)
def read_op2(self, op2_filename=None,
             combine: bool=False,
             load_as_h5: bool=False,
             h5_file=None,
             mode: Optional[str]=None) -> List[bytes]:
    """
    Starts the OP2 file reading

    Parameters
    ----------
    op2_filename : str
        the op2 file
    combine : bool; default=False
        True : objects are isubcase based
        False : objects are (isubcase, subtitle) based;
                will be used for superelements regardless of the option
    load_as_h5 : bool; default=False
        False : don't setup the h5_file
        True : loads the op2 as an h5 file to save memory
               stores the result.element/data attributes in h5 format
    h5_file : h5File; default=None
        None : ???
        h5File : ???
    mode : str; default=None
        the Nastran flavor passed to read_nastran_version

    Returns
    -------
    table_names : List[bytes]
        the names of the tables that were read

    +--------------+-----------------------+
    | op2_filename | Description           |
    +--------------+-----------------------+
    | None         | a dialog is popped up |
    +--------------+-----------------------+
    | string       | the path is used      |
    +--------------+-----------------------+
    """
    # derive the sibling filenames (bdf/f06/des/h5) from the op2 name
    fname = os.path.splitext(op2_filename)[0]
    self.op2_filename = op2_filename
    self.bdf_filename = fname + '.bdf'
    self.f06_filename = fname + '.f06'
    self.des_filename = fname + '.des'
    self.h5_filename = fname + '.h5'
    self.op2_reader.load_as_h5 = load_as_h5
    if load_as_h5:
        # results go into an HDF5 file instead of RAM
        h5_file = None
        import h5py
        self.h5_file = h5py.File(self.h5_filename, 'w')
        self.op2_reader.h5_file = self.h5_file
    self._count = 0
    if self.read_mode == 1:
        #sr = list(self._results.saved)
        #sr.sort()
        #self.log.debug('_results.saved = %s' % str(sr))
        #self.log.info('_results.saved = %s' % str(sr))
        pass
    if self.read_mode != 2:
        # only validate the path on the first pass
        op2_filename = self._validate_op2_filename(op2_filename)
        self.log.info(f'op2_filename = {op2_filename!r}')
        if not is_binary_file(op2_filename):
            if os.path.getsize(op2_filename) == 0:
                raise IOError(f'op2_filename={op2_filename!r} is empty.')
            raise IOError(f'op2_filename={op2_filename!r} is not a binary OP2.')
    self._create_binary_debug()
    self._setup_op2()
    _op2 = self.op2_reader.op2
    self.op2_reader.read_nastran_version(mode)
    # peek 4 bytes to make sure the file isn't empty past the version header
    data = _op2.f.read(4)
    _op2.f.seek(_op2.n)
    if len(data) == 0:
        raise FatalError('There was a Nastran FATAL Error. Check the F06.\n'
                         'No tables exist...check for a license issue')
    #=================
    table_name = self.op2_reader._read_table_name(rewind=True, stop_on_failure=False)
    if table_name is None:
        raise FatalError('There was a Nastran FATAL Error. Check the F06.\n'
                         'No tables exist...check for a license issue')
    self._make_tables()
    table_names = []
    try:
        # table_names is filled in-place
        self._read_tables(table_name, table_names)
    except EmptyRecordError:
        self.show(500, types='ifs', endian=None, force=False)
        raise
    # NOTE(review): reads the trailing marker block and dumps the tail of the
    # file; presumably a debugging aid -- confirm the show() is intentional
    op2_reader = self.op2_reader
    op2_reader.read_markers([1, 0, 0, 0])
    self.show(500, types='ifs', endian=None, force=False)
    self._finish()
    self.close_op2(force=False)
    #self.remove_unpickable_data()
    return table_names
def close_op2(self, force=True):
    """closes the OP2 and debug file"""
    if self.is_debug_file:
        # leave a final marker in the debug log before closing it
        self.binary_debug.write('-' * 80 + '\n')
        self.binary_debug.write('f.tell()=%s\ndone...\n' % self.f.tell())
        self.binary_debug.close()
    if self._close_op2 or force:
        # self.f can be None when the input was an ascii file
        if self.f is not None:
            self.f.close()
        del self.binary_debug
        del self.f
    self._cleanup_data_members()
    self._cleanup_words()
def _cleanup_words(self):
"""
Remove internal parameters that are not useful and just clutter
the object attributes.
"""
words = [
'isubcase', 'int3', '_table4_count', 'nonlinear_factor',
'is_start_of_subtable', 'superelement_adaptivity_index',
'thermal_bits', 'is_vectorized', 'pval_step', #'_frequencies',
'_analysis_code_fmt', 'isubtable', '_data_factor', 'sort_method',
'acoustic_flag', 'approach_code', 'format_code_original',
'element_name', 'sort_bits', 'code', 'n', 'use_vector', 'ask',
'stress_bits', 'expected_times', 'table_code', 'sort_code',
'is_all_subcases', 'num_wide', '_table_mapper', 'label',
'apply_symmetry',
'words', 'device_code', 'table_name', '_count', 'additional_matrices',
# 350
'data_names', '_close_op2',
'op2_reader',
# 74
'generalized_tables',
# 124
'is_table_1', 'is_table_2', 'ntotal', 'element_mapper',
'is_debug_file', 'debug_file',
'_results', 'skip_undefined_matrices',
# 140
#---------------------------------------------------------
# dont remove...
# make_geom, title, read_mode
# result_names, op2_results
]
for word in words:
if hasattr(self, word):
delattr(self, word)
def _setup_op2(self):
    """
    Does preliminary op2 tasks like:
      - open the file
      - set the endian (by sniffing the first record marker)
      - preallocate some struct objects
    """
    #: file index
    self.n = 0
    self.table_name = None
    if not hasattr(self, 'f') or self.f is None:
        #: the OP2 file object
        op2_filename = self.op2_filename
        self.f = open(op2_filename, 'rb')
        #: the endian in bytes
        self._endian = None
        #: the endian in unicode
        self._uendian = None
        flag_data = self.f.read(20)
        self.f.seek(0)
        # the first int of a valid OP2 is a record length of 4 (32-bit)
        # or 8 (64-bit); try both byte orders
        #(4, 3, 4, 24) ???
        #(8, 3, 0, 8, 24)
        little_data = unpack(b'<5i', flag_data)
        big_data = unpack(b'>5i', flag_data)
        if big_data[0] in [4, 8]:
            self._uendian = '>'
            self._endian = b'>'
            size = big_data[0]
        elif little_data[0] in [4, 8]:
            self._uendian = '<'
            self._endian = b'<'
            size = little_data[0]
        else:
            # Matrices from test show
            # (24, 10, 10, 6, 2) before the Matrix Name...
            print(little_data, big_data)
            self.show(30, types='ifs', endian='<')
            self.show(30, types='ifs', endian='>')
            self.show(12, types='ifs', endian='<')
            self.show(12, types='ifs', endian='>')
            raise FatalError('cannot determine endian')
    else:
        # the file is already open (second pass); jump back to position n
        self.op2_reader._goto(self.n)
    # NOTE(review): `size` is only bound when the file was (re)opened above;
    # presumably read_mode=1 always takes that branch -- confirm
    if self.read_mode == 1:
        self._set_structs(size)
def _make_tables(self):
return
#global RESULT_TABLES, NX_RESULT_TABLES, MSC_RESULT_TABLES
#table_mapper = self._get_table_mapper()
#RESULT_TABLES = table_mapper.keys()
def _read_tables(self, table_name: bytes, table_names: List[bytes]) -> None:
    """
    Reads all the geometry/result tables.
    The OP2 header is not read by this function.

    Parameters
    ----------
    table_name : bytes str
        the first table's name
    table_names : List[bytes str]
        the table names that were read (filled in-place)
    """
    op2_reader = self.op2_reader
    self.table_count = defaultdict(int)
    while table_name is not None:
        self.table_count[table_name] += 1
        table_names.append(table_name)
        if self.is_debug_file:
            self.binary_debug.write('-' * 80 + '\n')
            self.binary_debug.write(f'table_name = {table_name!r}\n')
        if is_release:
            self.log.debug(f'  table_name={table_name!r}')
        self.table_name = table_name
        # dispatch priority: user-supplied generalized readers, the reader's
        # own mapped tables, geometry, matrices, then result tables
        if table_name in self.generalized_tables:
            t0 = self.f.tell()
            self.generalized_tables[table_name](self)
            # a reader that doesn't advance the file would loop forever
            assert self.f.tell() != t0, 'the position was unchanged...'
        elif table_name in op2_reader.mapped_tables:
            t0 = self.f.tell()
            op2_reader.mapped_tables[table_name]()
            assert self.f.tell() != t0, 'the position was unchanged...'
        elif table_name in GEOM_TABLES:
            op2_reader.read_geom_table()  # DIT (agard)
        elif table_name in MATRIX_TABLES:
            op2_reader.read_matrix(table_name)
        elif table_name in RESULT_TABLES:
            op2_reader.read_results_table()
        elif self.skip_undefined_matrices:
            op2_reader.read_matrix(table_name)
        elif table_name.strip() in self.additional_matrices:
            op2_reader.read_matrix(table_name)
        else:
            msg = (
                f'Invalid Table = {table_name!r}\n\n'
                'If you have matrices that you want to read, see:\n'
                '  model.set_additional_matrices_to_read(matrices)'
                '  matrices = {\n'
                "      b'BHH' : True,\n"
                "      b'KHH' : False,\n"
                '  }  # you want to read some matrices, but not others\n'
                "  matrices = [b'BHH', b'KHH']  # assumes True\n\n"
                'If you the table is a geom/result table, see:\n'
                '  model.set_additional_result_tables_to_read(methods_dict)\n'
                "  methods_dict = {\n"
                "      b'OUGV1' : [method3, method4],\n"
                "      b'GEOM4SX' : [method3, method4],\n"
                "      b'OES1X1' : False,\n"
                '  }\n\n'
                'If you want to take control of the OP2 reader (mainly useful '
                'for obscure tables), see:\n'
                "  methods_dict = {\n"
                "      b'OUGV1' : [method],\n"
                '  }\n'
                '  model.set_additional_generalized_tables_to_read(methods_dict)\n'
            )
            raise NotImplementedError(msg)
        table_name = op2_reader._read_table_name(last_table_name=table_name,
                                                 rewind=True, stop_on_failure=False)
def set_additional_generalized_tables_to_read(self, tables):
    """
    Adds methods to call a generalized table.
    Everything is left to the user.

    ::

      def read_some_table(self):
          # read the data from self.f
          pass

      # let's overwrite the existing OP2 table
      model2 = OP2Geom(debug=True)
      generalized_tables = {
          b'GEOM1S' : read_some_table,
      }
      model.set_additional_generalized_tables_to_read(generalized_tables)
    """
    # validate the keys (must be bytes) before accepting the mapping
    self._update_generalized_tables(tables)
    self.generalized_tables = tables
def set_additional_result_tables_to_read(self, tables):
    """
    Adds methods to read additional result tables.
    This is expected to really only be used for skipping
    unsupported tables or disabling enabled tables that are
    buggy (e.g., OUGV1).

    Parameters
    ----------
    tables : Dict[bytes] = varies
        a dictionary of key=name, value=list[method3, method4]/False,
        False : skips a table
                applies self._table_passer to method3 and method4
        method3 : function
            function to read table 3 results (e.g., metadata)
        method4 : function
            function to read table 4 results (e.g., the actual results)
    """
    self._update_generalized_tables(tables)
    # capture the current mapper; the patched getter mutates this dict
    table_mapper = self._get_table_mapper()

    def _patched_table_mapper():
        """overloaded version of _get_table_mapper"""
        for key, methods in tables.items():
            if methods is False:
                # False -> silently skip both the table-3 and table-4 records
                table_mapper[key] = [self._table_passer, self._table_passer]
            else:
                assert len(methods) == 2, methods
                table_mapper[key] = methods
        return table_mapper
    self._get_table_mapper = _patched_table_mapper
def _update_generalized_tables(self, tables):
    """
    helper function for:
     - set_additional_generalized_tables_to_read
     - set_additional_result_tables_to_read

    Registers the user's table names in the module-level NX/MSC result-table
    lists and rebuilds RESULT_TABLES.

    Raises
    ------
    TypeError : if any key is not bytes
    """
    global NX_RESULT_TABLES
    global MSC_RESULT_TABLES
    global RESULT_TABLES
    failed_keys = []
    keys = list(tables.keys())
    for _key in keys:
        if not isinstance(_key, bytes):
            failed_keys.append(_key)
        # NOTE(review): a non-bytes key is still appended to the table list
        # below before the TypeError is raised -- confirm that's intended
        if hasattr(self, 'is_nx') and self.is_nx:
            NX_RESULT_TABLES.append(_key)
        else:
            MSC_RESULT_TABLES.append(_key)
    if failed_keys:
        failed_keys_str = [str(_key) for _key in failed_keys]
        raise TypeError('[%s] must be bytes' % ', '.join(failed_keys_str))
    RESULT_TABLES = NX_RESULT_TABLES + MSC_RESULT_TABLES
def set_additional_matrices_to_read(self, matrices: Union[List[str], Dict[str, bool]]):
    """
    Matrices (e.g., KHH) can be sparse or dense.

    Parameters
    ----------
    matrices : List[str]; Dict[str] = bool
        List[str]:
            simplified method to add matrices; value will be True
        Dict[str] = bool:
            a dictionary of key=name, value=True/False,
            where True/False indicates the matrix should be read

    .. note:: If you use an already defined table (e.g. KHH), it
              will be ignored.  If the table you requested doesn't
              exist, there will be no effect.
    .. note:: Do not use this for result tables like OUGV1, which
              store results like displacement.  Those are not matrices.
              Matrices are things like DMIGs.
    """
    if isinstance(matrices, list):
        # simplified form: every listed matrix should be read
        matrices2 = {}
        for matrix in matrices:
            assert isinstance(matrix, str), 'matrix=%r' % str(matrix)
            matrices2[matrix] = True
        matrices = matrices2

    # bug fix: removed the dead `self.additional_matrices = matrices`
    # assignment that was immediately overwritten below
    # keys are stored as bytes because table names come off the file as bytes
    self.additional_matrices = {}
    for matrix_name, matrix in matrices.items():
        if isinstance(matrix_name, bytes):
            self.additional_matrices[matrix_name] = matrix
        else:
            self.additional_matrices[matrix_name.encode('latin1')] = matrix
def _finish(self):
"""
Clears out the data members contained within the self.words variable.
This prevents mixups when working on the next table, but otherwise
has no effect.
"""
for word in self.words:
if word != '???' and hasattr(self, word):
if word not in ['Title', 'reference_point']:
delattr(self, word)
self.obj = None
if hasattr(self, 'subtable_name'):
del self.subtable_name
def _read_psdf_3(self, data: bytes, ndata: int):
    """
    Reads the PSDF table-3 record (power spectral density functions).

    Decodes the header to identify which result family (displacements,
    stress, ...) the following table-4 PSD data belongs to and stores
    (node, dof, word) on self for _read_psdf_4.
    """
    #(50, 2011, 4001, 0, 302130, 3
    # strip off the title
    unused_three = self.parse_approach_code(data)
    self.words = [
        'approach_code', 'table_code', '???', 'isubcase',
        '???', '???', '???', 'random_code',
        'format_code', 'num_wide', '???', '???',
        'acoustic_flag', '???', '???', '???',
        '???', '???', '???', '???',
        '???', '???', 'thermal', '???',
        '???', 'Title', 'subtitle', 'label'
    ]
    ## random code
    self.random_code = self.add_data_parameter(data, 'random_code', b'i', 8, False)
    self._read_title(data)
    # simplifying to see the data better; these keys aren't needed for PSDF
    del self.data_code['title']
    del self.data_code['label']
    del self.data_code['subtitle']
    del self.data_code['subtitle_original']
    del self.data_code['superelement_adaptivity_index']
    del self.data_code['table_name']
    del self.data_code['_encoding']
    del self.data_code['load_as_h5']
    del self.data_code['h5_file']
    del self.data_code['is_msc']
    del self.data_code['pval_step']
    # wrong
    del self.data_code['isubcase']
    if self.read_mode == 1:
        return ndata
    # header layout: 6 ints, 3 floats, 2 ints (44 bytes)
    approach_code, tcode, int3, frame_id, int5, dof, float7, rms_value, float9, int10, stress_strain_flag = unpack(
        self._endian + b'6i 3f 2i', data[:44])
    self.stress_strain_flag = stress_strain_flag
    ints = np.frombuffer(data[:200], dtype=self.idtype)
    # warn on unexpected nonzero trailing words
    if ints[11:].max() > 0:
        self.log.warning(f'ints11 = {ints[11:].tolist()}')
    # int5 is the device-coded node id (node*10 + device)
    node = int5 // 10
    #dof = int5 % 10
    # map (analysis_code, table_code, stress_strain_flag) to the result slot
    if (self.analysis_code, self.table_code, self.stress_strain_flag) == (5, 1, 0):
        word = 'displacements'
    elif (self.analysis_code, self.table_code, self.stress_strain_flag) == (5, 2, 0):
        word = 'load_vectors'
    elif (self.analysis_code, self.table_code, self.stress_strain_flag) == (5, 3, 0):
        word = 'spc_forces'
    elif (self.analysis_code, self.table_code, self.stress_strain_flag) == (5, 4, 0):
        word = 'force'
    elif (self.analysis_code, self.table_code, self.stress_strain_flag) == (5, 5, 0):
        word = 'stress'
    elif (self.analysis_code, self.table_code, self.stress_strain_flag) == (5, 5, 2):
        word = 'strain'
    elif (self.analysis_code, self.table_code, self.stress_strain_flag) == (5, 10, 0):
        word = 'velocities'
    elif (self.analysis_code, self.table_code, self.stress_strain_flag) == (5, 11, 0):
        word = 'accelerations'
    else:  # pragma: no cover
        print(f'analysis_code={self.analysis_code} approach_code={approach_code} tcode={tcode} table_code={self.table_code} '
              f'int3={int3} frame_id={frame_id} node={node} dof={dof} '
              f'float7={float7} rms_value={rms_value:.5e} float9={float9:.4e} int10={int10} stress_strain_flag={stress_strain_flag}')
        raise NotImplementedError(f'analysis_code={self.analysis_code} '
                                  f'table_code={self.table_code} '
                                  f'stress_strain_flag={self.stress_strain_flag} is not supported')
    # stash the identifiers for _read_psdf_4
    self.node = node
    self.dof = dof
    self.word = word
    return ndata
def _read_psdf_4(self, data: bytes, ndata: int):
"""reads the PSDF table"""
if self.read_mode == 1:
return ndata
#self.show_data(data[:100], types='ifs', endian=None)
data2 = np.frombuffer(data, dtype=self.fdtype)
ndata = len(data2)
nfreqs = ndata // 2
data2 = data2.reshape(nfreqs, 2)
#last2 = data2[-2:, 1]
#self.log.warning(f'skipping PSDF; nfreqs={nfreqs} [{last2[0]:.6e},{last2[1]:.6e}] '
#f'ymin={data2[:,1].min():.6e} ymax={data2[:,1].max():.6e}') # {self.data_code}
# self.show_data(), self._read_psdf_4
key = (self.label, self.node, self.dof)
slot = getattr(self.op2_results.psds, self.word)
assert key not in slot, slot
slot[key] = data2
del self.node
del self.dof
del self.word
def main():  # pragma: no cover
    """smoke test: read an OP2, pickle it, and load it back"""
    from pickle import dump, load
    pickle_filename = 'solid_shell_bar.txt'
    op2_filename = 'solid_shell_bar.op2'
    op2 = OP2_Scalar()
    op2.read_op2(op2_filename)

    # bug fix: pickles are binary, so the round-trip must use 'wb'/'rb';
    # the old code reopened the file in text mode ('r') for pickle.load,
    # which fails. `with` also closes the handles that were leaked before.
    with open(pickle_filename, 'wb') as pickle_file:
        dump(op2, pickle_file)
    with open(pickle_filename, 'rb') as pickle_file:
        op2 = load(pickle_file)
def create_binary_debug(op2_filename: str, debug_file: str, log) -> Tuple[bool, Any]:
    """helper method"""
    # no debug file requested -> nothing to open
    if debug_file is None:
        return False, None
    #: an ASCII version of the op2 (creates lots of output)
    log.debug('debug_file = %s' % debug_file)
    binary_debug = open(debug_file, 'w')
    binary_debug.write(op2_filename + '\n')
    return True, binary_debug
if __name__ == '__main__':  # pragma: no cover
    # manual entry point for the pickling round-trip check above
    main()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,654
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/cards/constraints.py
|
"""
All constraint cards are defined in this file. This includes:
* Constraint
* SUPORT
* SUPORT1
* SPC
* SPC1
* SPCAX
* MPC
* GMSPC
* ConstraintADD
* SPCADD
* MPCADD
The ConstraintObject contain multiple constraints.
"""
from __future__ import annotations
from itertools import count
from typing import TYPE_CHECKING
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.bdf import MAX_INT
from pyNastran.bdf.cards.base_card import BaseCard, _node_ids, expand_thru
from pyNastran.bdf.bdf_interface.assign_type import (
integer, integer_or_blank, double, double_or_blank, parse_components,
components_or_blank, string)
from pyNastran.bdf.field_writer_8 import print_card_8, print_float_8
from pyNastran.bdf.field_writer_16 import print_float_16, print_card_16
from pyNastran.bdf.field_writer_double import print_scientific_double
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
class Constraint(BaseCard):
    """
    Base class shared by the constraint cards:
     - SUPORT / SUPORT1 / SESUP
     - GMSPC
     - MPC
     - SPC / SPC1
     - SPCAX
     - SPCOFF / SPCOFF1
    """
    def __init__(self):
        pass

    def _node_ids(self, nodes=None, allow_empty_nodes=False, msg=''):
        """returns nodeIDs for repr functions"""
        # delegate to the module-level helper, passing this card as context
        node_ids = _node_ids(self, nodes, allow_empty_nodes, msg)
        return node_ids
class SUPORT1(Constraint):
    """
    Defines determinate reaction degrees-of-freedom (r-set) in a free
    body-analysis. SUPORT1 must be requested by the SUPORT1 Case
    Control command.

    +---------+-----+-----+----+-----+----+-----+----+
    |    1    |  2  |  3  | 4  |  5  | 6  |  7  | 8  |
    +=========+=====+=====+====+=====+====+=====+====+
    | SUPORT1 | SID | ID1 | C1 | ID2 | C2 | ID3 | C3 |
    +---------+-----+-----+----+-----+----+-----+----+
    | SUPORT1 |  1  |  2  | 23 |  4  | 15 |  5  |  0 |
    +---------+-----+-----+----+-----+----+-----+----+
    """
    type = 'SUPORT1'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        """minimal SUPORT1 used for testing/introspection"""
        conid = 1
        nodes = [1]
        Cs = ['123']
        return SUPORT1(conid, nodes, Cs, comment='')

    def __init__(self, conid, nodes, Cs, comment=''):
        """
        Creates a SUPORT1 card, which defines free-body reaction points.

        Parameters
        ----------
        conid : int
            Case Control SUPORT id
        nodes : List[int]
            the nodes to release
        Cs : List[str]
            components to support at each node
        comment : str; default=''
            a comment for the card
        """
        Constraint.__init__(self)
        if comment:
            self.comment = comment
        self.conid = conid
        self.nodes = nodes
        self.Cs = Cs
        assert len(self.nodes) > 0
        assert len(self.nodes) == len(self.Cs)
        self.nodes_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SUPORT1 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        conid = integer(card, 1, 'conid')  # really a support id sid
        nfields = len(card)
        assert len(card) > 2
        # fields come in (ID, C) pairs starting at field 2
        nterms = int((nfields - 1.) / 2.)
        n = 1
        nodes = []
        Cs = []
        for i in range(nterms):
            nstart = 2 + 2 * i
            nid = integer(card, nstart, 'ID%s' % n)
            C = components_or_blank(card, nstart + 1, 'component%s' % n, '0')
            nodes.append(nid)
            Cs.append(C)
            n += 1
        return SUPORT1(conid, nodes, Cs, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds a SUPORT1 card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        conid = data[0]
        # the remaining fields are (node, component) pairs
        assert (len(data) - 1) % 2 == 0, data
        nodes = []
        Cs = []
        for i in range(1, len(data), 2):
            nid = data[i]
            C = data[i+1]
            nodes.append(nid)
            Cs.append(C)
        return SUPORT1(conid, nodes, Cs, comment=comment)

    def add_suport1_to_set(self, suport1):
        """merges another SUPORT1 with the same conid into this card"""
        assert self.conid == suport1.conid, 'SUPORT1 conid=%s new_conid=%s; they must be the same' % (self.conid, suport1.conid)
        comment = self.comment + suport1.comment
        if comment:
            self.comment = comment
        self.nodes += suport1.nodes
        self.Cs += suport1.Cs

    @property
    def node_ids(self):
        """the grid ids (xref'd when available)"""
        msg = ', which is required by SUPORT1'
        if self.nodes_ref is None:
            return self.nodes
        return self._node_ids(nodes=self.nodes_ref, allow_empty_nodes=True, msg=msg)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SUPORT1'
        self.nodes_ref = model.EmptyNodes(self.nodes, msg=msg)

    def safe_cross_reference(self, model: BDF, debug=True):
        """best-effort cross reference; missing nodes are logged, not fatal"""
        nids2 = []
        msg = ', which is required by SUPORT1=%s' % self.conid
        for nid in self.nodes:
            try:
                nid2 = model.Node(nid, msg=msg)
            except KeyError:
                if debug:
                    msg = 'Couldnt find nid=%i, which is required by SUPORT1=%s' % (
                        nid, self.conid)
                    model.log.warning(msg)
                continue
            nids2.append(nid2)
        self.nodes_ref = nids2

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.nodes = self.node_ids
        self.nodes_ref = None

    def raw_fields(self):
        """fields used by the card writer"""
        fields = ['SUPORT1', self.conid]
        for nid, c in zip(self.node_ids, self.Cs):
            fields += [nid, c]
        return fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.raw_fields()
        # BUG FIX / consistency with SUPORT.write_card: node ids too wide
        # for an 8-character field must use the large-field format
        if max(self.node_ids) > MAX_INT:
            return self.comment + print_card_16(card)
        return self.comment + print_card_8(card)
class SUPORT(Constraint):
    """
    Defines determinate reaction degrees-of-freedom in a free body.

    +---------+-----+-----+-----+-----+-----+-----+-----+----+
    |    1    |  2  |  3  |  4  |  5  |  6  |  7  |  8  | 9  |
    +=========+=====+=====+=====+=====+=====+=====+=====+====+
    | SUPORT  | ID1 | C1  | ID2 | C2  | ID3 | C3  | ID4 | C4 |
    +---------+-----+-----+-----+-----+-----+-----+-----+----+
    """
    type = 'SUPORT'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        """minimal SUPORT used for testing/introspection"""
        return SUPORT([1, 2], ['123', '456'], comment='')

    def __init__(self, nodes, Cs, comment=''):
        """
        Creates a SUPORT card, which defines free-body reaction points.
        This is always active.

        Parameters
        ----------
        nodes : List[int]
            the nodes to release
        Cs : List[str]
            components to support at each node
        comment : str; default=''
            a comment for the card
        """
        Constraint.__init__(self)
        if comment:
            self.comment = comment
        self.nodes = nodes
        # normalize integer components (e.g. 123) to their string form
        self.Cs = [str(ci) if isinstance(ci, integer_types) else ci
                   for ci in Cs]
        self.nodes_ref = None

    def validate(self):
        """sanity checks the SUPORT after construction/assignment"""
        assert len(self.nodes) > 0
        assert len(self.nodes) == len(self.Cs)

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SUPORT card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        nfields = len(card)
        assert len(card) > 1, card
        # (ID, C) pairs start at field 1
        nterms = int(nfields / 2.)
        nodes = []
        components = []
        for iterm in range(nterms):
            ifield = 1 + 2 * iterm
            nodes.append(integer(card, ifield, 'ID%s' % (iterm + 1)))
            components.append(
                components_or_blank(card, ifield + 1, 'component%s' % (iterm + 1), '0'))
        return SUPORT(nodes, components, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds a SUPORT card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        nodes = []
        components = []
        # data is a flat (node, component) pair list
        for i in range(0, len(data), 2):
            nid, comp = data[i:i+2]
            assert nid > 0, nid
            nodes.append(nid)
            components.append(comp)
        return SUPORT(nodes, components, comment=comment)

    @property
    def node_ids(self):
        """the grid ids (xref'd when available)"""
        if self.nodes_ref is None:
            return self.nodes
        msg = ', which is required by SUPORT'
        return self._node_ids(nodes=self.nodes_ref, allow_empty_nodes=True, msg=msg)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SUPORT'
        self.nodes_ref = model.EmptyNodes(self.nodes, msg=msg)

    def safe_cross_reference(self, model: BDF, debug=True):
        """best-effort cross reference; missing nodes are logged, not fatal"""
        msg = ', which is required by SUPORT'
        xref_nodes = []
        for nid in self.nodes:
            try:
                node_ref = model.Node(nid, msg=msg)
            except KeyError:
                if debug:
                    msg = 'Couldnt find nid=%i, which is required by SUPORT' % nid
                    model.log.warning(msg)
                continue
            xref_nodes.append(node_ref)
        self.nodes_ref = xref_nodes

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.nodes = self.node_ids
        self.nodes_ref = None

    def raw_fields(self):
        """fields used by the card writer"""
        fields = [self.type]
        for nid, component in zip(self.node_ids, self.Cs):
            fields.extend([nid, component])
        return fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        fields = self.raw_fields()
        # node ids too wide for an 8-char field force large-field format
        if max(self.node_ids) > MAX_INT:
            return self.comment + print_card_16(fields)
        return self.comment + print_card_8(fields)
class SESUP(SUPORT):
    """superelement variant of SUPORT (same fields, different card name)"""
    type = 'SESUP'

    @classmethod
    def _init_from_empty(cls):
        """minimal SESUP used for testing/introspection"""
        nodes = [1, 2]
        Cs = ['1', '2']
        return SESUP(nodes, Cs, comment='')

    def __init__(self, nodes, Cs, comment=''):
        # BUG FIX: the comment was previously dropped (comment='' was
        # hard-coded in the super() call); forward it to SUPORT.__init__
        SUPORT.__init__(self, nodes, Cs, comment=comment)
class MPC(Constraint):
    """
    Multipoint Constraint
    Defines a multipoint constraint equation of the form:
        sum(A_j * u_j) = 0
    where:
      uj represents degree-of-freedom Cj at grid or scalar point Gj.
      Aj represents the scale factor

    +-----+-----+----+----+-----+----+----+----+-----+
    |  1  |  2  | 3  | 4  |  5  | 6  | 7  | 8  |  9  |
    +=====+=====+====+====+=====+====+====+====+=====+
    | MPC | SID | G1 | C1 | A1  | G2 | C2 | A2 |     |
    +-----+-----+----+----+-----+----+----+----+-----+
    |     | G3  | C3 | A3 | ... |    |    |    |     |
    +-----+-----+----+----+-----+----+----+----+-----+
    """
    type = 'MPC'
    #'constraints', 'enforced', 'gids_ref', 'gids'
    _properties = ['node_ids', ]

    @classmethod
    def _init_from_empty(cls):
        # minimal single-term MPC used for testing/introspection
        conid = 1
        nodes = [1]
        components = ['1']
        coefficients = [1.]
        return MPC(conid, nodes, components, coefficients)

    def __init__(self, conid: int, nodes: List[int], components: List[str],
                 coefficients: List[float], comment: str=''):
        """
        Creates an MPC card

        Parameters
        ----------
        conid : int
            Case Control MPC id
        nodes : List[int]
            GRID/SPOINT ids
        components : List[str]
            the degree of freedoms to constrain (e.g., '1', '123')
        coefficients : List[float]
            the scaling coefficients
        comment : str; default=''
            a comment for the card
        """
        Constraint.__init__(self)
        if comment:
            self.comment = comment
        #: Set identification number. (Integer > 0)
        self.conid = conid
        #: Identification number of grid or scalar point. (Integer > 0)
        self.nodes = nodes
        #: Component number. (Any one of the Integers 1 through 6 for grid
        #: points; blank or zero for scalar points.)
        self.components = components
        #: Coefficient. (Real; Default = 0.0 except A1 must be nonzero.)
        self.coefficients = coefficients
        self.nodes_ref = None

    def validate(self):
        """sanity checks the MPC: parallel lists of correct element types"""
        assert isinstance(self.nodes, list), type(self.nodes)
        assert isinstance(self.components, list), type(self.components)
        assert isinstance(self.coefficients, list), type(self.coefficients)
        assert len(self.nodes) == len(self.components)
        assert len(self.nodes) == len(self.coefficients)
        for nid, comp, coefficient in zip(self.nodes, self.components, self.coefficients):
            assert isinstance(nid, integer_types), self.nodes
            assert isinstance(comp, str), self.components
            assert isinstance(coefficient, float), self.coefficients

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds an MPC card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        conid = integer(card, 1, 'conid')
        nodes = []
        components = []
        coefficients = []
        fields = card.fields(0)
        nfields = len(fields)
        # the card stores two (G, C, A) triplets per 8-field row;
        # walk the rows and read up to two triplets from each
        i = 1
        for ifield in range(2, nfields, 8):
            nid = integer(card, ifield, 'G%i' % i)
            component = components_or_blank(card, ifield + 1, 'constraint%i' % i, '0')  # scalar point
            if i == 1:
                # A1 is required and must be nonzero per the MPC definition
                coefficient = double(card, ifield + 2, 'coefficient%i' % i)
                if coefficient == 0.0:
                    raise RuntimeError('coefficient1 must be nonzero; coefficient=%r' % coefficient)
            else:
                coefficient = double_or_blank(card, ifield + 2, 'coefficient%i' % i, 0.0)
            nodes.append(nid)
            components.append(component)
            coefficients.append(coefficient)
            i += 1
            if ifield + 4 > nfields and i != 2:
                # if G2 is empty (it's ifield+4 because nfields is length based
                # and not loop friendly)
                break
            nid = integer(card, ifield + 3, 'G%i' % i)
            component = components_or_blank(card, ifield + 4, 'constraint%i' % i, '0')  # scalar point
            coefficient = double_or_blank(card, ifield + 5, 'coefficient%i' % i, 0.0)
            nodes.append(nid)
            components.append(component)
            coefficients.append(coefficient)
            i += 1
        return MPC(conid, nodes, components, coefficients, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds an MPC card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        #msg = 'MPC has not implemented data parsing'
        conid = data[0]
        nodes = data[1]
        components = [str(component) for component in data[2]]
        enforced = data[3]
        return MPC(conid, nodes, components, enforced, comment=comment)

    @property
    def independent_dofs(self) -> Tuple[List[int], List[int]]:
        """The first degree-of-freedom (G1, C1) in the sequence is defined to be the
        dependent degree-of-freedom.  A dependent degree-of-freedom assigned by
        one MPC entry cannot be assigned dependent by another MPC entry or by a
        rigid element."""
        nodes = self.nodes[1:]
        components = self.components[1:]
        if isinstance(nodes, integer_types):
            return [nodes], [components]
        return nodes, components

    @property
    def dependent_dofs(self) -> Tuple[List[int], List[int]]:
        """The first degree-of-freedom (G1, C1) in the sequence is defined to be the
        dependent degree-of-freedom.  A dependent degree-of-freedom assigned by
        one MPC entry cannot be assigned dependent by another MPC entry or by a
        rigid element."""
        return [self.nodes[0]], [self.components[0]]

    @property
    def independent_nodes(self) -> List[int]:
        """all nodes except the first (dependent) one; see independent_dofs"""
        nodes = self.nodes[1:]
        if isinstance(nodes, integer_types):
            return [nodes]
        return nodes

    @property
    def dependent_nodes(self) -> List[int]:
        """the first node (G1), which carries the dependent dof"""
        return [self.nodes[0]]

    @property
    def node_ids(self):
        """the grid ids (xref'd when available)"""
        if self.nodes_ref is None:
            return self.nodes
        msg = ', which is required by MPC=%s' % self.conid
        return self._node_ids(nodes=self.nodes_ref, allow_empty_nodes=True, msg=msg)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by MPC=%s' % self.conid
        self.nodes_ref = model.EmptyNodes(self.nodes, msg=msg)

    def safe_cross_reference(self, model: BDF, debug=True):
        """best-effort cross reference; missing nodes are logged, not fatal"""
        nids2 = []
        # NOTE(review): these messages say 'SPC' but this is an MPC --
        # looks like copy/paste; confirm before changing the log text
        msg = ', which is required by SPC=%s' % self.conid
        for nid in self.nodes:
            try:
                nid2 = model.Node(nid, msg=msg)
            except KeyError:
                if debug:
                    msg = 'Couldnt find nid=%i, which is required by SPC=%s' % (
                        nid, self.conid)
                    model.log.warning(msg)
                continue
            nids2.append(nid2)
        self.nodes_ref = nids2

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.nodes = self.node_ids
        self.nodes_ref = None

    def raw_fields(self):  # MPC
        """fields used by the card writer; Nones pad each continuation row"""
        fields = ['MPC', self.conid]
        for i, gid, component, coefficient in zip(count(), self.node_ids, self.components, self.coefficients):
            fields += [gid, component, coefficient]
            # after every second triplet, pad to the start of the next row
            if i % 2 == 1 and i > 0:
                fields.append(None)
                fields.append(None)
        return fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """see BaseCard.write_card``"""
        if size == 8:
            return self.write_card_8()
        return self.write_card_16(is_double)

    def write_card_8(self):
        """writes the small-field (8-char) form with manual continuations"""
        # NOTE(review): this literal looks space-collapsed by extraction;
        # 'MPC' should be padded to an 8-character field -- verify upstream
        msg = 'MPC %8d' % self.conid
        grids, components, coefficients = self.node_ids, self.components, self.coefficients
        for i, grid, component, coefficient in zip(count(), grids, components, coefficients):
            msg += '%8d%8s%8s' % (grid, component, print_float_8(coefficient))
            # two triplets per row; start a continuation line after each pair
            if i % 2 == 1 and i > 0:
                msg += '\n%8s%8s' % ('', '')
        return self.comment + msg.rstrip() + '\n'

    def write_card_16(self, is_double=False):
        """writes the large-field (16-char) form with '*' continuations"""
        # NOTE(review): this literal looks space-collapsed by extraction;
        # 'MPC*' should be padded to an 8-character field -- verify upstream
        msg = 'MPC* %16d' % self.conid
        grids, components, coefficients = self.node_ids, self.components, self.coefficients
        if is_double:
            for i, grid, component, coefficient in zip(count(), grids, components, coefficients):
                if i == 0:
                    msg += '%16d%16s%16s\n' % (
                        grid, component, print_scientific_double(coefficient))
                elif i % 2 == 1:
                    msg += '%-8s%16d%16s%16s\n' % (
                        '*', grid, component, print_scientific_double(coefficient))
                else:
                    msg += '%-8s%16s%16d%16s%16s\n' % (
                        '*', '', grid, component, print_scientific_double(coefficient))
        else:
            for i, grid, component, coefficient in zip(count(), grids, components, coefficients):
                if i == 0:
                    msg += '%16d%16s%16s\n' % (grid, component, print_float_16(coefficient))
                elif i % 2 == 1:
                    msg += '%-8s%16d%16s%16s\n' % (
                        '*', grid, component, print_float_16(coefficient))
                else:
                    msg += '%-8s%16s%16d%16s%16s\n' % (
                        '*', '', grid, component, print_float_16(coefficient))
        # trailing continuation marker when the last row is left half-filled;
        # NOTE(review): 'i' is the loop variable from whichever branch ran
        if i % 2 == 0:
            msg += '*'
        return self.comment + msg.rstrip() + '\n'
class SPC(Constraint):
    """
    Defines enforced displacement/temperature (static analysis)
    velocity/acceleration (dynamic analysis).

    +-----+-----+----+----+------+----+----+----+
    |  1  |  2  | 3  | 4  |  5   | 6  | 7  | 8  |
    +=====+=====+====+====+======+====+====+====+
    | SPC | SID | G1 | C1 |  D1  | G2 | C2 | D2 |
    +-----+-----+----+----+------+----+----+----+
    | SPC |  2  | 32 | 3  | -2.6 | 5  |    |    |
    +-----+-----+----+----+------+----+----+----+
    """
    type = 'SPC'
    _properties = ['node_ids', 'constraints', 'gids_ref', 'gids']

    @classmethod
    def _init_from_empty(cls):
        # minimal two-node SPC used for testing/introspection
        conid = 1
        nodes = [1, 2]
        components = ['123', '456']
        enforced = [0., 0.]
        return SPC(conid, nodes, components, enforced, comment='')

    def __init__(self, conid: int,
                 nodes: List[int],
                 components: List[str],
                 enforced: List[float], comment: str=''):
        """
        Creates an SPC card, which defines the degree of freedoms to be
        constrained

        Parameters
        ----------
        conid : int
            constraint id
        nodes : List[int]
            GRID/SPOINT ids
        components : List[str]
            the degree of freedoms to constrain (e.g., '1', '123')
        enforced : List[float]
            the constrained value for the given node (typically 0.0)
        comment : str; default=''
            a comment for the card

        .. note:: len(nodes) == len(components) == len(enforced)
        .. warning:: non-zero enforced deflection requires an SPCD as well
        """
        Constraint.__init__(self)
        if comment:
            self.comment = comment
        # accept scalar inputs for convenience; normalize to parallel lists
        if isinstance(nodes, int):
            nodes = [nodes]
        if isinstance(components, str):
            components = [components]
        elif isinstance(components, int):
            components = [str(components)]
        if isinstance(enforced, float):
            enforced = [enforced]
        self.conid = conid
        self.nodes = nodes
        self.components = components
        self.enforced = enforced
        self.nodes_ref = None

    def validate(self):
        """sanity checks the SPC: parallel lists of correct element types"""
        assert isinstance(self.nodes, list), self.nodes
        assert isinstance(self.components, list), self.components
        assert isinstance(self.enforced, list), self.enforced
        assert len(self.nodes) == len(self.components), 'len(self.nodes)=%s len(self.components)=%s' % (len(self.nodes), len(self.components))
        assert len(self.nodes) == len(self.enforced), 'len(self.nodes)=%s len(self.enforced)=%s' % (len(self.nodes), len(self.enforced))
        for nid, comp, enforcedi in zip(self.nodes, self.components, self.enforced):
            assert isinstance(nid, integer_types), self.nodes
            assert isinstance(comp, str), self.components
            assert isinstance(enforcedi, float), self.enforced

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds an SPC card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        conid = integer(card, 1, 'sid')
        # the card holds one or two (G, C, D) triplets; field 5 (G2)
        # decides whether the second triplet is present
        if card.field(5) in [None, '']:
            nodes = [integer(card, 2, 'G1'),]
            components = [components_or_blank(card, 3, 'C1', '0')]
            enforced = [double_or_blank(card, 4, 'D1', 0.0)]
        else:
            nodes = [
                integer(card, 2, 'G1'),
                integer_or_blank(card, 5, 'G2'),
            ]
            # :0 if scalar point 1-6 if grid
            components = [components_or_blank(card, 3, 'C1', '0'),
                          components_or_blank(card, 6, 'C2', '0')]
            enforced = [double_or_blank(card, 4, 'D1', 0.0),
                        double_or_blank(card, 7, 'D2', 0.0)]
        return SPC(conid, nodes, components, enforced, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds an SPC card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        spc_id = data[0]
        nodes = [data[1]]
        # normalize the component: sort its digits so e.g. 312 -> '123'
        components_str = sorted(str(data[2]))
        components = int(''.join(components_str))
        assert spc_id > 0, data
        for i, nid in enumerate(nodes):
            assert nodes[0] > 0, f'nodes={nodes} nodes[{i}]={nid}; data={data}'
        assert 0 <= components <= 123456, data
        enforced = [data[3]]
        # NOTE(review): these asserts repeat the ones above -- harmless
        assert spc_id > 0, data
        assert nodes[0] > 0, data
        components_str = str(components)
        assert len(components_str) <= 6, data
        components = [components_str]
        #if components[0] == 0:
            #components[0] = 0
        #if components[0] == 16:
            #components[0] = '16'
        #else:
            #raise RuntimeError('SPC; components=%s data=%s' % (components, data))
        #assert 0 < components[0] > 1000, data
        return SPC(spc_id, nodes, components, enforced, comment=comment)

    @property
    def constraints(self):
        # legacy alias for components
        return self.components

    @constraints.setter
    def constraints(self, constraints):
        self.components = constraints

    @property
    def node_ids(self):
        """the grid ids (xref'd when available)"""
        if self.nodes_ref is None:
            return self.nodes
        msg = ', which is required by SPC=%s' % (self.conid)
        return self._node_ids(nodes=self.nodes_ref, allow_empty_nodes=True, msg=msg)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SPC=%s' % (self.conid)
        self.nodes_ref = model.EmptyNodes(self.nodes, msg=msg)

    def safe_cross_reference(self, model: BDF, debug=True):
        """best-effort cross reference; missing nodes are logged, not fatal"""
        nids2 = []
        msg = ', which is required by SPC=%s' % self.conid
        for nid in self.node_ids:
            try:
                nid2 = model.Node(nid, msg=msg)
            except KeyError:
                if debug:
                    msg = 'Couldnt find nid=%i, which is required by SPC=%s' % (
                        nid, self.conid)
                    model.log.warning(msg)
                continue
            nids2.append(nid2)
        self.nodes_ref = nids2

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.nodes = self.node_ids
        self.nodes_ref = None

    def raw_fields(self):
        """fields used by the card writer"""
        fields = ['SPC', self.conid]
        for (node_id, constraint, enforced) in zip(self.node_ids, self.components,
                                                   self.enforced):
            fields += [node_id, constraint, enforced]
        return fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        card = self.raw_fields()
        return self.comment + print_card_8(card)
class GMSPC(Constraint):
    """constraint applied to a geometric-model entity (GMSPC card)"""
    type = 'GMSPC'

    @classmethod
    def _init_from_empty(cls):
        """minimal GMSPC used for testing/introspection"""
        return GMSPC(1, 2, 3, 4, comment='')

    def __init__(self, conid, component, entity, entity_id, comment=''):
        Constraint.__init__(self)
        if comment:
            self.comment = comment
        self.conid = conid
        self.component = component
        self.entity = entity
        self.entity_id = entity_id

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a GMSPC card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        components = parse_components(card, 2, 'components')
        entity = string(card, 3, 'entity')
        entity_id = integer(card, 4, 'entity_id')
        return GMSPC(sid, components, entity, entity_id, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        raise NotImplementedError('GMSPC')

    def cross_reference(self, model: BDF) -> None:
        """TODO: xref"""
        #msg = ', which is required by GMSPC=%s' % (self.conid)
        pass

    def safe_cross_reference(self, model):
        """TODO: xref"""
        #msg = ', which is required by GMSPC=%s' % (self.conid)
        pass

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def raw_fields(self):
        """fields used by the card writer"""
        return ['GMSPC', self.conid, self.component, self.entity, self.entity_id]

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        fields = self.raw_fields()
        return self.comment + print_card_8(fields)
class SPCAX(Constraint):
    """
    Defines a set of single-point constraints or enforced displacements
    for conical shell coordinates.

    +-------+-----+-----+-----+----+-----+
    |   1   |  2  |  3  |  4  | 5  |  6  |
    +=======+=====+=====+=====+====+=====+
    | SPCAX | SID | RID | HID | C  |  D  |
    +-------+-----+-----+-----+----+-----+
    | SPCAX |  2  |  3  |  4  | 13 | 6.0 |
    +-------+-----+-----+-----+----+-----+
    """
    type = 'SPCAX'

    @classmethod
    def _init_from_empty(cls):
        """minimal SPCAX used for testing/introspection"""
        return SPCAX(1, 2, 3, 4, 0., comment='')

    def __init__(self, conid, ringax, hid, component, enforced, comment=''):
        """
        Creates an SPCAX card

        Parameters
        ----------
        conid : int
            Identification number of a single-point constraint set.
        ringax : int
            Ring identification number. See RINGAX entry.
        hid : int
            Harmonic identification number. (Integer >= 0)
        component : int
            Component identification number. (Any unique combination of the
            Integers 1 through 6.)
        enforced : float
            Enforced displacement value
        comment : str; default=''
            a comment for the card
        """
        # everything is stored as-is; cross-referencing is not implemented
        Constraint.__init__(self)
        if comment:
            self.comment = comment
        self.conid = conid
        self.ringax = ringax
        self.hid = hid
        self.component = component
        self.enforced = enforced

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SPCAX card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'conid')
        ring_id = integer(card, 2, 'ringax')
        harmonic_id = integer(card, 3, 'hid')
        dofs = parse_components(card, 4, 'component')
        value = double(card, 5, 'enforced')
        return SPCAX(sid, ring_id, harmonic_id, dofs, value, comment=comment)

    #@classmethod
    #def add_op2_data(cls, data, comment=''):
        #msg = '%s has not implemented data parsing' % cls.type
        #raise NotImplementedError(msg)

    def cross_reference(self, model: BDF) -> None:
        """not implemented; RINGAX/harmonic lookups are left as ids"""
        pass
        #msg = ', which is required by SPCAX=%s' % (self.conid)
        #self.ringax = model.ring[self.ringax]
        #self.hid = model.harmonic[self.hid]

    def safe_cross_reference(self, model):
        self.cross_reference(model)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass

    def raw_fields(self):
        """fields used by the card writer"""
        return ['SPCAX', self.conid, self.ringax, self.hid,
                self.component, self.enforced]

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        fields = self.raw_fields()
        return self.comment + print_card_8(fields)
class SPC1(Constraint):
"""
+------+-----+-------+--------+--------+--------+--------+--------+----+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+======+=====+=======+========+========+========+========+========+====+
| SPC1 | SID | C | G1 | G2 | G3 | G4 | G5 | G6 |
+------+-----+-------+--------+--------+--------+--------+--------+----+
| | G7 | G8 | G9 | etc. | | | | |
+------+-----+-------+--------+--------+--------+--------+--------+----+
| SPC1 | 3 | 246 | 209075 | 209096 | 209512 | 209513 | 209516 | |
+------+-----+-------+--------+--------+--------+--------+--------+----+
| SPC1 | 3 | 2 | 1 | 3 | 10 | 9 | 6 | 5 |
+------+-----+-------+--------+--------+--------+--------+--------+----+
| | 2 | 8 | | | | | | |
+------+-----+-------+--------+--------+--------+--------+--------+----+
| SPC1 | SID | C | G1 | THRU | G2 | | | |
+------+-----+-------+--------+--------+--------+--------+--------+----+
| SPC1 | 313 | 12456 | 6 | THRU | 32 | | | |
+------+-----+-------+--------+--------+--------+--------+--------+----+
"""
type = 'SPC1'
_properties = ['node_ids'] # 'constraints',
@classmethod
def _init_from_empty(cls):
    """minimal SPC1 used for testing/introspection"""
    conid = 1
    components = '1'
    nodes = [1]
    return SPC1(conid, components, nodes, comment='')
def __init__(self, conid: int, components: str, nodes: List[int], comment: str=''):
"""
Creates an SPC1 card, which defines the degree of freedoms to be
constrained to a value of 0.0
Parameters
----------
conid : int
constraint id
components : str
the degree of freedoms to constrain (e.g., '1', '123')
nodes : List[int]
GRID/SPOINT ids
comment : str; default=''
a comment for the card
"""
Constraint.__init__(self)
if comment:
self.comment = comment
if isinstance(nodes, integer_types):
nodes = [nodes]
if isinstance(components, int):
components = str(components)
self.conid = conid
self.components = components
self.nodes = expand_thru(nodes)
self.nodes.sort()
self.nodes_ref = None
def validate(self):
assert isinstance(self.nodes, list), 'nodes=%s\n%s' % (self.nodes, str(self))
assert isinstance(self.components, str), 'components=%s\n%s' % (self.components, str(self))
assert len(self.nodes) > 0, self.get_stats()
@classmethod
def add_card(cls, card, comment=''):
"""
Adds a SPC1 card from ``BDF.add_card(...)``
Parameters
----------
card : BDFCard()
a BDFCard object
comment : str; default=''
a comment for the card
"""
conid = integer(card, 1, 'conid')
components = components_or_blank(card, 2, 'components', 0) # 246 = y; dx, dz dir
#nodes = [node for node in card.fields(3) if node is not None]
nodes = card.fields(3)
return SPC1(conid, components, nodes, comment=comment)
@classmethod
def add_op2_data(cls, data, comment=''):
"""
Adds an SPC1 card from the OP2
Parameters
----------
data : List[varies]
a list of fields defined in OP2 format
comment : str; default=''
a comment for the card
"""
conid = data[0]
components = str(data[1])
nodes = data[2]
if nodes[-1] == -1:
nodes = nodes[:-1]
assert conid > 0, data
assert len(nodes) > 0, data
for nid in nodes:
assert nid > 0, data
return SPC1(conid, components, nodes, comment=comment)
@property
def node_ids(self):
if self.nodes_ref is None:
return self.nodes
msg = ', which is required by SPC1; conid=%s' % self.conid
return self._node_ids(self.nodes_ref, allow_empty_nodes=True, msg=msg)
def cross_reference(self, model: BDF) -> None:
"""
Cross links the card so referenced cards can be extracted directly
Parameters
----------
model : BDF()
the BDF object
"""
msg = ', which is required by SPC1; conid=%s' % self.conid
self.nodes_ref = model.EmptyNodes(self.node_ids, msg=msg)
def safe_cross_reference(self, model: BDF, debug=True):
nids2 = []
missing_nids = []
for nid in self.node_ids:
try:
nid2 = model.Node(nid)
except KeyError:
missing_nids.append(str(nid))
continue
nids2.append(nid2)
if missing_nids and debug:
model.log.warning("Couldn't find nids=[%s], which is required by SPC1=%s" % (
', '.join(missing_nids), self.conid))
self.nodes_ref = nids2
def uncross_reference(self) -> None:
"""Removes cross-reference links"""
self.nodes = self.node_ids
self.nodes_ref = None
def raw_fields(self):
fields = ['SPC1', self.conid, self.components] + self.node_ids
return fields
def write_card(self, size: int=8, is_double: bool=False) -> str:
card = self.raw_fields()
return self.comment + print_card_8(card)
class SPCOFF(Constraint):
    """
    Defines (grid, component) pairs, one component string per node.

    +--------+----+----+----+----+----+----+----+----+
    |   1    | 2  | 3  | 4  | 5  | 6  | 7  | 8  | 9  |
    +========+====+====+====+====+====+====+====+====+
    | SPCOFF | G1 | C1 | G2 | C2 | G3 | C3 | G4 | C4 |
    +--------+----+----+----+----+----+----+----+----+
    | SPCOFF | 32 | 3  | 5  |    |    |    |    |    |
    +--------+----+----+----+----+----+----+----+----+
    """
    type = 'SPCOFF'

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal valid SPCOFF (used by the card test machinery)"""
        nodes = [1, 2]
        Cs = ['1', '2']
        return SPCOFF(nodes, Cs, comment='')

    def __init__(self, nodes, components, comment=''):
        """
        Creates an SPCOFF card

        Parameters
        ----------
        nodes : List[int]
            GRID/SPOINT ids
        components : List[str]
            the degrees of freedom for each node (e.g., '1', '123')
        comment : str; default=''
            a comment for the card
        """
        Constraint.__init__(self)
        if comment:
            self.comment = comment
        self.nodes = nodes
        self.components = components
        self.nodes_ref = None  # set by cross_reference()

    def validate(self):
        """verifies nodes/components are parallel lists of the right types"""
        assert isinstance(self.nodes, list), self.nodes
        assert isinstance(self.components, list), self.components
        assert len(self.nodes) == len(self.components), 'len(self.nodes)=%s len(self.components)=%s' % (len(self.nodes), len(self.components))
        for nid, comp in zip(self.nodes, self.components):
            assert isinstance(nid, integer_types), self.nodes
            assert isinstance(comp, str), self.components

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SPCOFF card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        nodes = []
        components = []
        nfields = len(card) - 1
        # each (Gi, Ci) pair takes 2 fields; an odd field count means the
        # final component is blank and defaults to '0'
        nconstraints = nfields // 2
        if nfields % 2 == 1:
            nconstraints += 1
        for counter in range(nconstraints):
            igrid = counter + 1
            ifield = counter * 2 + 1
            node = integer(card, ifield, 'G%i' % igrid)
            component = components_or_blank(card, ifield+1, 'C%i' % igrid, '0')
            nodes.append(node)
            components.append(component)
        assert len(card) > 1, f'len(SPCOFF card) = {len(card):d}\ncard={card}'
        return SPCOFF(nodes, components, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds a SPCOFF card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        nodes = [data[0]]
        components = data[1]
        assert 0 <= components <= 123456, data
        assert nodes[0] > 0, data
        components_str = str(components)
        assert len(components_str) <= 6, data
        components = [components_str]
        # bug fix: ``data[2]`` used to be passed as the third positional
        # argument, which is the ``comment`` slot of ``SPCOFF.__init__``;
        # SPCOFF does not store an enforced value, so data[2] is ignored
        return SPCOFF(nodes, components, comment=comment)

    @property
    def constraints(self):
        # legacy alias for ``components``
        return self.components

    @constraints.setter
    def constraints(self, constraints):
        self.components = constraints

    @property
    def node_ids(self):
        # integer node ids, whether cross-referenced or not
        if self.nodes_ref is None:
            return self.nodes
        msg = ', which is required by SPCOFF'
        return self._node_ids(nodes=self.nodes_ref, allow_empty_nodes=True, msg=msg)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SPCOFF'
        self.nodes_ref = model.EmptyNodes(self.nodes, msg=msg)

    def safe_cross_reference(self, model: BDF, debug=True):
        """like ``cross_reference``, but warns on (and skips) missing nodes"""
        nids2 = []
        missing_nids = []
        for nid in self.node_ids:
            try:
                nid2 = model.Node(nid)
            except KeyError:
                missing_nids.append(str(nid))
                continue
            nids2.append(nid2)
        if missing_nids and debug:
            model.log.warning("Couldn't find nids=[%s], which is required by SPCOFF" % (
                ', '.join(missing_nids)))
        self.nodes_ref = nids2

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.nodes = self.node_ids
        self.nodes_ref = None

    def raw_fields(self):
        # fields in write order: SPCOFF, G1, C1, G2, C2, ...
        fields = ['SPCOFF']
        for (gid, constraint) in zip(self.node_ids, self.components):
            fields += [gid, constraint]
        return fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        # small-field (8-char) format
        card = self.raw_fields()
        return self.comment + print_card_8(card)
class SPCOFF1(Constraint):
    """
    The list form of the SPCOFF card: a single component string applied
    to many GRID/SPOINT ids (supports THRU in the node list).
    """
    type = 'SPCOFF1'

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal valid SPCOFF1 (used by the card test machinery)"""
        components = '1'
        nodes= [1, 2]
        return SPCOFF1(components, nodes, comment='')

    def __init__(self, components, nodes, comment=''):
        """
        Creates an SPCOFF1 card

        Parameters
        ----------
        components : str
            the degrees of freedom (e.g., '1', '123')
        nodes : List[int]
            GRID/SPOINT ids (supports THRU)
        comment : str; default=''
            a comment for the card
        """
        Constraint.__init__(self)
        if comment:
            self.comment = comment
        self.components = components
        # expand THRU-style ranges; node ids are kept sorted
        self.nodes = expand_thru(nodes)
        self.nodes.sort()
        self.nodes_ref = None  # set by cross_reference()

    def validate(self):
        assert isinstance(self.nodes, list), 'nodes=%s\n%s' % (self.nodes, str(self))
        assert isinstance(self.components, str), 'components=%s\n%s' % (self.components, str(self))

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SPCOFF1 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        components = parse_components(card, 1, 'components')  # 246 = y; dx, dz dir
        nodes = card.fields(2)
        assert len(card) > 2, f'len(SPCOFF1 card) = {len(card):d}\ncard={card}'
        return cls(components, nodes, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds an SPCOFF1 card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        components = str(data[0])
        nodes = data[1]
        if nodes[-1] == -1:
            # -1 is the end-of-list sentinel in the OP2 node list
            nodes = nodes[:-1]
        for nid in nodes:
            assert nid > 0, data
        return SPCOFF1(components, nodes, comment=comment)

    @property
    def constraints(self):
        # legacy alias for ``components``
        return self.components

    @constraints.setter
    def constraints(self, constraints):
        self.components = constraints

    @property
    def node_ids(self):
        # integer node ids, whether cross-referenced or not
        if self.nodes_ref is None:
            return self.nodes
        msg = ', which is required by SPCOFF1'
        return self._node_ids(self.nodes_ref, allow_empty_nodes=True, msg=msg)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SPCOFF1'
        self.nodes_ref = model.EmptyNodes(self.node_ids, msg=msg)

    def safe_cross_reference(self, model: BDF, debug=True):
        """like ``cross_reference``, but warns on (and skips) missing nodes"""
        nids2 = []
        missing_nids = []
        for nid in self.node_ids:
            try:
                nid2 = model.Node(nid)
            except KeyError:
                missing_nids.append(str(nid))
                continue
            nids2.append(nid2)
        if missing_nids and debug:
            model.log.warning("Couldn't find nids=[%s], which is required by SPCOFF1" % (
                ', '.join(missing_nids)))
        self.nodes_ref = nids2

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.nodes = self.node_ids
        self.nodes_ref = None

    def raw_fields(self):
        # fields in write order
        fields = ['SPCOFF1', self.components] + self.node_ids
        return fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        # small-field (8-char) format
        card = self.raw_fields()
        return self.comment + print_card_8(card)

    def write_card_16(self, is_double=False):
        # large-field (16-char) format
        card = self.raw_fields()
        return self.comment + print_card_16(card)
class ConstraintAdd(Constraint):
    """Base class shared by the set-union constraint cards (SPCADD, MPCADD)."""
    def __init__(self):
        super().__init__()
        # list of referenced SPC/MPC cards; None until cross_reference()
        self.sets_ref = None
class SPCADD(ConstraintAdd):
    """
    Defines a single-point constraint set as a union of single-point constraint
    sets defined on SPC or SPC1 entries.

    +--------+----+----+-----+
    |   1    | 2  | 3  |  4  |
    +========+====+====+=====+
    | SPCADD | 2  | 1  |  3  |
    +--------+----+----+-----+
    """
    type = 'SPCADD'
    _properties = ['ids', 'spc_ids']

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal valid SPCADD (used by the card test machinery)"""
        conid = 1
        sets = [1, 2]
        return SPCADD(conid, sets, comment='')

    def __init__(self, conid, sets, comment=''):
        """
        Creates an SPCADD card

        Parameters
        ----------
        conid : int
            the id of the combined SPC set
        sets : List[int]
            the SPC/SPC1 ids to union (supports THRU)
        comment : str; default=''
            a comment for the card
        """
        ConstraintAdd.__init__(self)
        if comment:
            self.comment = comment
        self.conid = conid
        self.sets = expand_thru(sets)
        self.sets.sort()

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SPCADD card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        conid = integer(card, 1, 'conid')
        sets = card.fields(2)
        return SPCADD(conid, sets, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds an SPCADD card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        conid = data[0]
        sets = data[1:].tolist()  # data is array-like; .tolist() per add_op2_data convention
        return SPCADD(conid, sets, comment=comment)

    @property
    def spc_ids(self):
        """returns the SPC/SPC1 ids, whether cross-referenced or not"""
        if self.sets_ref is None:
            return self.sets
        spc_ids = []
        for spc in self.sets_ref:
            if isinstance(spc, integer_types):
                spc_ids.append(spc)
            elif isinstance(spc, list):
                spc_ids.append(spc[0].conid)
            else:
                raise TypeError('type=%s; spc=\n%s' % (type(spc), spc))
        return spc_ids

    @property
    def ids(self):
        return self.spc_ids

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by SPCADD=%s' % self.conid
        self.sets_ref = []
        for spc_id in self.sets:
            self.sets_ref.append(model.SPC(spc_id, consider_spcadd=False, msg=msg))

    def safe_cross_reference(self, model: BDF, debug=True):
        """like ``cross_reference``, but warns on (and skips) missing SPCs"""
        self.sets_ref = []
        msg = ', which is required by SPCADD=%s' % self.conid
        for spc_id in self.sets:
            try:
                spc = model.SPC(spc_id, consider_spcadd=False, msg=msg)
            except KeyError:
                if debug:
                    msg = 'Couldnt find SPC=%i, which is required by SPCADD=%s' % (
                        spc_id, self.conid)
                    model.log.warning(msg)
                continue
            self.sets_ref.append(spc)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.sets = self.spc_ids
        # bug fix: this was ``self.sets_ref = []``, but ``spc_ids`` only falls
        # back to ``self.sets`` when ``sets_ref is None``, so an uncrossed
        # card reported no spc ids (breaking raw_fields/write_card); None
        # matches ConstraintAdd.__init__ and SPC1.uncross_reference
        self.sets_ref = None

    def raw_fields(self):
        # fields in write order
        fields = ['SPCADD', self.conid] + self.spc_ids
        return fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        # small-field (8-char) format
        card = self.raw_fields()
        return self.comment + print_card_8(card)

    def write_card_16(self, is_double=False):
        # large-field (16-char) format
        card = self.raw_fields()
        return self.comment + print_card_16(card)
class MPCADD(ConstraintAdd):
    r"""
    Defines a multipoint constraint equation of the form
    :math:`\Sigma_j A_j u_j =0` where :math:`u_j` represents
    degree-of-freedom :math:`C_j` at grid or scalar point :math:`G_j`.

    +--------+----+----+-----+
    |   1    | 2  | 3  |  4  |
    +========+====+====+=====+
    | MPCADD | 2  | 1  |  3  |
    +--------+----+----+-----+
    """
    type = 'MPCADD'
    _properties = ['ids', 'mpc_ids']

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal valid MPCADD (used by the card test machinery)"""
        conid = 1
        sets = [1, 2]
        return MPCADD(conid, sets, comment='')

    def __init__(self, conid, sets, comment=''):
        """
        Creates an MPCADD card

        Parameters
        ----------
        conid : int
            the id of the combined MPC set
        sets : List[int]
            the MPC ids to union (supports THRU)
        comment : str; default=''
            a comment for the card
        """
        ConstraintAdd.__init__(self)
        if comment:
            self.comment = comment
        self.conid = conid
        self.sets = expand_thru(sets)
        self.sets.sort()

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a MPCADD card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        conid = integer(card, 1, 'conid')
        sets = card.fields(2)
        return MPCADD(conid, sets, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds an MPCADD card from the OP2

        Parameters
        ----------
        data : List[varies]
            a list of fields defined in OP2 format
        comment : str; default=''
            a comment for the card
        """
        conid = data[0]
        sets = data[1:].tolist()  # data is array-like; .tolist() per add_op2_data convention
        return MPCADD(conid, sets, comment=comment)

    @property
    def mpc_ids(self):
        """returns the MPC ids, whether cross-referenced or not"""
        if self.sets_ref is None:
            return self.sets
        mpc_ids = []
        for mpc in self.sets_ref:
            if isinstance(mpc, integer_types):
                mpc_ids.append(mpc)
            elif isinstance(mpc, list):
                mpc_ids.append(mpc[0].conid)
            else:
                # consistency fix: mirrors SPCADD.spc_ids; previously an
                # unexpected entry type raised a confusing AttributeError
                raise TypeError('type=%s; mpc=\n%s' % (type(mpc), mpc))
        return mpc_ids

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by MPCADD=%s' % self.conid
        self.sets_ref = []
        for mpc_id in self.sets:
            self.sets_ref.append(model.MPC(mpc_id, consider_mpcadd=False, msg=msg))

    def safe_cross_reference(self, model: BDF, debug=True):
        """like ``cross_reference``, but warns on (and skips) missing MPCs"""
        self.sets_ref = []
        msg = ', which is required by MPCADD=%s' % self.conid
        for mpc_id in self.sets:
            try:
                mpc = model.MPC(mpc_id, consider_mpcadd=False, msg=msg)
            except KeyError:
                if debug:
                    msg = 'Couldnt find MPC=%i, which is required by MPCADD=%s' % (
                        mpc_id, self.conid)
                    model.log.warning(msg)
                continue
            self.sets_ref.append(mpc)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.sets = self.mpc_ids
        # bug fix: this was ``self.sets_ref = []``, but ``mpc_ids`` only falls
        # back to ``self.sets`` when ``sets_ref is None``, so an uncrossed
        # card reported no mpc ids (breaking raw_fields/write_card)
        self.sets_ref = None

    @property
    def ids(self):
        return self.mpc_ids

    def raw_fields(self):
        # fields in write order
        fields = ['MPCADD', self.conid] + self.mpc_ids
        return fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        # small-field (8-char) format
        card = self.raw_fields()
        return self.comment + print_card_8(card)

    def write_card_16(self, is_double=False):
        # large-field (16-char) format
        card = self.raw_fields()
        return self.comment + print_card_16(card)
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,655
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/bdf_interface/assign_type.py
|
"""Parses Nastran fields"""
import re
import warnings
from typing import Tuple, Union, Optional
from pyNastran.bdf.bdf_interface.bdf_card import BDFCard
from pyNastran.utils.numpy_utils import (
integer_types, integer_float_types, float_types)
#^ - start of string
#[-+]? - an optional (this is what ? means) minus or plus sign
#[0-9]+ - one or more digits (the plus means "one or more" and [0-9] is another way to say \d)
#$ - end of string
RE_INT = re.compile('^[-+]?[0-9]+$', flags=0)
#[-+]? - an optional (this is what ? means) minus or plus sign
# \. - period
# [-|+?] - required negative sign or optional plus sign
# [-|+] - required negative sign or plus sign
# [[0-9]+]? - optional N integers
#
# 1.032
# +1.032
# -1.032
#RE_FLOAT = re.compile('^[-+]?[0-9]+ \. [[0-9]+]$', flags=0)
# 1.032E+02
# +1.032E-02
# -1.032e+2
#RE_FLOAT_E = re.compile('^[-+]?[0-9]+ .[[0-9]+] [e|E] [-|+?] [0-9]+$', flags=0)
# 1.032D+02
# +1.032D-02
# -1.032d+02
#RE_FLOAT_D = re.compile('^[-+]?[0-9]+ .[[0-9]+] [d|D] [-|+?] [0-9]+$', flags=0)
#%e, %E, %f, %g [-+]?(\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?
# 1.032+2
# +1.032-2
# -1.032+2
#RE_FLOAT_SHORT = re.compile('^[-+]?[0-9]+ \. [[0-9]+]? [-+] [0-9]+$', flags=0)
def parse_components(card: BDFCard, ifield: int, fieldname: str) -> str:
    """
    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Returns
    -------
    components : str
        a string of the dofs '0' or '123456' (not all are required)

    Raises
    ------
    SyntaxError
        if the field is blank, float-like, has digits outside 0-6,
        mixes '0' with other dofs, or contains duplicate digits
    """
    assert isinstance(card, BDFCard), type(card)
    assert isinstance(ifield, int), type(ifield)
    assert isinstance(fieldname, str), type(fieldname)
    svalue = card.field(ifield)
    if isinstance(svalue, integer_types):
        pass
    elif svalue is None or '.' in svalue:
        # blank field or a float-looking string; components must be an integer
        dtype = _get_dtype(svalue)
        msg = ('%s = %r (field #%s) on card must be an integer (not %s).\n'
               'card=%s' % (fieldname, svalue, ifield, dtype, card))
        raise SyntaxError(msg)
    try:
        value = int(svalue)
    except ValueError:
        dtype = _get_dtype(svalue)
        msg = ('%s = %r (field #%s) on card must be an integer (not %s).\n'
               'card=%s' % (fieldname, svalue, ifield, dtype, card))
        raise SyntaxError(msg)
    if value > 0 and isinstance(svalue, str):
        if '0' in svalue:
            # '0' cannot be combined with other dof digits
            value2 = str(svalue).replace('0', '')
            msg = ('%s = %r (field #%s) on card must contain 0 or %s (not both).\n'
                   'card=%s' % (fieldname, svalue, ifield, value2, card))
            raise SyntaxError(msg)
    svalue2 = str(value)
    svalue3 = ''.join(sorted(svalue2))  # canonical (sorted) dof string
    for i, component in enumerate(svalue3):
        if component not in '0123456':
            msg = ('%s = %r (field #%s) on card contains an invalid component %r.\n'
                   'card=%s' % (fieldname, svalue, ifield, component, card))
            raise SyntaxError(msg)
        if component in svalue3[i + 1:]:
            # the string is sorted, so any duplicate appears later
            msg = ('%s = %r (field #%s) on card must not contain duplicate entries.\n'
                   'card=%s' % (fieldname, svalue, ifield, card))
            raise SyntaxError(msg)
    return svalue3
def components_or_blank(card: BDFCard,
                        ifield: int,
                        fieldname: str,
                        default: Optional[str]=None) -> Optional[str]:
    """
    Gets the dof components from a field, falling back to ``default``
    when the field is blank.

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : str, None
        the default value for the field (default=None)

    Returns
    -------
    components : str
        a string of the dofs '0' or '123456' (not all are required)
    """
    #assert isinstance(card, BDFCard), type(card)
    assert isinstance(ifield, int), type(ifield)
    assert isinstance(fieldname, str), type(fieldname)
    svalue = card.field(ifield)
    if svalue is None:
        return default
    if isinstance(svalue, integer_types):
        svalue = str(svalue)
    else:
        svalue = svalue.strip()
    # an empty (whitespace-only) field also falls back to the default
    return parse_components(card, ifield, fieldname) if svalue else default
def blank(card: BDFCard, ifield: int, fieldname: str, default=None) -> None:
    """
    Checks that a field is blank; returns ``default`` when it is and
    raises otherwise.

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : None
        the default value for the field (default=None)

    Raises
    ------
    SyntaxError
        if the field is not blank
    """
    assert isinstance(card, BDFCard), type(card)
    assert isinstance(ifield, int), type(ifield)
    assert isinstance(fieldname, str), type(fieldname)
    svalue = card.field(ifield)
    if svalue is None:
        return default
    if isinstance(svalue, str):
        svalue = svalue.strip().upper()
        if not svalue:
            # whitespace-only counts as blank
            return default
    dtype = _get_dtype(svalue)
    raise SyntaxError('%s = %r (field #%s) on card must be blank (not %s).\n'
                      'card=%s' % (fieldname, svalue, ifield, dtype, card))
#def field(card: BDFCard, ifield: int, fieldname: str) -> Optional[Union[int, float, str]]:
#"""
#Parameters
#----------
#card : BDFCard()
#BDF card as a list
#ifield : int
#field number
#fieldname : str
#name of field
#Returns
#-------
#value : int, float, str, None
#the field value
#"""
#assert isinstance(card, BDFCard), type(card)
#assert isinstance(ifield, int), type(ifield)
#assert isinstance(fieldname, str), type(fieldname)
#return integer_double_string_or_blank(card, ifield, fieldname, default=None)
def integer_double_string_or_blank(card: BDFCard, ifield: int, fieldname: str, default=None):
    # type (BDFCard, int, str, Union[int, float, str]) -> Optional[Union[int, float, str]]
    """
    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : int, float, str, None (default=None)
        the default value for the field

    Returns
    -------
    value : int, float, str, None
        the field value
    """
    svalue = card.field(ifield)
    if isinstance(svalue, integer_float_types):
        return svalue
    elif svalue is None:
        return default
    svalue = svalue.strip().upper()
    if svalue:
        # integer/float/string
        if '.' in svalue or '-' in svalue[1:] or '+' in svalue[1:]:
            # float
            try:
                value = double(card, ifield, fieldname)
            except SyntaxError:
                # fall back to the generic field parser
                # (interpret_value is an external helper; presumably it
                # handles the remaining punctuation forms — TODO confirm)
                value = interpret_value(card[ifield], card)
        elif RE_INT.match(svalue):  # svalue[0].isdigit() or svalue[1:].isdigit():
            # int
            try:
                value = int(svalue)
            except ValueError:
                dtype = _get_dtype(svalue)
                msg = ('%s = %r (field #%s) on card must be an integer, float, '
                       'or string (not %s).\n'
                       'card=%s' % (fieldname, svalue, ifield, dtype, card))
                raise SyntaxError(msg)
        elif ' ' in svalue:
            # embedded blanks are never legal in a single field
            raise SyntaxError('%s = %r (field #%s) on card must be an integer, float or string '
                              '(without a blank space).\n'
                              'card=%s' % (fieldname, svalue, ifield, card))
        else:
            value = svalue
        return value
    return default
#def assert_int_bounded_range(card, ifield, fieldname, lower=None, upper=None):
def fields(func, card, fieldname, i, j=None):
    """
    Applies ``func`` to card fields ``[i, j)``; ``j`` defaults to the
    end of the card.

    .. todo:: improve fieldname
    """
    assert isinstance(card, BDFCard), type(card)
    assert isinstance(fieldname, str), type(fieldname)
    if j is None:
        j = len(card)
    return [func(card, ii, fieldname + str(ii)) for ii in range(i, j)]
def modal_components(card: BDFCard, ifield: int, fieldname: str) -> int:
    """
    Gets the modal components (allows a -1 value); used by TIC

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Raises
    ------
    SyntaxError
        if the value is outside [-1, 6]
    """
    value = integer(card, ifield, fieldname)
    if -1 <= value <= 6:
        return value
    raise SyntaxError('%s=%s (field #%s) on card must be an integer '
                      '(-1 <= val <= 6).\n'
                      'card=%s' % (fieldname, value, ifield, card))
def modal_components_or_blank(card: BDFCard, ifield: int, fieldname: str,
                              default: Optional[int]=None) -> Optional[int]:
    """
    Gets the modal components (allows a -1 value); used by TIC

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : int, None
        value used when the field is blank (default=None)

    Raises
    ------
    SyntaxError
        if a non-None value is outside [-1, 6]
    """
    value = integer_or_blank(card, ifield, fieldname, default=default)
    # bug fix: a blank field with default=None previously crashed on the
    # range comparison (TypeError: '<=' not supported for None)
    if value is None:
        return value
    if not(-1 <= value <= 6):
        raise SyntaxError('%s=%s (field #%s) on card must be an integer '
                          '(-1 <= val <= 6).\n'
                          'card=%s' % (fieldname, value, ifield, card))
    return value
def integer(card: BDFCard, ifield: int, fieldname: str) -> int:
    """
    Casts a value to an integer

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Raises
    ------
    SyntaxError
        if the field is a float, blank, or not parseable as an integer
    """
    svalue = card.field(ifield)
    if isinstance(svalue, float_types):
        # floats are rejected outright (unlike force_integer, which truncates)
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    try:
        return int(svalue)
    except(ValueError, TypeError):
        # TypeError: None (blank); ValueError: non-integer string
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
def force_integer(card: BDFCard, ifield: int, fieldname: str) -> int:
    """see ``integer``"""
    svalue = card.field(ifield)
    if isinstance(svalue, float_types):
        # unlike ``integer``, a float value only warns and is truncated
        warnings.warn('%s = %r (field #%s) on card must be an integer (not a double).\n'
                      'card=%s' % (fieldname, svalue, ifield, card))
        return int(svalue)
    try:
        return int(svalue)
    except(ValueError, TypeError):
        # TypeError: None (blank); ValueError: non-integer string
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
def integer_or_blank(card: BDFCard, ifield: int, fieldname: str, default: Optional[int]=None):
    # (card, ifield, fieldname, default) -> Optional[int]
    """
    Casts a value to an integer

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : int, None
        the default value for the field (default=None)
    """
    svalue = card.field(ifield)
    if isinstance(svalue, integer_types):
        return svalue
    elif svalue is None:
        return default
    elif isinstance(svalue, str):
        if len(svalue) == 0:
            return default
        elif '.' in svalue or '-' in svalue[1:] or '+' in svalue[1:]:
            # a decimal point or an embedded sign marks a float
            # (e.g., '1.0' or the Nastran shorthand '1-3' = 1.0E-3)
            dtype = _get_dtype(svalue)
            raise SyntaxError('%s = %r (field #%s) on card must be an integer or blank (not %s).\n'
                              'card=%s' % (fieldname, svalue, ifield, dtype, card))
        try:
            return int(svalue)
        except(ValueError, TypeError):
            dtype = _get_dtype(svalue)
            raise SyntaxError('%s = %r (field #%s) on card must be an integer or blank (not %s).\n'
                              'card=%s' % (fieldname, svalue, ifield, dtype, card))
    # float objects (and anything else) land here
    dtype = _get_dtype(svalue)
    raise SyntaxError('%s = %r (field #%s) on card must be an integer (not %s).\n'
                      'card=%s' % (fieldname, svalue, ifield, dtype, card))
def force_integer_or_blank(card: BDFCard, ifield: int, fieldname: str, default: Optional[int]=None):
    # (card, ifield, fieldname, default) -> Optional[int]
    """see ``integer_or_blank``; a float-style string (e.g., '4.') is
    truncated to an int instead of raising"""
    svalue = card.field(ifield)
    if isinstance(svalue, integer_types):
        return svalue
    elif svalue is None:
        return default
    elif isinstance(svalue, str):
        # bug fix: the '.' check was previously done before the isinstance
        # test, so an actual float object crashed with a TypeError on
        # ``'.' in svalue`` instead of reaching the SyntaxError below
        if len(svalue) == 0:
            return default
        elif '.' in svalue:
            # float-style field; truncate to an integer
            fvalue = force_double(card, ifield, fieldname)
            # TODO: warn if not a whole number
            return int(fvalue)
        elif '-' in svalue[1:] or '+' in svalue[1:]:
            # embedded sign = Nastran float shorthand (e.g., '1-3' = 1.0E-3)
            dtype = _get_dtype(svalue)
            raise SyntaxError('%s = %r (field #%s) on card must be an integer or blank (not %s).\n'
                              'card=%s' % (fieldname, svalue, ifield, dtype, card))
        try:
            return int(svalue)
        except(ValueError, TypeError):
            dtype = _get_dtype(svalue)
            raise SyntaxError('%s = %r (field #%s) on card must be an integer or blank (not %s).\n'
                              'card=%s' % (fieldname, svalue, ifield, dtype, card))
    # float objects (and anything else) land here
    dtype = _get_dtype(svalue)
    raise SyntaxError('%s = %r (field #%s) on card must be an integer (not %s).\n'
                      'card=%s' % (fieldname, svalue, ifield, dtype, card))
def double(card: BDFCard, ifield: int, fieldname: str) -> float:
    """
    Casts a value to an double

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Returns
    -------
    value : float
        the value from the desired field

    Raises
    ------
    SyntaxError
        if the field is an integer, blank, or not parseable as a
        Nastran-style float
    """
    svalue = card.field(ifield)
    if isinstance(svalue, float_types):
        return svalue
    elif isinstance(svalue, integer_types):
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a float (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    elif svalue is None or len(svalue) == 0:  ## None
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a float (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    if svalue.isdigit():  # 1, not +1, or -1
        # if only int
        raise SyntaxError('%s = %r (field #%s) on card must be a float (not an integer).\n'
                          'card=%s' % (fieldname, svalue, ifield, card))
    try:
        # 1.0, 1.0E+3, 1.0E-3
        value = float(svalue)
    except TypeError:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a float (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    except ValueError:
        # 1D+3, 1D-3, 1-3
        try:
            svalue = svalue.upper()
            if 'D' in svalue:
                # 1.0D+3, 1.0D-3 : Fortran-style double-precision exponent
                svalue2 = svalue.replace('D', 'E')
                return float(svalue2)
            # 1.0+3, 1.0-3 : exponent shorthand with the 'E' omitted;
            # strip a leading sign first so it isn't treated as the exponent
            sign = ''
            if svalue[0] in ('+', '-'):
                sign = svalue[0]
                svalue = svalue[1:]
            if '+' in svalue:
                svalue = sign + svalue.replace('+', 'E+')
            elif '-' in svalue:
                svalue = sign + svalue.replace('-', 'E-')
            value = float(svalue)
        except ValueError:
            dtype = _get_dtype(svalue)
            raise SyntaxError('%s = %r (field #%s) on card must be a float (not %s).\n'
                              'card=%s' % (fieldname, svalue, ifield, dtype, card))
    return value
def force_double(card: BDFCard, ifield: int, fieldname: str) -> float:
    """
    see ``double``; identical except an integer field is degraded to a
    warning (and coerced to a float) instead of raising

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Returns
    -------
    value : float
        the value from the desired field
    """
    svalue = card.field(ifield)
    if isinstance(svalue, float_types):
        return svalue
    elif isinstance(svalue, integer_types):
        dtype = _get_dtype(svalue)
        warnings.warn('%s = %r (field #%s) on card must be a float (not %s).\n'
                      'card=%s' % (fieldname, svalue, ifield, dtype, card))
        # bug fix: the original returned float(value), but ``value`` is
        # undefined at this point (NameError); the field value is ``svalue``
        return float(svalue)
    elif svalue is None or len(svalue) == 0:  ## None
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a float (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    if svalue.isdigit():  # 1, not +1, or -1
        # if only int
        raise SyntaxError('%s = %r (field #%s) on card must be a float (not an integer).\n'
                          'card=%s' % (fieldname, svalue, ifield, card))
    try:
        # standard notation: 1.0, 1.0E+3, 1.0E-3
        value = float(svalue)
    except TypeError:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a float (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    except ValueError:
        # Nastran shorthands: 1D+3, 1D-3, 1-3
        try:
            svalue = svalue.upper()
            if 'D' in svalue:
                # double-precision exponent marker: 1.0D+3, 1.0D-3
                svalue2 = svalue.replace('D', 'E')
                return float(svalue2)
            # implicit exponent: 1.0+3, 1.0-3
            sign = ''
            if svalue[0] in ('+', '-'):
                sign = svalue[0]
                svalue = svalue[1:]
            if '+' in svalue:
                svalue = sign + svalue.replace('+', 'E+')
            elif '-' in svalue:
                svalue = sign + svalue.replace('-', 'E-')
            value = float(svalue)
        except ValueError:
            dtype = _get_dtype(svalue)
            raise SyntaxError('%s = %r (field #%s) on card must be a float (not %s).\n'
                              'card=%s' % (fieldname, svalue, ifield, dtype, card))
    return value
def double_or_blank(card: BDFCard, ifield: int, fieldname: str,
                    default: Optional[Union[float]]=None):
    # (card, ifield, fieldname, default) -> Optional[Union[float]]
    """
    Casts a value to a double, falling back to ``default`` for a blank field.

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : float / None
        the default value for the field (default=None)
    """
    svalue = card.field(ifield)
    if isinstance(svalue, float_types):
        return svalue
    if isinstance(svalue, integer_types):
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a float or blank (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    if not isinstance(svalue, str):
        # None (or anything else unexpected) -> use the default
        return default

    cleaned = svalue.strip().upper()
    if not cleaned:
        return default
    try:
        return double(card, ifield, fieldname)
    except Exception:
        # a lone '.' is treated as 0.0
        if cleaned == '.':
            return 0.
        dtype = _get_dtype(cleaned)
        raise SyntaxError('%s = %r (field #%s) on card must be a float or blank (not %s).\n'
                          'card=%s' % (fieldname, cleaned, ifield, dtype, card))
def force_double_or_blank(card: BDFCard, ifield: int, fieldname: str, default: Optional[Union[float]]=None):
    # (card, ifield, fieldname, default) -> Optional[Union[float]]
    """
    see ``double_or_blank``; integer fields are degraded to a warning
    (and coerced to float) instead of raising

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : float / None
        the default value for the field (default=None)
    """
    svalue = card.field(ifield)
    if isinstance(svalue, float_types):
        return svalue
    elif isinstance(svalue, integer_types):
        fvalue = float(svalue)
        # bug fix: the format string has 5 placeholders, but ``fvalue`` was
        # missing from the argument tuple, so this warn() raised a TypeError;
        # the string branch below shows the intended argument list
        warnings.warn('%s = %r (field #%s) on card must be a float or blank (not an integer) -> %s.\n'
                      'card=%s' % (fieldname, svalue, ifield, fvalue, card))
        return fvalue
    elif isinstance(svalue, str):
        try:
            ivalue = int(svalue)
            fvalue = float(ivalue)
            warnings.warn('%s = %r (field #%s) on card must be a float or blank (not an integer) -> %s.\n'
                          'card=%s' % (fieldname, svalue, ifield, fvalue, card))
            return fvalue
        except Exception:
            svalue = svalue.strip().upper()
            if not svalue:
                return default
            try:
                return double(card, ifield, fieldname)
            except Exception:
                # a lone '.' is treated as 0.0
                if svalue == '.':
                    return 0.
                dtype = _get_dtype(svalue)
                raise SyntaxError('%s = %r (field #%s) on card must be a float or blank (not %s).\n'
                                  'card=%s' % (fieldname, svalue, ifield, dtype, card))
    return default
def double_or_string(card: BDFCard, ifield: int, fieldname: str) -> Union[float, str]:
    """
    Casts a value to a double/string

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Returns
    -------
    value : float / str
        the typed value of the field

    Raises
    ------
    SyntaxError
        if the field is blank, an integer, or neither a valid float nor a
        valid space-free, non-numeric-looking string
    """
    svalue = card.field(ifield)
    if isinstance(svalue, float_types):
        return svalue
    elif svalue is None or isinstance(svalue, integer_types):
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an float or string (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    elif isinstance(svalue, str):
        svalue = svalue.strip().upper()
        # '.', or an embedded (non-leading) +/-, means it looks like a float
        if '.' in svalue or '-' in svalue[1:] or '+' in svalue[1:]:
            # float
            try:
                return double(card, ifield, fieldname)
            except Exception:
                dtype = _get_dtype(svalue)
                raise SyntaxError('%s = %r (field #%s) on card must be an float or string (not %s).\n'
                                  'card=%s' % (fieldname, svalue, ifield, dtype, card))
        elif svalue.isdigit():  # 1, not +1, or -1
            # pure integer is invalid; fall through to the final raise
            pass
        elif svalue:
            # string: must be space-free and must not look numeric
            if ' ' in svalue:
                dtype = _get_dtype(svalue)
                raise SyntaxError('%s = %r (field #%s) on card must be an float or '
                                  'string (without a blank space; not %s).\n'
                                  'card=%s' % (fieldname, svalue, ifield, dtype, card))
            elif svalue[0].isdigit() or '.' in svalue or '+' in svalue or '-' in svalue:
                dtype = _get_dtype(svalue)
                raise SyntaxError('%s = %r (field #%s) on card must be an float or '
                                  'string (without a blank space; not %s).\n'
                                  'card=%s' % (fieldname, svalue, ifield, dtype, card))
            return str(svalue)
    dtype = _get_dtype(svalue)
    raise SyntaxError('%s = %r (field #%s) on card must be an float or string (not %s).\n'
                      'card=%s' % (fieldname, svalue, ifield, dtype, card))
def double_string_or_blank(card: BDFCard, ifield: int, fieldname: str, default=None):
    # type (BDFCard, int, str, Optional[Union[float, str]]) -> Optional[Union[float, str]]
    """
    Casts a value to a double/string/blank

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : float / str / None
        the default value for the field (default=None)

    Returns
    -------
    value : float / str / None
        the typed value

    :raises SyntaxError: if there is an invalid type
    """
    svalue = card.field(ifield)
    if isinstance(svalue, float_types):
        return svalue
    elif svalue is None:
        return default
    elif isinstance(svalue, str):
        svalue = svalue.strip().upper()
    elif isinstance(svalue, integer_types):
        dtype = _get_dtype(svalue)
        msg = ('%s = %r (field #%s) on card must be an float, string, or blank (not %s).\n'
               'card=%s' % (fieldname, svalue, ifield, dtype, card))
        raise SyntaxError(msg)

    # '.', or an embedded (non-leading) +/-, means it looks like a float
    if '.' in svalue or '-' in svalue[1:] or '+' in svalue[1:]:
        try:
            return double(card, ifield, fieldname)
        except Exception:
            dtype = _get_dtype(svalue)
            raise SyntaxError('%s = %r (field #%s) on card must be a float, string '
                              'or blank (not %s).\n'
                              'card=%s' % (fieldname, svalue, ifield, dtype, card))
    elif svalue.isdigit():  # 1, not +1, or -1
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a float, string or blank (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    elif svalue == '':
        return default

    if ' ' in svalue:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an float, '
                          'string (without a blank space) or blank (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    # string (already stripped/upper-cased)
    return svalue
def integer_or_double(card: BDFCard, ifield: int, fieldname: str) -> Union[int, float]:
    """
    Casts a value to an integer/double

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Returns
    -------
    value : int/float
        the value with the proper type

    :raises SyntaxError: if there's an invalid type
    """
    svalue = card.field(ifield)
    if isinstance(svalue, integer_float_types):
        return svalue
    if svalue is None:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer or float (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))

    # '.', or an embedded (non-leading) +/- exponent, marks a float
    looks_like_float = '.' in svalue or '-' in svalue[1:] or '+' in svalue[1:]
    if looks_like_float:
        try:
            return double(card, ifield, fieldname)
        except ValueError:
            dtype = _get_dtype(svalue)
            raise SyntaxError('%s = %r (field #%s) on card must be a integer or a float (not %s).\n'
                              'card=%s' % (fieldname, svalue, ifield, dtype, card))

    # plain integer; fall back to the general interpreter on failure
    try:
        return int(svalue)
    except (ValueError, TypeError):
        fallback = interpret_value(svalue, card)
        if isinstance(fallback, (int, float)):
            return fallback
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer or a '
                          'float (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
def integer_double_or_blank(card: BDFCard, ifield: int, fieldname: str, default=None):
    """
    Casts a value to an integer/double, falling back to ``default``
    for a blank field.

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : int / float / None
        the default value for the field (default=None)
    """
    svalue = card.field(ifield)
    if isinstance(svalue, integer_float_types):
        return svalue
    if svalue is None or not svalue:
        # blank field
        return default

    try:
        return integer_or_double(card, ifield, fieldname)
    except Exception:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer, float, or '
                          'blank (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
def integer_or_string(card: BDFCard, ifield: int, fieldname: str) -> Union[int, str]:
    """
    Casts a value to an integer/string

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Returns
    -------
    value : int / str
        the typed value of the field

    :raises SyntaxError: if the field is blank, a float, or an invalid string
    """
    svalue = card.field(ifield)
    if isinstance(svalue, integer_types):
        return svalue
    elif isinstance(svalue, float_types):
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer or string (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    elif svalue is None:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer or string (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))

    # try a plain integer first
    try:
        value = int(svalue)
        return value
    except ValueError:
        pass

    if svalue[0].isdigit():
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer or string (not %s; '
                          'strings must start with a character).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    elif ' ' in svalue:
        raise SyntaxError('%s = %r (field #%s) on card must be an integer or string '
                          '(without a blank space).\n'
                          'card=%s' % (fieldname, svalue, ifield, card))

    # string; if it parses as a float, it's not a legal string
    # (``double`` either returns a float or raises SyntaxError,
    # so the SyntaxError path below is the only way to return)
    try:
        value = double(card, ifield, fieldname)
    except SyntaxError:
        return str(svalue.upper())

    if isinstance(value, float_types):
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer or string (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
def integer_string_or_blank(card: BDFCard, ifield: int, fieldname: str, default=None):
    """
    Casts a value to an integer/string, falling back to ``default`` for a
    blank field.

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : int, str, None
        the default value for the field (default=None)
    """
    svalue = card.field(ifield)
    if isinstance(svalue, integer_types):
        return svalue
    if svalue is None:
        return default
    if isinstance(svalue, float_types):
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer or string (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))

    cleaned = svalue.strip()
    if not cleaned:
        return default
    # integer/string
    try:
        return integer_or_string(card, ifield, fieldname)
    except Exception:
        dtype = _get_dtype(cleaned)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer, '
                          'string (without a blank space), or blank (not %s).\n'
                          'card=%s' % (fieldname, cleaned, ifield, dtype, card))
def _get_dtype(value):
    """
    Get the type of the input value in a form that is clear
    (used to build the error messages of the field parsers).

    Parameters
    ----------
    value : int/float/str/None
        the value to get the type of

    Returns
    -------
    dtype : str
        a human-readable description of the value's type
    """
    # normalize Nastran-formatted strings first so '1.0-3' reports as a float
    try:
        value = interpret_value(value)
    except Exception:
        pass

    if value is None:
        return 'blank'
    if isinstance(value, integer_types):
        return 'an integer'
    if isinstance(value, float):
        return 'a double value=%r' % value
    if isinstance(value, str):
        return 'a string'
    return str(type(value))
def integer_double_or_string(card: BDFCard, ifield: int, fieldname: str) -> Union[int, float, str]:
    """
    Casts a value to an integer/double/string

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Returns
    -------
    value : varies
        the value of the field

    :raises SyntaxError: if the field is blank or of an invalid form
    """
    svalue = card.field(ifield)
    if isinstance(svalue, integer_float_types):
        return svalue
    elif svalue is None:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be an integer or float (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))

    svalue = str(svalue.strip())
    if svalue:  # integer/float/string
        if '.' in svalue or '-' in svalue or '+' in svalue:
            # float
            value = double(card, ifield, fieldname)
        elif svalue.isdigit():  # 1, not +1, or -1
            # int
            try:
                value = int(svalue)
            except(ValueError, TypeError):
                raise SyntaxError('%s = %r (field #%s) on card must be an integer, float, '
                                  'or string (not blank).\n'
                                  'card=%s' % (fieldname, svalue, ifield, card))
        elif ' ' in svalue:
            raise SyntaxError('%s = %r (field #%s) on card must be an integer, float, or string '
                              '(not a string with a blank).\n'
                              'card=%s' % (fieldname, svalue, ifield, card))
        elif svalue[0].isdigit():
            raise SyntaxError('%s = %r (field #%s) on card must be an integer, float, or string '
                              '(not a string with a leading integer).\n'
                              'card=%s' % (fieldname, svalue, ifield, card))
        else:
            # string (or anything interpret_value can make of it)
            value = interpret_value(svalue, card)
        return value
    dtype = _get_dtype(svalue)
    raise SyntaxError('%s = %r (field #%s) on card must be an integer, float, or string (not %s).\n'
                      'card=%s' % (fieldname, svalue, ifield, dtype, card))
def string(card: BDFCard, ifield: int, fieldname: str) -> str:
    """
    Casts a value to a string

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field

    Returns
    -------
    value : str
        the stripped, upper-cased value of the field

    Raises
    ------
    SyntaxError
        if the field is not a string, is blank, contains a space, or
        looks numeric
    """
    svalue = card.field(ifield)
    if isinstance(svalue, str):
        svalue = svalue.strip()
        if ' ' in svalue:
            raise SyntaxError('%s = %r (field #%s) on card must be a string without a space.\n'
                              'card=%s' % (fieldname, svalue, ifield, card))
    else:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a string (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))

    # bug fix: guard against an empty (all-blank) field; svalue[0] previously
    # raised an IndexError instead of the final SyntaxError below
    if svalue and (svalue[0].isdigit() or '.' in svalue or '+' in svalue or '-' in svalue[0]):
        value = integer_or_double(card, ifield, fieldname)
        dtype = _get_dtype(value)
        raise SyntaxError('%s = %r (field #%s) on card must be a '
                          'string with a character (not %s).\n'
                          'card=%s' % (fieldname, value, ifield, dtype, card))
    if svalue:  # string
        return str(svalue.upper())

    dtype = _get_dtype(svalue)
    raise SyntaxError('%s = %r (field #%s) on card must be a string (not %s).\n'
                      'card=%s' % (fieldname, svalue, ifield, dtype, card))
def check_string(svalue: str, ifield: int, fieldname: str) -> str:
    """
    Validates an already-extracted field value as a legal string.

    strings can't have the following characters: ' '
    strings can't have the following characters in the 0th position:
    '.', '+', '-', 1-9

    Parameters
    ----------
    svalue : str
        the raw field value
    ifield : int
        field number (used in error messages)
    fieldname : str
        name of field (used in error messages)

    Returns
    -------
    value : str
        the stripped, upper-cased string

    Raises
    ------
    SyntaxError
        if the value is not a string, is blank, contains a space, or
        looks numeric
    """
    if isinstance(svalue, str):
        svalue = svalue.strip()
        if ' ' in svalue:
            raise SyntaxError('%s = %r (field #%s) on card must be a string without a space.\n' % (
                fieldname, svalue, ifield))
    else:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a string (not %s).\n' % (
            fieldname, svalue, ifield, dtype))

    # bug fix: guard against an empty value; svalue[0] previously raised an
    # IndexError instead of falling through to the final SyntaxError below
    if svalue and (svalue[0].isdigit() or '.' in svalue or '+' in svalue or '-' in svalue[0]):
        #value = integer_or_double(card, ifield, fieldname)
        #dtype = _get_dtype(value)
        raise SyntaxError('%s = %s (field #%s) on card must be a '
                          'string with a character.\n' % (
                              fieldname, svalue, ifield))
    if svalue:  # string
        return str(svalue.upper())
    dtype = _get_dtype(svalue)
    raise SyntaxError('%s = %r (field #%s) on card must be a string (not %s).\n' % (
        fieldname, svalue, ifield, dtype))
def string_or_blank(card: BDFCard, ifield: int, fieldname: str, default=None):
    """
    Casts a value to a string, falling back to ``default`` for a blank field.

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : str, None
        the default value for the field (default=None)

    Returns
    -------
    value : varies
        the value of the field

    :raises SyntaxError: if the field is not a string/blank or looks numeric
    """
    svalue = card.field(ifield)
    if svalue is None:
        return default
    elif isinstance(svalue, str):
        svalue = svalue.strip().upper()
        # bug fix: an all-blank field previously crashed with an IndexError
        # on svalue[0]; a blank field means "use the default"
        if not svalue:
            return default
        if ' ' in svalue:
            raise SyntaxError('%s = %r (field #%s) on card must be a string without a space.\n'
                              'card=%s' % (fieldname, svalue, ifield, card))
        if svalue[0].isdigit() or '.' in svalue or '+' in svalue or '-' in svalue[0]:
            chars = ''.join(list(set('%s.+-' % svalue[0] if svalue[0].isdigit() else '')))
            raise SyntaxError('%s = %r (field #%s) on card must not have the '
                              'following characters %s\n'
                              'card=%s' % (fieldname, svalue, ifield, chars, card))
    else:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a string (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))

    svalue = svalue.strip()
    if svalue.isdigit() or '.' in svalue or '+' in svalue or '-' in svalue[0]:
        # integer or float
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a string or blank (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    if svalue:  # string
        return str(svalue.upper())
    return default
def string_choice_or_blank(card: BDFCard, ifield: int, fieldname: str, choices: Tuple[str], default=None):
    """
    Casts a value to one of a fixed set of strings, falling back to
    ``default`` for a blank field.

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    choices : tuple[str, ...]
        the allowed (upper-case) values
    default : str, None
        the default value for the field (default=None)

    Returns
    -------
    value : varies
        the value of the field
    """
    svalue = card.field(ifield)
    if svalue is None:
        return default
    if not isinstance(svalue, str):
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a string (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))

    cleaned = svalue.strip().upper()
    if not cleaned:
        # an all-blank string field also means "use the default"
        return default
    if cleaned not in choices:
        raise RuntimeError(f'{fieldname} = {cleaned} (field #{ifield}) on card is a string, but must be {choices}.\n'
                           f'card={card}')
    return cleaned
def filename_or_blank(card: BDFCard, ifield: int, fieldname: str, default=None):
    """
    Used by the MKAEROZ to read a filename
    (like ``string_or_blank``, but a '.' is allowed in the value)

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : str, None
        the default value for the field (default=None)

    Returns
    -------
    value : varies
        the value of the field
    """
    svalue = card.field(ifield)
    if svalue is None:
        return default
    elif isinstance(svalue, str):
        svalue = svalue.strip().upper()
        # bug fix: an all-blank field previously crashed with an IndexError
        # on svalue[0]; a blank field means "use the default"
        if not svalue:
            return default
        if ' ' in svalue:
            raise SyntaxError('%s = %r (field #%s) on card must be a string without a space.\n'
                              'card=%s' % (fieldname, svalue, ifield, card))
        if svalue[0].isdigit() or '+' in svalue or '-' in svalue[0]:
            chars = ''.join(list(set('%s+-' % svalue[0] if svalue[0].isdigit() else '')))
            raise SyntaxError('%s = %r (field #%s) on card must not have the '
                              'following characters %s\n'
                              'card=%s' % (fieldname, svalue, ifield, chars, card))
    else:
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a string (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))

    svalue = svalue.strip()
    if svalue.isdigit() or '+' in svalue or '-' in svalue[0]:
        # integer or float
        dtype = _get_dtype(svalue)
        raise SyntaxError('%s = %r (field #%s) on card must be a string or blank (not %s).\n'
                          'card=%s' % (fieldname, svalue, ifield, dtype, card))
    if svalue:  # string
        return str(svalue.upper())
    return default
def loose_string(card: BDFCard, ifield: int, fieldname: str, default=None):
    """
    The most lenient of the string checks: anything goes except a value
    whose first character is a digit. Used for LABELs on DRESP1; this
    will be tightened up as necessary.

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : str, None
        returned when the field is blank (default=None)

    Returns
    -------
    value : str / None
        the upper-cased value of the field, or ``default`` if blank

    :raises SyntaxError: if the value starts with a digit
    """
    svalue = card.field(ifield)
    # bug fix: a blank field previously crashed on None.upper()
    if svalue is None:
        return default
    svalue = str(svalue.upper())
    # bug fix: was ``svalue[0].svalue.isdigit()``, which raised an
    # AttributeError for every value; also guard the empty string
    if svalue and svalue[0].isdigit():
        raise SyntaxError('%s = %r (field #%s) on card must not have an integer as the first character.\n'
                          'card=%s' % (fieldname, svalue, ifield, card))
    # bug fix: the original returned ``default`` here, discarding the value
    # it just validated (contradicting the documented return)
    return svalue
def exact_string_or_blank(card: BDFCard, ifield: int, fieldname: str, default=None):
    """
    Reads a field without stripping/upper-casing it, left-padded to a
    minimum of 8 characters; a blank field returns ``default``.

    Parameters
    ----------
    card : BDFCard()
        BDF card as a list
    ifield : int
        field number
    fieldname : str
        name of field
    default : str, None
        the default value for the field (default=None)

    Returns
    -------
    value : varies
        the value of the field
    """
    raw = card.field(ifield)
    if raw is None:
        return default
    padded = '%-8s' % raw
    if padded == '':
        # kept for parity with the blank handling; '%-8s' pads to a
        # minimum width of 8, so this cannot actually be empty
        return default
    return padded
# int - done
# int/blank - done
# int/float - done
# int/float/blank - done
# int/float/string - done
# int/float/string/blank - done
# int/string - done
# int/string/blank - done
# float - done
# float/blank - done
# float/string - done
# float/string/blank - done
# string - done
# string/blank - done
def interpret_value(value_raw: Optional[str],
                    card: Union[str, BDFCard]='') -> Union[int, float, str, None]:
    """
    Converts a value from nastran format into python format.

    Parameters
    ----------
    value_raw : str / int / float / None
        a string representation of a value
    card : str / BDFCard; default=''
        the source card; only used in error messages

    Returns
    -------
    value : int / float / str / None
        the Python representation of the value
    """
    if value_raw is None:
        return None

    try:
        # strip leading blanks and any trailing ' ' / '*' characters
        value_in = value_raw.lstrip().rstrip(' *').upper()
    except AttributeError:
        # it's already an int/float
        msg = 'value_raw=%s type=%s' % (value_raw, type(value_raw))
        assert isinstance(value_raw, integer_float_types), msg
        return value_raw

    if len(value_in) == 0:
        # blank / None
        return None

    if value_in[0].isalpha():
        # string
        return value_in
    if '=' in value_in or '(' in value_in or '*' in value_raw:
        # special characters; return the value untouched for the caller
        # (presumably Nastran replication syntax -- verify with callers)
        return value_raw.strip()

    # int, float, string, exponent
    value_positive = value_in.strip('+-')
    if value_positive.isdigit():
        # int
        return int(value_in)
    try:
        value = float(value_in)
        # float
        return value
    except ValueError:
        pass

    #if('=' in value_in or '(' in value_in or ')' in value_in):
        #print("=()!")
        #return value_in

    # if there are non-floats/scientific notation -> string
    no_ed = list(set(value_in) - set('ED 1234567890+-'))
    word = ''.join(no_ed)
    if word.isalpha():
        # word
        return value_in

    # from here on, assume scientific notation with an implicit exponent
    # (e.g. 1.0-3); determine the sign of the mantissa first
    val0 = value_in[0]
    if val0 in ('+', '-'):
        # truncate the sign for now
        value_left = value_in[1:]
    else:
        # inplied positive value
        val0 = '+'
        value_left = value_in

    if val0 == '-':
        factor = -1.
    elif val0 == '+' or val0.isdigit():
        factor = 1.
    else:
        raise SyntaxError('the only 2 cases for a float/scientific are +/- for v0...'
                          'value_raw=%r val0=%r card=%s' % (value_raw, val0, card))

    # dont include the 1st character, find the exponent
    val_minus = value_in.find('-', 1)
    val_plus = value_in.find('+', 1)
    if val_minus > 0:
        sline = value_left.split('-')
        exp_factor = -1.
    elif val_plus > 0:
        sline = value_left.split('+')
        exp_factor = 1.
    else:
        card_msg = ''
        if card:
            card_msg = 'card = %s\n' % card
        msg = ("I thought this was in scientific notation, but I can't find "
               "the exponent sign...\n"
               "value_raw=%r value_left=%r\n%s"
               "You also might have mixed tabs/spaces/commas or misaligned fields."
               % (value_raw, value_left, card_msg))
        raise SyntaxError(msg)

    if sline[0][-1] == 'D':
        # double-precision exponent marker (e.g. 1.0D-3); drop the D
        sline[0] = sline[0][:-1]
    try:
        # mantissa and (signed) exponent
        sci0 = factor * float(sline[0])
        sci1 = exp_factor * int(sline[1])
    except ValueError:
        msg = "val_minus=%s val_plus=%s value_raw=%r" % (val_minus, val_plus, value_raw)
        raise SyntaxError("cannot parse '%s' into a float and '%s' "
                          'into an integer\n%s\nYou HAVE mixed '
                          'tabs/spaces/commas!' % (sline[0], sline[1], msg))
    value = sci0 * 10 ** sci1
    # scientific
    return value
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,656
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/op2/tables/geom/edt.py
|
"""
defines readers for BDF objects in the OP2 EDT/EDTS table
"""
from __future__ import annotations
from struct import Struct
from typing import Tuple, List, Any, TYPE_CHECKING
import numpy as np
from pyNastran.bdf.cards.aero.aero import (
#AECOMP, AECOMPL, AEFACT, AELINK, AELIST, AEPARM, AESURF, AESURFS,
#CAERO1, CAERO2, CAERO3, CAERO4, CAERO5,
#PAERO1, PAERO2, PAERO3, PAERO4, PAERO5,
MONPNT1, MONPNT2, MONPNT3, MONDSP1,
#SPLINE1, SPLINE2, SPLINE3,
SPLINE4, SPLINE5)
from pyNastran.op2.errors import DoubleCardError
from pyNastran.op2.op2_interface.op2_reader import mapfmt, reshape_bytes_block, reshape_bytes_block_size
from pyNastran.bdf.cards.elements.acoustic import ACMODL
from .utils import get_minus1_start_end
if TYPE_CHECKING: # pragma: no cover
from pyNastran.op2.op2_geom import OP2Geom
class EDT:
"""defines methods for reading aero and element deformations"""
@property
def size(self) -> int:
return self.op2.size
@property
def factor(self) -> int:
return self.op2.factor
def _read_fake(self, data: bytes, n: int) -> int:
return self.op2._read_fake(data, n)
def read_edt_4(self, data: bytes, ndata: int):
"""
3.21 EDT
Aero and element deformations.
"""
return self.op2._read_geom_4(self.edt_map, data, ndata)
    def __init__(self, op2: OP2Geom):
        """
        Stores a back-reference to the OP2 reader and builds the
        record-marker -> (card name, reader method) dispatch table
        used by ``read_edt_4``.
        """
        self.op2 = op2

        # example files these record markers were observed in:
        # F:\Program Files\Siemens\NXNastran\nxn10p1\nxn10p1\nast\tpl\fsw_eng.op2
        # F:\work\pyNastran\pyNastran\master2\pyNastran\bdf\test\nx_spike\out_boltld04i.op2
        # F:\work\pyNastran\pyNastran\master2\pyNastran\bdf\test\nx_spike\out_eliter17.op2
        # F:\work\pyNastran\pyNastran\master2\pyNastran\bdf\test\nx_spike\out_weld01i.op2
        # F:\work\pyNastran\examples\Dropbox\move_tpl\ac10901a_new.op2

        # key: the 3-integer record marker from the EDT table;
        # value: [card name, bound reader method]
        self.edt_map = {
            (5201, 52, 373) : ['ACMODL', self._read_acmodl],
            (6301, 63, 397) : ['ADAPT', self._read_fake],
            (7801, 78, 582) : ['AECOMP', self._read_aecomp],
            (7901, 79, 583) : ['AECOMPL', self._read_aecompl],
            (7301, 73, 574) : ['AEDW', self._read_fake],
            (4002, 40, 273) : ['AEFACT', self._read_aefact],
            (7501, 75, 576) : ['AEFORCE', self._read_aeforce],
            (2602, 26, 386) : ['AELINK', self._read_aelink],
            (2302, 23, 341) : ['AELIST', self._read_aelist],
            (7001, 70, 571) : ['AEPARM', self._read_fake],
            (7401, 74, 575) : ['AEPRESS', self._read_aepress],
            (3202, 32, 265) : ['AERO', self._read_aero],
            (2202, 22, 340) : ['AEROS', self._read_aeros],
            (2102, 21, 339) : ['AESTAT', self._read_aestat],
            (2002, 20, 338) : ['AESURF', self._read_aesurf],
            (7701, 77, 581) : ['AESURFS', self._read_aesurfs],
            (3002, 30, 263) : ['CAERO1', self._read_caero1],
            (4301, 43, 167) : ['CAERO2', self._read_caero2],
            (4401, 44, 168) : ['CAERO3', self._read_caero3],
            (4501, 45, 169) : ['CAERO4', self._read_caero4],
            (5001, 50, 175) : ['CAERO5', self._read_caero5],
            (6201, 62, 143) : ['CLOAD', self._read_fake],
            (6401, 64, 307) : ['CSSCHD', self._read_csschd],
            (104, 1, 81) : ['DEFORM', self._read_deform],
            (2702, 27, 387) : ['DIVERG', self._read_diverg],
            (4102, 41, 274) : ['FLFACT', self._read_flfact],
            (3902, 39, 272) : ['FLUTTER', self._read_flutter],
            (17400, 174, 616) : ['GROUP', self._read_group],
            (3802, 38, 271) : ['MKAERO1', self._read_mkaero1],
            (3702, 37, 270) : ['MKAERO2', self._read_mkaero2],
            (7601, 76, 577) : ['MONPNT1', self._read_monpnt1],
            (3102, 31, 264) : ['PAERO1', self._read_paero1],
            (4601, 46, 170) : ['PAERO2', self._read_paero2],
            (4701, 47, 171) : ['PAERO3', self._read_fake],
            (4801, 48, 172) : ['PAERO4', self._read_fake],
            (5101, 51, 176) : ['PAERO5', self._read_paero5],
            (5301, 53, 378) : ['PANEL', self._read_panel],
            (3502, 35, 268) : ['SET1', self._read_set1],
            (3602, 36, 269) : ['SET2', self._read_set2],
            (4302, 43, 607) : ['SET3', self._read_set3],
            (3302, 33, 266) : ['SPLINE1', self._read_spline1],
            (3402, 34, 267) : ['SPLINE2', self._read_spline2],
            (4901, 49, 173) : ['SPLINE3', self._read_spline3],
            (6501, 65, 308) : ['SPLINE4', self._read_spline4],
            (6601, 66, 309) : ['SPLINE5', self._read_spline5],
            (2402, 24, 342) : ['TRIM', self._read_trim],
            (7201, 72, 573) : ['UXVEC', self._read_fake],
            (7108, 822, 51) : ['BOLT', self._read_fake],
            (7108, 71, 251) : ['???', self._read_fake],
            (5808, 58, 220) : ['ITER', self._read_fake],
            (14000, 140, 568) : ['SWLDPRM', self._read_fake],
            (11001, 110, 581) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            #(10500, 105, 14) : ['???', self._read_fake],
            (7108, 82, 251): ['BOLT', self._read_fake],

            # MSC
            #(1247, 12, 667): ['MONPNT2', self._read_monpnt2],
            (11204, 112, 821): ['ERPPNL', self._read_fake],
            (8001, 80, 511): ['SET3', self._read_set3],
            (9400, 94, 641): ['MDLPRM', self._read_mdlprm],
            # NOTE(review): 1820_720 (= 1820720) is much larger than the other
            # third fields; confirm this marker against an actual OP2 file
            (11004, 110, 1820_720): ['HADACRI', self._read_fake],
            (8804, 88, 628): ['MONDSP1', self._read_mondsp1],
            (10904, 109, 719): ['HADAPTL', self._read_fake],
            (8204, 82, 621): ['MONPNT2', self._read_monpnt2],
            (8304, 83, 622): ['MONPNT3', self._read_monpnt3],
            #(8001, 80, 511): ['???', self._read_fake],
            #(8001, 80, 511): ['???', self._read_fake],
            #(8001, 80, 511): ['???', self._read_fake],
            #(8001, 80, 511): ['???', self._read_fake],
            #(8001, 80, 511): ['???', self._read_fake],
        }
def _read_aeforce(self, data: bytes, n: int) -> int:
    """
    Reads the AEFORCE card (parametric aerodynamic force).

    Word Name Type Description
    1 MACH RS
    2 SYMXZ(2) CHAR4
    4 SYMXY(2) CHAR4
    6 UXID I
    7 MESH(2) CHAR4
    9 FORCE I
    10 DMIK(2) CHAR4
    12 PERQ(2) CHAR4
    """
    op2 = self.op2
    ntotal = 52 * self.factor  # 4*13
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    # the fixed-width struct below only handles 32-bit (size=4) files
    assert self.factor == 1, self.factor
    structi = Struct(op2._endian + b'f 8s 8s i 8s i 8s 8s')
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        mach, sym_xz_bytes, sym_xy_bytes, ux_id, mesh_bytes, force, dmik_bytes, perq_bytes = out
        # decode/strip the CHAR4 pairs into python strings
        sym_xz = reshape_bytes_block_size(sym_xz_bytes, size=self.size)
        sym_xy = reshape_bytes_block_size(sym_xy_bytes, size=self.size)
        mesh = reshape_bytes_block_size(mesh_bytes, size=self.size)
        dmik = reshape_bytes_block_size(dmik_bytes, size=self.size)
        perq = reshape_bytes_block_size(perq_bytes, size=self.size)
        aeforce = op2.add_aeforce(mach, sym_xz, sym_xy, ux_id, mesh, force, dmik, perq)
        str(aeforce)  # sanity-check the card's repr
        n += ntotal
    return n
def _read_aepress(self, data: bytes, n: int) -> int:
    """
    Reads the AEPRESS card.

    Parametric pressure loading for aerodynamics.
    Word Name Type Description
    1 MACH RS Mach number
    2 SYMXZ(2) CHAR4 Character string for identifying symmetry of the
                     force vector.  Allowable values are SYMM, ASYMM, and ANTI
    4 SYMXY(2) CHAR4 Character string for identifying symmetry of the
                     force vector.  Allowable values are SYMM, ASYMM, and ANTI
    6 UXID I The identification number of a UXVEC entry
    7 DMIJ(2) CHAR4 The name of a DMI or DMIJ entry that defines the pressure
                    per unit dynamic pressure
    9 DMIJI(2) CHAR4 The name of a DMIJI entry that defines the CAERO2
                     interference element downwashes
    """
    op2 = self.op2
    ntotal = 40 * self.factor  # 4*10
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    # the fixed-width struct below only handles 32-bit (size=4) files
    assert self.factor == 1, self.factor
    structi = Struct(op2._endian + b'f 8s 8s i 8s 8s')
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        mach, sym_xz_bytes, sym_xy_bytes, ux_id, dmij_bytes, dmiji_bytes= out
        sym_xz = reshape_bytes_block_size(sym_xz_bytes, size=self.size)
        sym_xy = reshape_bytes_block_size(sym_xy_bytes, size=self.size)
        dmij = reshape_bytes_block_size(dmij_bytes, size=self.size)
        dmiji = reshape_bytes_block_size(dmiji_bytes, size=self.size)
        aepress = op2.add_aepress(mach, sym_xz, sym_xy, ux_id, dmij, dmiji)
        str(aepress)  # sanity-check the card's repr
        #print(mach, sym_xz, sym_xy, ux_id, dmij, dmiji)
        n += ntotal
    return n
def _read_mkaero2(self, data: bytes, n: int) -> int:
mkaero2x
def _read_csschd(self, data: bytes, n: int) -> int:
csschd
def _read_diverg(self, data: bytes, n: int) -> int:
    """
    Reads the DIVERG card.

    Record – DIVERG(2702,27,387)
    Divergence analysis data.
    Word Name Type Description
    1 SID   I  Unique set identification number
    2 NROOT I  Number of divergence roots to output
    3 M     RS Mach number
    Word 3 repeats until -1 occurs
    """
    op2 = self.op2
    # reinterpret the same bytes as both ints (ids) and floats (machs)
    ints = np.frombuffer(data[n:], op2.idtype).copy()
    floats = np.frombuffer(data[n:], op2.fdtype).copy()
    # (i0, i1) bracket one card each; ints[i1] is the -1 terminator
    istart, iend = get_minus1_start_end(ints)
    for (i0, i1) in zip(istart, iend):
        sid, nroots = ints[i0:i0+2]
        machs = floats[i0+2:i1]
        #print(sid, nroots, machs)
        assert ints[i1] == -1, ints[i1]
        diverg = op2.add_diverg(sid, nroots, machs)
        str(diverg)  # sanity-check the card's repr
    return len(data)
def _read_flfact(self, data: bytes, n: int) -> int:
    """
    Reads the FLFACT card (list of aeroelastic factors).

    data = (1, 0.206, -1,
            2, 1.3, -1,
            3, 14400.0, 15600.0, 16800.0, 18000.0, 19200.0, 20400.0, -1)
    """
    op2 = self.op2
    # view the payload both as ints (for sid/terminator) and floats (factors)
    ints = np.frombuffer(data[n:], op2.idtype).copy()
    floats = np.frombuffer(data[n:], op2.fdtype).copy()
    # each (i0, i1) pair brackets one card, terminated by -1 at i1
    for i0, i1 in zip(*get_minus1_start_end(ints)):
        assert ints[i1] == -1, ints[i1]
        str(op2.add_flfact(ints[i0], floats[i0 + 1:i1]))
    return len(data)
def _read_mkaero1(self, data: bytes, n: int) -> int:
    """
    Reads the MKAERO1 card.

    (3802, 38, 271)
    Kinda brilliant way to write the card. Weird to parse though.
    data = (1.3, -1, -1, -1, -1, -1, -1, -1,
            0.03, 0.04, 0.05, -1, -1, -1, -1, -1)
    """
    op2 = self.op2
    #assert len(data) == 76, len(data)
    nvalues = (len(data) - n) // 4
    nrows = nvalues // 16
    assert nrows > 0, nrows
    # NOTE(review): the buffers slice from byte 12 directly, assuming
    # n == 12 (the 3-word record header) — confirm against callers
    ints = np.frombuffer(data[12:], dtype=op2.idtype).reshape(nrows, 16)
    floats = np.frombuffer(data[12:], dtype=op2.fdtype).reshape(nrows, 16)
    # each 16-word row holds up to 8 machs (cols 0-7) and 8 kfreqs (cols 8-15);
    # unused slots are padded with -1
    irows, icols = np.where(ints != -1)
    uirows = np.unique(irows)
    for irow in uirows:
        iaero = np.where(irows == irow)[0]
        ifloats = icols[iaero]
        imachsi = np.where(ifloats < 8)[0]
        ikfreqsi = np.where(ifloats >= 8)[0]
        imachs = ifloats[imachsi]
        ikfreqs = ifloats[ikfreqsi]
        machs = floats[irow, imachs]
        kfreqs = floats[irow, ikfreqs]
        mkaero1 = op2.add_mkaero1(machs, kfreqs)
        str(mkaero1)  # sanity-check the card's repr
    return len(data)
def _read_group(self, data: bytes, n: int) -> int:
    """
    Reads the GROUP card by walking word-by-word through the record.

    GROUP(17400,174,616) - NX specific
    1 GID I Group identification number
    2 NDESC(C) I Length of group description
    3 GDESC(2) CHAR4 Group description
    Word 3 repeats NDESC times
    NDESC+3 GTYPE I Group type
    -2 = Meta data
    -3 = Property identification numbers
    -4 = Grid identification numbers
    -5 = Element identification numbers
    GTYPE = -2 Meta data
    NDESC+4 NMETA I Length of meta data (includes -1 terminator)
    NDESC+5 MDESC(2) CHAR4 Meta data
    Word NDESC+5 repeats NMETA times
    GTYPE = -3 Property identification numbers
    NDESC+5
    +NMETA
    ID I Property identification numbers
    > 0 for ID
    = 0 for THRU
    = -6 for BY
    = -7 for ALL
    Word NDESC+5+NMETA repeats until -1 occurs
    GTYPE = -4 Grid identification numbers
    NDESC+5+NMETA:
    ID I Grid identification numbers
    > 0 for ID
    = 0 for THRU
    = -6 for BY
    = -7 for ALL
    Word NDESC+5+NMETA repeats until -1 occurs
    GTYPE = -5 Element identification numbers
    NDESC+5
    +NMETA
    ID I Element identification numbers
    > 0 for ID
    = 0 for THRU
    = -6 for BY
    = -7 for ALL
    Word NDESC+5+NMETA repeats until -1 occurs
    (
    17400, 174, 616,
    6, 0,
    -2, 1, -1,
    -4, 1, 0, 440, -1,
    -1
    )
    (
    17400, 174, 616,
    55, 0,
    -5, 90011, -1,
    -1,
    65, 0,
    -5, 90012, -1,
    -1,
    75, 0,
    -5 90013, -1,
    -1)
    GROUP 10 Assembly AA4
    META 100 RPM
    META Optionally continue the meta data
    GRID 1 2 3 4 5 6 7 8
    GRID 10 THRU 20
    GRID 100 THRU 200
    GRID 341 THRU 360 BY 2
    ELEM 30 THRU 40
    PROP ALL
    strings = (b'o\x00\x00\x00\x05\x00\x00\x00THIS IS GROUP 111 \xfe\xff\xff\xff\x05\x00\x00\x00THIS IS METADATA\xff\xff\xff\xff\xfb\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\n\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff',)
    ints = (111, 5, 'THIS IS GROUP 111 ', -2, 5, 'THIS IS METADATA', -1, -5, 1, 0, 10, -1, -1)
    floats = (111, 5, 'THIS IS GROUP 111 ', -2, 5, 'THIS IS METADATA', -1, -5, 1, 0.0, 10, -1, -1)
    # double
    FLEXIBLE SLIDER(1)
    doubles (float64) = (1, 5, 6.03e-154, 6.08e-154, 6.01-154, 6.01e-154, 6.04e-154,
    -4, 14, -1, -1, 2, 6, 23e-154, 6.08e-154, 6.82e-154, 6.9e-154, 6.3e-154, 6.0e-154,
    -5, 1, 0.0, 10, -1, -1)
    long long (int64) = (1, 5, 234137606, 231102857, 23148032, 955957572, 231888857,
    -4, 14, -1, -1, 2, 6, 2413766, 742102857, 231216032, 23997572, 23192817, 23453545,
    -5, 1, 0, 10, -1, -1)
    """
    op2 = self.op2
    #print('reading group')
    #assert self.factor == 1, self.factor
    nentries = 0
    # i indexes words in ints/strs; n tracks the byte offset in data —
    # both are advanced in lockstep throughout
    ints = np.frombuffer(data[n:], dtype=op2.idtype8)
    if self.factor == 1:
        strs = np.frombuffer(data[n:], dtype='|S4')
    else:
        op2.show_data(data[n:], types='qds')
        strs = np.frombuffer(data[n:], dtype='|S8')
    size = self.size
    #print(ints)
    #print(strs)
    i = 0
    #minus1_count = 0
    #minus1 = np.where(ints == -1)[0]
    ndata = len(data)
    while n < ndata:  # one iteration per GROUP card
        #1 GID I Group identification number
        #2 NDESC(C) I Length of group description
        #3 GDESC(2) CHAR4 Group description
        #Word 3 repeats NDESC times
        group_id, ndesc = ints[i:i+2]
        i += 2
        n += 8
        group_desc = reshape_bytes_block_size(b''.join(strs[i:i+ndesc]), size=size)
        #if self.factor == 1:
            #group_desc = ''.join(stri.decode('latin1') for stri in strs[i:i+ndesc]).strip()
        #else:
            #group_desc_bytes = reshape_bytes_block(b''.join(strs[i:i+ndesc]))
            #group_desc = group_desc_bytes.decode('latin1').rstrip()
        i += ndesc
        n += 4 * ndesc
        #------------------------------
        #gtype, nmeta, mdesc
        gtype = ints[i]
        #i += 1
        #n += 4
        op2.log.debug(f'group_id={group_id} ndesc={ndesc} group_desc={group_desc!r}; gtype={gtype!r}')
        data_dict = {
            'meta': '',
            'property': [],
            'grid': [],
            'element': [],
        }
        #i += 1
        #n += 4
        while n < ndata:  # one iteration per section (meta/prop/grid/elem)
            #Group type
            #-2 = Meta data
            #-3 = Property identification numbers
            #-4 = Grid identification numbers
            #-5 = Element identification numbers
            #print(f'-----gtype={gtype}----')
            #print(ints[i:])
            if gtype == -1:
                # end of card
                i += 1
                n += size
                break
            elif gtype == -2:
                assert ints[i] == -2, ints[i]
                i += 1
                n += size
                # meta-data
                nmeta = ints[i]
                assert nmeta >= 0, nmeta
                #i += 1
                #n += 4
                #print(i, nmeta)
                #print(strs[i:i+nmeta-1])
                #self.show_data(data[i*4:])
                istop = i+nmeta-1
                assert istop > i, f'i={i} nmeta={nmeta}'
                #print('strs[i:istop] =', strs[i:istop])
                #print('istop =', istop, ints[istop])
                #meta_desc = ''.join(stri.decode('latin1') for stri in strs[i:istop])
                datai = data[n+(i+1)*size:n+istop*size]
                meta_desc = datai.decode('latin1')
                data_dict['meta'] = meta_desc
                i += nmeta + 1
                n += size * (nmeta + 1)
                #print(f' gtype={gtype} nmeta={nmeta} meta_desc={meta_desc!r}')
                #iminus1 = minus1[minus1_count+2]
                #print('ints: ', ints[i:iminus1].tolist())
                #minus1_count += 1
            elif gtype == -3:
                assert ints[i] == -3, ints[i]
                i, n, props = _read_group_elem_prop_nids(ints, i, n, size)
                data_dict['property'].append(props)
            elif gtype == -4:
                assert ints[i] == -4, ints[i]
                i, n, grids = _read_group_elem_prop_nids(ints, i, n, size)
                data_dict['grid'].append(grids)
            elif gtype == -5:
                assert ints[i] == -5, ints[i]
                i += 1
                n += size
                #print(f'gtype=5 (eids); ints[{i}]={ints[i]}')
                #self.show_data(data[12:], types='ifs')
                #print('data', ints[i:].tolist())
                #GTYPE = -5 Element identification numbers
                #NDESC+5+NMETA
                #ID I Element identification numbers
                #> 0 for ID
                #= 0 for THRU
                #= -6 for BY
                #= -7 for ALL
                #Word NDESC+5+NMETA repeats until -1 occurs
                #print('ints[i:] =', ints[i:])
                assert ints[i:][0] > 0, ints[i:]
                # find j = index of the -1 terminator relative to i
                for j, nj in enumerate(ints[i:]):
                    if nj == -1:
                        break
                eids_array = ints[i:i+j].tolist()
                # expand 0/-6/-7 markers into THRU/BY/ALL tokens
                eids2 = _expand_vals(eids_array)
                # print(f'  eids1 = {eids_array}')
                #print(f'  eids2 = {eids2}')
                assert 'THRU' != eids2[0], eids2
                assert 'BY' != eids2[0], eids2
                assert 'ALL' != eids2[0], eids2
                data_dict['element'].append(eids2)
                nstop = len(eids_array) + 1
                i += nstop
                n += nstop * self.size
            else:
                raise NotImplementedError(gtype)
            gtype = ints[i]
            assert gtype <= -1, ints[i]
            #print(f'***gtype={gtype} (ndata-n)={(ndata-n)}')
            #print('---------------')
            #if gtype == -1 and (ndata - n) == 4:
                #print('break')
                #minus1_count += 1
                #i += 1
                #n += 4
                #break
        #grid=1 ndesc=4 group_desc='GROUP(1)_ELAR'
        # $ROUP ID DESC
        # GROUP 1Group(1)_elar +
        # $ TYPE ID1 "THRU" ID2
        # + ELEM 21 THRU 36
        #print(data_dict)
        #op2.add_group(group_id, group_desc, data_dict)
        #i += 1
        #n += 4
        #assert ints[i] == -1, ints[i:]
        # NOTE(review): meta is extracted but not passed to add_group — confirm intent
        meta = data_dict['meta']
        nodes = data_dict['grid']
        elements = data_dict['element']
        properties = data_dict['property']
        op2.add_group(group_id, nodes, elements, properties)
        # self.log.warning(f'geom skipping GROUP in {self.table_name}')
        nentries += 1
    assert n == len(data), f'n={n} ndata={len(data)}'
    op2.increase_card_count('GROUP', nentries)
    assert nentries > 0, nentries
    return n
def _read_aero(self, data: bytes, n: int) -> int:
    """
    Reads the AERO card (3202, 32, 265).

    Word Name Type Description
    1 ACSID    I
    2 VELOCITY RS
    3 REFC     RS
    4 RHOREF   RS
    5 SYMXZ    I
    6 SYMXY    I
    """
    op2 = self.op2
    # exactly one AERO card (9 words: 3-word header + 6 words of payload)
    assert len(data) == 36, len(data)
    fmt = Struct(op2._endian + b'i 3f 2i')
    (acsid, velocity, cref,
     rho_ref, sym_xz, sym_xy) = fmt.unpack(data[n:])
    op2.add_aero(velocity, cref, rho_ref,
                 acsid=acsid, sym_xz=sym_xz, sym_xy=sym_xy)
    return 36
def _read_aeros(self, data: bytes, n: int) -> int:
    """
    Reads the AEROS card.

    AEROS(2202, 22, 340)
    AEROS          0     100     36.    360.  12960.
    data = (0, 100, 36.0, 360.0, 12960.0, 0, 0)
    """
    op2 = self.op2
    nexpected = 40 * self.factor  # 10 words
    assert len(data) == nexpected, len(data)
    fmt = Struct(mapfmt(op2._endian + b'2i 3f 2i', self.size))
    (acsid, rcsid, cref, bref,
     sref, sym_xz, sym_xy) = fmt.unpack(data[n:])
    op2.add_aeros(cref, bref, sref,
                  acsid=acsid, rcsid=rcsid,
                  sym_xz=sym_xz, sym_xy=sym_xy)
    return nexpected
def _read_deform(self, data: bytes, n: int) -> int:
    """
    Reads the DEFORM card (104, 1, 81).

    NX 2019.2
    Word Name Type Description
    1 SID I  Deformation set identification number
    2 EID I  Element number
    3 D   RS Deformation
    """
    op2 = self.op2
    ntotal = 12 * self.factor  # 3 words
    nleft = len(data) - n
    assert nleft % ntotal == 0
    unpack = Struct(mapfmt(op2._endian + b'2i f', self.size)).unpack
    for _ in range(nleft // ntotal):
        sid, eid, deformation = unpack(data[n:n + ntotal])
        # str() sanity-checks the card's repr
        str(op2.add_deform(sid, eid, deformation))
        n += ntotal
    return n
def _read_caero1(self, data: bytes, n: int) -> int:
    """
    Reads the CAERO1 card (3002, 30, 263).

    MSC 2018.2
    Word Name Type Description
    1 EID I
    2 PID I
    3 CP I
    4 NSPAN I
    5 NCHORD I
    6 LSPAN I
    7 LCHORD I
    8 IGID I
    9 X1 RS
    10 Y1 RS
    11 Z1 RS
    12 X12 RS
    13 X4 RS
    14 Y4 RS
    15 Z4 RS
    16 X43 RS

    CAERO1   100001  100001       0      10              24               1
            99.2956521.45381-11.654442.85999101.8387122.6196-2.6930832.70996
    data = (100001, 100001, 0, 10, 0, 0, 24, 1,
            99.3, 21.45, -11.65, 42.86, 101.8387, 122.62, -2.69, 32.71)
    """
    op2 = self.op2
    ntotal = 64 * self.factor  # 4*16
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(mapfmt(op2._endian + b'8i 8f', self.size))
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        eid, pid, cp, nspan, nchord, lspan, lchord, igid, x1, y1, z1, x12, x4, y4, z4, x43 = out
        # p1/p4 are the leading-edge corner points; x12/x43 the edge chords
        op2.add_caero1(eid, pid, igid,
                       [x1, y1, z1], x12,
                       [x4, y4, z4], x43,
                       cp=cp,
                       nspan=nspan, lspan=lspan,
                       nchord=nchord, lchord=lchord)
        n += ntotal
    return n
def _read_caero2(self, data: bytes, n: int) -> int:
    """
    Reads the CAERO2 card.

    MSC 2018.2
    Word Name Type Description
    1 EID I
    2 PID I
    3 CP I
    4 NSB I
    5 NINT I
    6 LSB I
    7 LINT I
    8 IGID I
    9 X1 RS
    10 Y1 RS
    11 Z1 RS
    12 X12 RS
    13 UNDEF(4) none
    data = (54000, 4020, 0, 8, 8, 0, 0, 1, -5.0, 0, 0, 40.0, 0, 0, 0, 0),
    """
    op2 = self.op2
    ntotal = 64 * self.factor  # 4*16
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(mapfmt(op2._endian + b'8i 4f 4i', self.size))
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        eid, pid, cp, nsb, nint, lsb, lint, igroup, x1, y1, z1, x12, zero1, zero2, zero3, zero4 = out
        # the four trailing UNDEF words should all be identical (padding)
        assert min(zero1, zero2, zero3, zero4) == max(zero1, zero2, zero3, zero4)
        p1 = [x1, y1, z1]
        caero2 = op2.add_caero2(eid, pid, igroup, p1, x12,
                                cp=cp,
                                nsb=nsb, nint=nint,
                                lsb=lsb, lint=lint)
        str(caero2)  # sanity-check the card's repr
        n += ntotal
    return n
def _read_caero3(self, data: bytes, n: int) -> int:
    """
    Reads the CAERO3 card.

    Aerodynamic panel element configuration.
    Word Name Type Description
    1 EID I Element identification number
    2 PID I Property identification number of a PAERO3 entry
    3 CP I Coordinate system for locating points 1 and 4
    4 LISTW I Identification number of an AEFACT entry that lists
      coordinate pairs for structural interpolation of the wing
    5 LISTC1 I Identification number of an AEFACT entry that lists
      coordinate pairs for control surfaces
    6 LISTC2 I Identification number of an AEFACT entry that lists
      coordinate pairs for control surfaces
    7 UNDEF(2) None
    9 X1 RS X-coordinate of point 1 in coordinate system CP
    10 Y1 RS Y-coordinate of point 1 in coordinate system CP
    11 Z1 RS Z-coordinate of point 1 in coordinate system CP
    12 X12 RS Edge chord length in aerodynamic coordinate system
    13 X4 RS X-coordinate of point 4 in coordinate system CP
    14 Y4 RS Y-coordinate of point 4 in coordinate system CP
    15 Z4 RS Z-coordinate of point 4 in coordinate system CP
    16 X43 RS Edge chord length in aerodynamic coordinate system
    """
    op2 = self.op2
    ntotal = 64 * self.factor  # 4*16
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(mapfmt(op2._endian + b'6i 2i 8f', self.size))
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        eid, pid, cp, list_w, list_c1, list_c2, zero1, zero2, x1, y1, z1, x12, x4, y4, z4, x43 = out
        #eid, pid, cp, nspan, lspan, zero1, zero2, zero3, x1, y1, z1, x12, x4, y4, z4, x43 = out
        # the two UNDEF words should be identical (padding)
        assert min(zero1, zero2) == max(zero1, zero2)
        p1 = [x1, y1, z1]
        p4 = [x4, y4, z4]
        caero3 = op2.add_caero3(
            eid, pid, list_w, p1, x12, p4, x43,
            cp=cp, list_c1=list_c1, list_c2=list_c2, comment='')
        str(caero3)  # sanity-check the card's repr
        #print(caero3)
        n += ntotal
    return n
def _read_caero4(self, data: bytes, n: int) -> int:
    """
    Reads the CAERO4 card.

    Word Name Type Description
    1 EID I Element identification number
    2 PID I Property identification number of a PAERO4 entry
    3 CP I Coordinate system for locating points 1 and 4
    4 NSPAN I Number of strips
    5 LSPAN I Identification number of an AEFACT entry
      containing a list of division points for strips
    6 UNDEF(3) None
    9 X1 RS X-coordinate of point 1 in coordinate system CP
    10 Y1 RS Y-coordinate of point 1 in coordinate system CP
    11 Z1 RS Z-coordinate of point 1 in coordinate system CP
    12 X12 RS Edge chord length in aerodynamic coordinate system
    13 X4 RS X-coordinate of point 4 in coordinate system CP
    14 Y4 RS Y-coordinate of point 4 in coordinate system CP
    15 Z4 RS Z-coordinate of point 4 in coordinate system CP
    16 X43 RS Edge chord length in aerodynamic coordinate system
    """
    op2 = self.op2
    ntotal = 64 * self.factor  # 4*16
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(mapfmt(op2._endian + b'5i 3i 8f', self.size))
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        eid, pid, cp, nspan, lspan, zero1, zero2, zero3, x1, y1, z1, x12, x4, y4, z4, x43 = out
        # the three UNDEF words should be identical (padding)
        assert min(zero1, zero2, zero3) == max(zero1, zero2, zero3)
        p1 = [x1, y1, z1]
        p4 = [x4, y4, z4]
        caero4 = op2.add_caero4(eid, pid, p1, x12, p4, x43,
                                cp=cp, nspan=nspan, lspan=lspan, comment='')
        str(caero4)  # sanity-check the card's repr
        #print(caero4)
        n += ntotal
    return n
def _read_caero5(self, data: bytes, n: int) -> int:
    """
    Reads the CAERO5 card.

    MSC 2018.2
    Word Name Type Description
    1 EID I
    2 PID I
    3 CP I
    4 NSPAN I
    5 LSPAN I
    6 NTHRY I
    7 NTHICK I
    8 UNDEF none
    9 X1 RS
    10 Y1 RS
    11 Z1 RS
    12 X12 RS
    13 X4 RS
    14 Y4 RS
    15 Z4 RS
    16 X43 RS
    """
    op2 = self.op2
    # NOTE(review): unlike the other caero readers, no self.factor/mapfmt
    # here — 64-bit files are presumably unsupported for CAERO5; confirm
    ntotal = 64  # 4*16
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(op2._endian + b'8i 8f')
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        eid, pid, cp, nspan, lspan, ntheory, nthick, undef, x1, y1, z1, x12, x4, y4, z4, x43 = out
        p1 = [x1, y1, z1]
        p4 = [x4, y4, z4]
        caero5 = op2.add_caero5(eid, pid,
                                p1, x12,
                                p4, x43,
                                cp=cp,
                                nspan=nspan, lspan=lspan,
                                ntheory=ntheory,
                                nthick=nthick)
        str(caero5)  # sanity-check the card's repr
        n += ntotal
    return n
def _read_paero1(self, data: bytes, n: int) -> int:
    r"""
    Reads the PAERO1 card (3102, 31, 264).

    MSC 2018.2
    Word Name Type Description
    1 PID I
    2 B1 I
    3 B2 I
    4 B3 I
    5 B4 I
    6 B5 I
    7 B6 I
    8 UNDEF none

    PAERO1  100001
    data = (100001, 100001, 0, 10, 0, 0, 24, 1,
            99.3, 21.45, -11.65, 42.86, 101.8387, 122.62, -2.69, 32.71)

    C:\MSC.Software\simcenter_nastran_2019.2\tpl_post1\adb144_2.op2
    PAERO1  1000    74000   74510   84610
    """
    op2 = self.op2
    ntotal = 32 * self.factor  # 4 * 8
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(mapfmt(op2._endian + b'8i', self.size))
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        pid, b1, b2, b3, b4, b5, b6, empty = out
        caero_body_ids = []
        # NOTE(review): the UNDEF word (empty) is scanned too, so a nonzero
        # pad word would be treated as a body id — confirm intent
        for body in [b1, b2, b3, b4, b5, b6, empty]:
            if body != 0:
                caero_body_ids.append(body)
        paero1 = op2.add_paero1(pid, caero_body_ids=caero_body_ids)
        str(paero1)  # sanity-check the card's repr
        #if caero_body_ids:
            #self.log.warning(str(paero1))
        n += ntotal
    return n
def _read_paero2(self, data: bytes, n: int) -> int:
    """
    Reads the PAERO2 card.

    MSC 2018.2
    Word Name Type Description
    1 PID I
    2 ORIENT CHAR4
    3 UNDEF none (orient carryover)
    4 WIDTH RS
    5 AR RS
    6 LRSB I
    7 LRIB I
    8 LTH1 I
    9 LTH2 I
    10 THI1 I
    11 THN1 I
    12 THI2 I
    13 THN2 I
    14 THI3 I
    15 THN3 I

    PAERO2  100001
    data = (100001, 100001, 0, 10, 0, 0, 24, 1,
            99.3, 21.45, -11.65, 42.86, 101.8387, 122.62, -2.69, 32.71)
    """
    op2 = self.op2
    ntotal = 60 * self.factor  # 4 * 15
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    # both 32- and 64-bit formats are supported here
    if self.size == 4:
        structi = Struct(op2._endian + b'i4si 2f 10i')
    else:
        structi = Struct(op2._endian + b'q8sq 2d 10q')
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        (pid, orient_bytes, undef, width, ar, lrsb, lrib, lth1, lth2,
         thi1, thn1,
         thi2, thn2,
         thi3, thn3) = out
        lth = [lth1, lth2]
        thi = [thi1, thi2, thi3]
        thn = [thn1, thn2, thn3]
        orient = reshape_bytes_block_size(orient_bytes, self.size)
        paero2 = op2.add_paero2(pid, orient, width, ar,
                                thi, thn,
                                lrsb=lrsb,
                                lrib=lrib,
                                lth=lth)
        n += ntotal
        str(paero2)  # sanity-check the card's repr
    return n
def _read_paero5(self, data: bytes, n: int) -> int:
    """
    Reads the PAERO5 card.

    MSC 2018.2
    Word Name Type Description
    1 PID I
    2 NALPHA I
    3 LALPHA I
    4 NXIS I
    5 LXIS I
    6 NTAUS I
    7 LTAUS I
    8 CAOCI RS
    Word 8 repeats until End of Record
    """
    op2 = self.op2
    # dual int/float views of the same payload; caoci values are floats
    ints = np.frombuffer(data[n:], op2.idtype8).copy()
    floats = np.frombuffer(data[n:], op2.fdtype8).copy()
    # (i0, i1) bracket one card each; ints[i1] is the -1 terminator
    istart, iend = get_minus1_start_end(ints)
    for (i0, i1) in zip(istart, iend):
        pid, nalpha, lalpha, nxis, lxis, ntaus, ltaus = ints[i0:i0+7]
        caoci = floats[i0+7:i1]
        assert ints[i1] == -1, ints[i1]
        paero5 = op2.add_paero5(
            pid, caoci,
            nalpha=nalpha, lalpha=lalpha,
            nxis=nxis, lxis=lxis,
            ntaus=ntaus, ltaus=ltaus)
        str(paero5)  # sanity-check the card's repr
    return len(data)
def _read_panel(self, data: bytes, n: int) -> int:
    """
    Reads the PANEL card.

    MSC 2018.2
    Word Name Type Description
    1 NAME(2) CHAR4
    3 SETID I
    Words 1 through 3 repeat until End of Record
    ('PANEL1', 1, -1)
    """
    op2 = self.op2
    ints = np.frombuffer(data[n:], op2.idtype8).copy()
    # (i0, i1) bracket one card each; ints[i1] is the -1 terminator
    istart, iend = get_minus1_start_end(ints)
    for (i0, i1) in zip(istart, iend):
        assert ints[i1] == -1, ints[i1]
        names = []
        set_ids = []
        while i0 < i1:
            # NOTE(review): the 8-byte name slice and the 12-byte stride
            # are hard-coded for size=4 words; verify 64-bit files
            name_bytes = data[n:n+8]
            name = reshape_bytes_block_size(name_bytes, self.size)
            set_id = ints[i0+2]
            names.append(name)
            set_ids.append(set_id)
            n += 12
            i0 += 3
        panel = op2.add_panel(names, set_ids)
        str(panel)  # sanity-check the card's repr
    return len(data)
def _read_acmodl(self, data: bytes, n: int) -> int:
    """
    Reads the ACMODL card.

    Dispatches on record length: NX writes 72 bytes, MSC writes 64,
    so both parsers are tried via the dual-card mechanism.
    """
    op2 = self.op2
    card_name = 'ACMODL'
    card_obj = ACMODL
    # keyed by bytes-per-card; the dual-card reader tries both parsers
    methods = {
        72 : self._read_acmodl_nx_72,
        64 : self._read_acmodl_msc_64,
    }
    try:
        n = op2.reader_geom2._read_double_card(
            card_name, card_obj,
            op2._add_methods._add_acmodl_object,
            methods, data, n)
    except DoubleCardError:
        raise
    return n
    #n = self._read_dual_card(data, n, self._read_acmodl_nx, self._read_acmodl_msc,
                             #'ACMODL', op2._add_methods._add_acmodl_object)
    ##return self._read_acmodl_msc(data, n)
    #return n
def _read_acmodl_nx_72(self, card, data: bytes, n: int) -> Tuple[int, List[ACMODL]]:
    """
    Reads the 72-byte NX flavor of the ACMODL card.

    NX 2019.2 - 72 bytes
    Word Name Type Description
    1 INTER(2) CHAR4 IDENT or DIFF method specification
    3 INFO(2) CHAR4 Allowable values are ALL, ELEMENTS, PID, SET3, and NONE
    5 FSET I Fluid set ID
    6 SSET I Structure set ID
    7 NORML RS Outward normal search distance to detect fluid-structure interface
    8 METHOD(2) CHAR4 Interface calculation method
    10 OVPLANG RS Angular tolerance in degrees used to decide
       whether a fluid free face and a structural face
       are overlapping
    11 SRCHUNIT(2) CHAR4 Search unit
    13 INTOL RS Inward normal search distance to detect
       fluid-structure interface
    14 AREAOPT I Area option
    15 SKNEPS RS SKNEPS option. Only used when AREAOPT = 0
    16 INTORD I Integration order
    17 CTYPE(2) CHAR4 Coupling type (STRONG, WEAK, WEAKINT, or WEAKEXT)

    strings = (b'IDENT   NONE    ,  \x00\x00pBREL     \xcd\xccL>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00STRONG  ',)
    ints = ('IDENT', 'NONE', 0, 0, 1.0e-4, AS, 60.0, 541869394, 538976288, 0.2, 0, 0.0, 0, STRONG)
    floats = ('IDENT', 'NONE', 0.0, 0.0, 1.0e-4, AS, 60.0, 1.7302408291410174e-19, 1.3563156426940112e-19, 0.2, 0, 0.0, 0, STRONG)

    MSC 2018.2
    | ACMODL | INTER | INFOR   | FSET     | SSET | NORMAL | METHOD | SKNEPS  | DSKNEPS  |
    |        | INTOL | ALLSSET | SRCHUNIT |      |        |        |         |          |
    # NX 2019.2
    | ACMODL |       | INFOR  | FSET     | SSET | NORMAL |        | OVLPANG | SRCHUNIT |
    |        | INTOL | AREAOP |          |      | CTYPE  |        |         |          |
    ACMODL,IDENT,,,,1.0-4
    """
    op2 = self.op2
    ntotal = 72 * self.factor  # 4 * 8
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0, ndatai % ntotal
    #structi = Struct(op2._endian + b'4i f 8s 8s 3i f') # msc
    if self.size == 4:
        structi = Struct(op2._endian + b'8s 8s 2i f 8s f 8s f ifi 8s')
    else:
        structi = Struct(op2._endian + b'16s 16s 2q d 16s d 16s d qdq 16s')
    acmodls = []
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        #Type of structure-fluid interface. (Character = IDENT or DIFF;
        #inter: good
        #olvpang: good
        #search_unit_bytes: good
        #ctype: good
        #area_op: good
        #sk_neps/olvpang; Default=60.0
        inter_bytes, infor_bytes, fset, sset, normal, method_bytes, olvpang, search_unit_bytes, intol, area_op, sk_neps, intord, ctype_bytes = out
        inter = reshape_bytes_block_size(inter_bytes, self.size)
        infor = reshape_bytes_block_size(infor_bytes, self.size)
        method = reshape_bytes_block_size(method_bytes, self.size)
        search_unit = reshape_bytes_block_size(search_unit_bytes, self.size)
        ctype = reshape_bytes_block_size(ctype_bytes, self.size)
        assert inter in ['IDEN', 'IDENT', 'DIFF'], inter
        assert ctype in ['STRONG', 'WEAK', 'WEAKINT', 'WEAKEXT'], ctype
        assert method in ['AS'], method
        assert area_op in [0, 1], area_op
        #If CTYPE = STRONG
        #If CTYPE = WEAK
        #print(f'inter={inter!r} infor={infor!r} fset={fset} sset={sset} normal={normal:g} method={method} olvpang={olvpang} search_unit={search_unit!r}\n'
              #f'intol={intol:g} area_op={area_op} sk_neps={sk_neps} intord={intord} ctype={ctype!r}')
        #If SRCHUNIT = ABS, then the model units are absolute.
        #If SRCHUNIT = REL, then the relative model units are based on element size.
        assert search_unit in ['ABS', 'REL'], search_unit
        #INTOL Inward normal sea
        # set2 = op2.add_set2(sid, macro, sp1, sp2, ch1, ch2, zmax, zmin)
        # NOTE(review): method='BW' is hard-coded rather than the decoded
        # value; the decoded method ('AS') is only asserted — confirm intent
        acmodl = ACMODL(infor, fset, sset,
                        normal=normal, olvpang=olvpang,
                        search_unit=search_unit, intol=intol,
                        area_op=area_op,
                        ctype=ctype,
                        method='BW',
                        sk_neps=sk_neps,
                        #dsk_neps=0.75,
                        #all_set='NO',
                        inter=inter,
                        nastran_version='nx')
        #print(acmodl)
        str(acmodl)
        acmodls.append(acmodl)
        n += ntotal
    return n, acmodls
def _read_acmodl_msc_64(self, card, data: bytes, n: int) -> Tuple[int, List[ACMODL]]:
    """
    Reads the 64-byte MSC flavor of the ACMODL card.

    MSC 2018.2 - 64 bytes
    Word Name Type Description
    1 INTER(2) CHAR4 Type of structure-fluid interface:
      "IDENT","DIFF"-def"DIFF"
    3 INFOR(2) CHAR4 If INTER="DIFF", defines the type of list:
      "GRIDS","ELEMENTS","NONE"-def"NONE"
    5 FSET I SET1 ID for fluid elements or grids ID list, FSET>0 or blank
    6 SSET I SET1 ID for struc elements or grids ID list, SSET>0 or blank
    7 NORMAL RS Fluid normal tolerance - def 1.0
    8 METHOD(2) CHAR4 Method -def" "
    10 SKNEPS RS Fluid skin growth tolereance - def 0.75
    11 DSKNEPS RS Fluid secondary skin growth tolerance - def 0.75
    12 INTOL RS Tolerance of inward normal - def 0.5
    13 ALLSSET(2) CHAR4 coupled all structure given by SSET if "YES" - def"NO"
    15 SRCHUNIT(2) CHAR4 Search Units:"ABS","REL"-def"REL"

    ndata = 64: # MSC
    strings = (b'IDENT   NONE    \x00\x00\x00\x00\x00\x00\x00\x00\x17\xb7\xd18        \x00\x00\x00?\x00\x00@?\x00\x00\x00?NO      REL     ',)
    ints = (IDENT, NONE, 0, 0, 953267991, 538976288, 538976288, 0.5, 0.75, 0.5, NO, REL)
    floats = (IDENT, NONE, 0.0, 0.0, 9.999999747378752e-05, 1.3563156426940112e-19, 1.3563156426940112e-19, 0.5, 0.75, 0.5, NO, REL)

    MSC 2018.2
    | ACMODL | INTER | INFOR   | FSET     | SSET | NORMAL | METHOD | SKNEPS  | DSKNEPS  |
    |        | INTOL | ALLSSET | SRCHUNIT |      |        |        |         |          |
    """
    op2 = self.op2
    ntotal = 64 * self.factor  # 4 * 8
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0, ndatai % ntotal
    #structi = Struct(op2._endian + b'4i f 8s 8s 3i f') # msc
    if self.size == 4:
        structi = Struct(op2._endian + b'8s 8s 2if 8s 3f 8s 8s')
    else:
        # 64-bit MSC ACMODL has never been seen; the struct below is a guess
        raise NotImplementedError(('ACMODL-MSC', self.size))
        structi = Struct(op2._endian + b'16s 16s 2q d 16s d 16s d qdq 16s')
    acmodls = []
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        #Type of structure-fluid interface. (Character = IDENT or DIFF;
        #inter: good
        #olvpang: good
        #search_unit_bytes: good
        #ctype: good
        #area_op: good
        #sk_neps/olvpang; Default=60.0
        inter_bytes, infor_bytes, fset, sset, normal, method_bytes, sk_neps, dsk_neps, intol, all_sset, search_unit_bytes = out
        # the NX-only fields are not present in the MSC record
        area_op= None
        olvpang = None
        #inter_bytes, infor_bytes, fset, sset, normal, method_bytes, olvpang, search_unit_bytes, intol, area_op, sk_neps, intord, ctype_bytes = out
        inter = reshape_bytes_block_size(inter_bytes, self.size)
        infor = reshape_bytes_block_size(infor_bytes, self.size)
        method = reshape_bytes_block_size(method_bytes, self.size)
        search_unit = reshape_bytes_block_size(search_unit_bytes, self.size)
        #ctype = reshape_bytes_block_size(ctype_bytes, self.size)
        assert inter in ['IDENT', 'DIFF'], inter
        #assert ctype in ['STRONG', 'WEAK', 'WEAKINT', 'WEAKEXT'], ctype
        assert method in ['CP', ''], method
        #assert area_op in [0, 1], area_op
        #If CTYPE = STRONG
        #If CTYPE = WEAK
        #print(f'inter={inter!r} infor={infor!r} fset={fset} sset={sset} normal={normal:g} method={method} olvpang={olvpang} search_unit={search_unit!r}\n'
              #f'intol={intol:g} area_op={area_op} sk_neps={sk_neps} intord={intord} ctype={ctype!r}')
        #If SRCHUNIT = ABS, then the model units are absolute.
        #If SRCHUNIT = REL, then the relative model units are based on element size.
        assert search_unit in ['ABS', 'REL'], search_unit
        #INTOL Inward normal sea
        # set2 = op2.add_set2(sid, macro, sp1, sp2, ch1, ch2, zmax, zmin)
        # NOTE(review): method='BW' is hard-coded rather than the decoded
        # value; dsk_neps/all_sset are decoded but not passed — confirm intent
        acmodl = ACMODL(infor, fset, sset,
                        normal=normal, olvpang=olvpang,
                        search_unit=search_unit, intol=intol,
                        area_op=area_op,
                        #ctype=ctype,
                        method='BW',
                        sk_neps=sk_neps,
                        #dsk_neps=0.75,
                        #all_set='NO',
                        inter=inter,
                        nastran_version='msc')
        #print(acmodl)
        str(acmodl)
        acmodls.append(acmodl)
        n += ntotal
    return n, acmodls
def _read_aelist(self, data: bytes, n: int) -> int:
    """
    Reads the AELIST card.

    MSC 2018.2
    Word Name Type Description
    1 SID I
    2 E  I
    Word 2 repeats until End of Record
    """
    op2 = self.op2
    #self.show_data(data[12:], types='if')
    ints = np.frombuffer(data[n:], op2.idtype8).copy()
    # (i0, i1) bracket one card each; ints[i1] is the -1 terminator
    istart, iend = get_minus1_start_end(ints)
    for (i0, i1) in zip(istart, iend):
        sid = ints[i0]
        # bug fix: the element ids are integers (type I per the DMAP doc);
        # the old code read them from the float reinterpretation of the
        # buffer, yielding denormal garbage instead of element ids
        elements = ints[i0+1:i1].tolist()
        assert ints[i1] == -1, ints[i1]
        op2.add_aelist(sid, elements)
    return len(data)
def _read_set1(self, data: bytes, n: int) -> int:
    """
    Reads the SET1 card (3502, 35, 268).

    MSC 2018.2
    Word Name Type Description
    1 SID I
    2 G1  I Grid ID or -2 when SKIN is specified
    Word 2 repeats until End of Record
    """
    op2 = self.op2
    ints = np.frombuffer(data[n:], op2.idtype8).copy()
    # each (i0, i1) pair brackets one card, terminated by -1 at i1
    istart, iend = get_minus1_start_end(ints)
    for i0, i1 in zip(istart, iend):
        assert ints[i1] == -1, ints[i1]
        sid = ints[i0]
        gids = ints[i0 + 1:i1].tolist()
        # the SKIN marker (-2) is not handled here
        assert -2 not in gids, gids
        op2.add_set1(sid, gids, is_skin=False)
    return len(data)
def _read_set2(self, data: bytes, n: int) -> int:
    """
    Reads the SET2 card.

    Record 71 - SET2(3602,36,269)
    Word Name Type Description
    1 SID   I
    2 MACRO I
    3 SP1   RS
    4 SP2   RS
    5 CH1   RS
    6 CH2   RS
    7 ZMAX  RS
    8 ZMIN  RS
    """
    op2 = self.op2
    ntotal = 32  # 8 words * 4 bytes
    nleft = len(data) - n
    assert nleft % ntotal == 0
    unpack = Struct(op2._endian + b'2i 6f').unpack
    for _ in range(nleft // ntotal):
        sid, macro, sp1, sp2, ch1, ch2, zmax, zmin = unpack(data[n:n + ntotal])
        # str() sanity-checks the card's repr
        str(op2.add_set2(sid, macro, sp1, sp2, ch1, ch2, zmax, zmin))
        n += ntotal
    op2.to_nx(' because SET2 was found')
    return n
def _read_set3(self, data: bytes, n: int) -> int:
    """
    Reads the SET3 card.

    MSC 2018.2
    Word Name Type Description
    1 SID I
    2 DES I Set description:
    1=ELEM
    2=GRID
    3=POINT
    4=PROP
    5=RBEin
    6=RBEex
    3 ID1 I IDs of Grids, Elements, Points or Properties.
    4 "ID1 THRU ID2" format will be EXPANDED into explicit IDs.
    Words 3 through 4 repeat until End of Record

    data = (1, 1, 190, ..., 238, -1,
            2, 1, 71, ..., 189, -1,
            4, 1, 309, ..., ..., 378, -1)
    """
    op2 = self.op2
    # this is setup for NX
    ints = np.frombuffer(data[n:], op2.idtype8).copy()
    # (i0, i1) bracket one card each; ints[i1] is the -1 terminator
    istart, iend = get_minus1_start_end(ints)
    for (i0, i1) in zip(istart, iend):
        sid = ints[i0]
        desc_int = ints[i0+1]
        elements = ints[i0+2:i1].tolist()
        # map the integer DES code back to the card's string descriptor
        if desc_int == 1:
            desc = 'ELEM'
        elif desc_int == 2:
            desc = 'GRID'
        elif desc_int == 3:
            desc = 'POINT'
        elif desc_int == 4:
            desc = 'PROP'
        elif desc_int == 5:
            desc = 'RBEin'
        elif desc_int == 6:
            desc = 'RBEex'
        else:
            raise NotImplementedError(desc_int)
        assert min(elements) > 0, elements
        assert ints[i1] == -1, ints[i1]
        set3 = op2.add_set3(sid, desc, elements)
        str(set3)  # sanity-check the card's repr
    return len(data)
def _read_aelink(self, data: bytes, n: int) -> int:
    """
    Reads the AELINK card.
    MSC 2018.2
    Word Name Type Description
    1 ID I
    2 LABLD(2) CHAR4
    4 LABLI(2) CHAR4
    6 C1 RS
    Words 4 through 6 repeat until (-1,-1,-1) occurs
    """
    op2 = self.op2
    struct_header = Struct(op2._endian + b'i8s')
    struct_pair = Struct(op2._endian + b'8sf')
    struct_sentinel = Struct(op2._endian + b'3i')
    ntotal = 12
    ndata = len(data)
    while n < ndata:
        aelink_id, label_bytes = struct_header.unpack(data[n:n+ntotal])
        # an id of 0 flags the special 'ALWAYS' trim id
        if aelink_id == 0:
            aelink_id = 'ALWAYS'
        label = reshape_bytes_block_size(label_bytes, self.size)
        n += ntotal

        independent_labels = []
        linking_coefficents = []
        chunk = data[n:n+ntotal]
        while struct_sentinel.unpack(chunk) != (-1, -1, -1):
            ind_label_bytes, coeff = struct_pair.unpack(chunk)
            independent_labels.append(
                reshape_bytes_block_size(ind_label_bytes, self.size))
            linking_coefficents.append(coeff)
            n += ntotal
            chunk = data[n:n+ntotal]
        n += ntotal  # step over the (-1,-1,-1) sentinel
        aelink = op2.add_aelink(aelink_id, label,
                                independent_labels, linking_coefficents)
        str(aelink)  # validate that the card writes cleanly
    return len(data)
def _read_aecomp(self, data: bytes, n: int) -> int:
    """
    Reads the AECOMP card.
    MSC 2018.2
    Word Name Type Description
    1 NAME(2)     CHAR4
    3 LISTTYPE(2) CHAR4
    5 LISTID      I
    Word 5 repeats until End of Record
    """
    op2 = self.op2
    ints = np.frombuffer(data[n:], op2.idtype).copy()
    istart, iend = get_minus1_start_end(ints)
    for (i0, i1) in zip(istart, iend):
        # words 1-4 are two CHAR4*2 fields; pull them straight out of the
        # byte stream since they are not meaningful as ints
        name_bytes = data[n+i0*4:n+i0*4+8]
        list_type_bytes = data[n+i0*4+8:n+i0*4+16]
        lists = ints[i0+4:i1].tolist()
        assert ints[i1] == -1, ints[i1]
        name = name_bytes.rstrip().decode('ascii')
        list_type = list_type_bytes.rstrip().decode('ascii')
        aecomp = op2.add_aecomp(name, list_type, lists)
        str(aecomp)  # validate that the card writes cleanly
    # bug fix: a stray no-op string literal (a pasted copy of the AECOMPL
    # record description) sat at the end of this function; removed
    return len(data)
def _read_aecompl(self, data: bytes, n: int) -> int:
    """
    Reads the AECOMPL card.
    MSC 2018.2
    Word Name Type Description
    1 NAME(2)  CHAR4
    3 LABEL(2) CHAR4
    Words 3 through 4 repeat until (-1,-1) occurs
    """
    op2 = self.op2
    struct_name = Struct(op2._endian + b'8s')
    struct_label = Struct(op2._endian + b'8s')
    struct_sentinel = Struct(op2._endian + b'2i')
    ntotal = 8
    ndata = len(data)
    while n < ndata:
        name_bytes, = struct_name.unpack(data[n:n+ntotal])
        name = name_bytes.decode('latin1').rstrip()
        n += ntotal

        labels = []
        chunk = data[n:n+ntotal]
        while struct_sentinel.unpack(chunk) != (-1, -1):
            label_bytes, = struct_label.unpack(chunk)
            labels.append(reshape_bytes_block_size(label_bytes, self.size))
            n += ntotal
            chunk = data[n:n+ntotal]
        n += ntotal  # step over the (-1,-1) sentinel
        aecompl = op2.add_aecompl(name, labels)
        str(aecompl)  # validate that the card writes cleanly
    return len(data)
def _read_spline1(self, data: bytes, n: int) -> int:
    """reads the SPLINE1 card; only the NX format is handled"""
    return self._read_spline1_nx(data, n)
def _read_spline1_nx(self, data: bytes, n: int) -> int:
    """
    Reads the SPLINE1 card (NX format, 12 words).
    Word Name Type Description
    1 EID       I
    2 CAERO     I
    3 BOX1      I
    4 BOX2      I
    5 SETG      I
    6 DZ        RS
    7 METHOD(2) CHAR4 Method: IPS|TPS|FPS
    9 USAGE(2)  CHAR4 Usage flag: FORCE|DISP|BOTH
    11 NELEM    I Number of elements for FPS on x-axis
    12 MELEM    I Number of elements for FPS on y-axis
    """
    op2 = self.op2
    ntotal = 48  # 12 words * 4 bytes
    ndatai = len(data) - n
    assert ndatai % ntotal == 0
    struct_spline = Struct(op2._endian + b'5if 8s 8s 2i')
    for unused_icard in range(ndatai // ntotal):
        (eid, caero, box1, box2, setg, dz, method_bytes,
         usage_bytes, nelements, melements) = struct_spline.unpack(data[n:n+ntotal])
        method = method_bytes.rstrip().decode('ascii')
        usage = usage_bytes.rstrip().decode('ascii')
        spline1 = op2.add_spline1(eid, caero, box1, box2, setg,
                                  dz=dz, method=method,
                                  usage=usage, nelements=nelements,
                                  melements=melements)
        str(spline1)  # validate that the card writes cleanly
        n += ntotal
    return n
def _read_spline2(self, data: bytes, n: int) -> int:
    """
    Reads the SPLINE2 card (12 words).
    Word Name Type Description
    1 EID       I
    2 CAERO     I
    3 ID1       I
    4 ID2       I
    5 SETG      I
    6 DZ        RS
    7 DTOR      RS
    8 CID       I
    9 DTHX      RS
    10 DTHY     RS
    11 USAGE(2) CHAR4 Usage flag: FORCE|DISP|BOTH
    """
    op2 = self.op2
    ntotal = 48 * self.factor  # 12 words
    ncards = (len(data) - n) // ntotal
    if self.size == 4:
        struct_spline = Struct(op2._endian + b'5i 2f i 2f 8s')
    else:
        struct_spline = Struct(op2._endian + b'5q 2d q 2d 16s')
    for unused_icard in range(ncards):
        (eid, caero, id1, id2, setg, dz, dtor, cid,
         dthx, dthy, usage_bytes) = struct_spline.unpack(data[n:n+ntotal])
        usage = usage_bytes.rstrip().decode('latin1')
        spline2 = op2.add_spline2(
            eid, caero,
            id1, id2, setg,
            dz=dz, dtor=dtor, cid=cid,
            dthx=dthx, dthy=dthy,
            usage=usage)
        str(spline2)  # validate that the card writes cleanly
        n += ntotal
    return n
def _read_spline3(self, data: bytes, n: int) -> int:
    """
    Reads the SPLINE3 card.

    The record layout has not been reverse engineered yet.  The previous
    implementation was a bare-name placeholder that crashed with an
    unhelpful NameError; raise an explicit error instead.
    """
    raise NotImplementedError('SPLINE3 reading is not supported')
def _read_spline4(self, data: bytes, n: int) -> int:
    """
    Reads the SPLINE4 card, which comes in an 11-word (NX) and a
    13-word (MSC) flavor; ``_read_double_card`` tries both readers.
    """
    op2 = self.op2
    methods = {
        44 : self._read_spline4_nx_44,
        52 : self._read_spline4_msc_52,
    }
    # the old ``except DoubleCardError: raise`` was a no-op wrapper;
    # let the exception propagate naturally
    n = op2.reader_geom2._read_double_card(
        'SPLINE4', SPLINE4,
        op2._add_methods._add_spline_object,
        methods, data, n)
    return n
def _read_spline4_nx_44(self, spline: SPLINE4, data: bytes, n: int) -> Tuple[int, SPLINE4]:
    """
    Reads the 11-word NX flavor of SPLINE4 (no FTYPE/RCORE trailer).
    Word Name Type Description
    1 EID       I     Spline element Identification
    2 CAERO     I     Component Identification
    3 AELIST    I     AELIST Id for boxes
    4 SETG      I     SETi Id for grids
    5 DZ        RS    Smoothing Parameter
    6 METHOD(2) CHAR4 Method: IPS|TPS|FPS
    8 USAGE(2)  CHAR4 Usage flag: FORCE|DISP|BOTH
    10 NELEM    I     Number of elements for FPS on x-axis
    11 MELEM    I     Number of elements for FPS on y-axis
    """
    op2 = self.op2
    ntotal = 44  # 11 words * 4 bytes
    ndatai = len(data) - n
    assert ndatai % ntotal == 0
    struct_spline = Struct(op2._endian + b'4i f 8s 8s 2i')
    splines = []
    for unused_icard in range(ndatai // ntotal):
        (eid, caero, aelist, setg, dz, method_bytes, usage_bytes,
         nelements, melements) = struct_spline.unpack(data[n:n+ntotal])
        method = method_bytes.rstrip().decode('ascii')
        usage = usage_bytes.rstrip().decode('ascii')
        spline = SPLINE4(eid, caero, aelist, setg,
                         dz, method, usage,
                         nelements, melements)
        str(spline)  # validate that the card writes cleanly
        splines.append(spline)
        n += ntotal
    op2.to_nx(' because SPLINE4-NX was found')
    return n, splines
def _read_spline4_msc_52(self, spline: SPLINE4, data: bytes, n: int) -> Tuple[int, SPLINE4]:
    """
    Reads the 13-word MSC flavor of SPLINE4.
    MSC 2018.2
    Word Name Type Description
    1 EID       I     Spline element Identification
    2 CAERO     I     Component Identification
    3 AELIST    I     AELIST Id for boxes
    4 SETG      I     SETi Id for grids
    5 DZ        RS    Smoothing Parameter
    6 METHOD(2) CHAR4 Method: IPS|TPS|FPS
    8 USAGE(2)  CHAR4 Usage flag: FORCE|DISP|BOTH
    10 NELEM    I     Number of elements for FPS on x-axis
    11 MELEM    I     Number of elements for FPS on y-axis
    12 FTYPE    I     Radial interpolation function for METHOD=RIS (not in NX)
    13 RCORE    RS    Radius of radial interpolation function (not in NX)
    """
    op2 = self.op2
    ntotal = 52  # 13 words * 4 bytes
    ndatai = len(data) - n
    assert ndatai % ntotal == 0
    struct_spline = Struct(op2._endian + b'4i f 8s 8s 2i if')
    splines = []
    for unused_icard in range(ndatai // ntotal):
        (eid, caero, aelist, setg, dz, method_bytes, usage_bytes,
         nelements, melements, ftype, rcore) = struct_spline.unpack(data[n:n+ntotal])
        method = method_bytes.rstrip().decode('ascii')
        usage = usage_bytes.rstrip().decode('ascii')
        spline = SPLINE4(eid, caero, aelist, setg,
                         dz, method, usage,
                         nelements, melements,
                         ftype=ftype, rcore=rcore)
        str(spline)  # validate that the card writes cleanly
        splines.append(spline)
        n += ntotal
    op2.to_msc(' because SPLINE4-MSC was found')
    return n, splines
def _read_spline5(self, data: bytes, n: int) -> int:
    """
    Reads the SPLINE5 card, which comes in a 15-word (NX) and a
    17-word (MSC) flavor; ``_read_double_card`` tries both readers.
    """
    op2 = self.op2
    methods = {
        60 : self._read_spline5_nx_60,
        68 : self._read_spline5_msc_68,
    }
    # the old ``except DoubleCardError: raise`` was a no-op wrapper;
    # let the exception propagate naturally
    n = op2.reader_geom2._read_double_card(
        'SPLINE5', SPLINE5,
        op2._add_methods._add_spline_object,
        methods, data, n)
    return n
def _read_spline5_msc_68(self, spline: SPLINE5, data: bytes, n: int) -> Tuple[int, List[SPLINE5]]:
    """
    Reads the 17-word MSC flavor of the SPLINE5 card.
    Word Name Type Description
    1 EID       I     Spline element Identification
    2 CAERO     I     Component Identification
    3 AELIST    I     AELIST Id for boxes
    4 SETG      I     SETi Id for grids
    5 DZ        RS    Smoothing Parameter
    6 DTORXY    RS    Flexibility ratio in XY Plane
    7 CID       I     Coordinate Sys. Id. for Beam CS
    8 DTHX      RS    Smoothing/Attachment Flags for X rotations
    9 DTHY      RS    Smoothing/Attachment Flags for Y rotations
    10 DTHZ     RS    Smoothing/Attachment Flags for Z rotations
    11 USAGE(2) CHAR4 Usage flag: FORCE|DISP|BOTH
    13 METHOD(2) CHAR4 Method: IPS|TPS|FPS|RIS
    15 DTORZY   RS    Flexibility ratio in ZY Plane
    16 FTYPE    I     Radial interpolation function for METHOD=RIS (not in NX)
    17 RCORE    RS    Radius of radial interpolation function (not in NX)
    """
    op2 = self.op2
    ntotal = 68 * self.factor  # 17 words
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0, ndatai % ntotal
    if self.size == 4:
        structi = Struct(op2._endian + b'4i 2f i 3f 8s8s fif')
    else:
        # bug fix: the 64-bit branch was a bare-name placeholder that
        # crashed with a NameError; fail with a clear message instead
        raise NotImplementedError('64-bit SPLINE5-MSC is not supported')
    splines = []
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        (eid, caero, aelist, setg, dz, dtorxy, cid, dthx, dthy, dthz,
         usage_bytes, method_bytes, dtorzy, ftype, rcore) = structi.unpack(edata)
        method = reshape_bytes_block_size(method_bytes, self.size)
        usage = reshape_bytes_block_size(usage_bytes, self.size)
        assert method in ['IPS','TPS','FPS','RIS', 'BEAM'], method
        assert usage in ['FORCE','DISP','BOTH'], usage
        # NOTE(review): dthz and dtorxy are read but not passed through;
        # SPLINE5 only takes thx/thy and one dtor -- same as the original
        spline = SPLINE5(
            eid, caero, aelist, setg, dthx, dthy,
            dz=dz, dtor=dtorzy, cid=cid,
            usage=usage, method=method,
            ftype=ftype, rcore=rcore,  # not in NX
        )
        str(spline)  # validate that the card writes cleanly
        splines.append(spline)
        n += ntotal
    op2.to_msc(' because SPLINE5-MSC was found')
    return n, splines
def _read_spline5_nx_60(self, spline: SPLINE5, data: bytes, n: int) -> Tuple[int, List[SPLINE5]]:
    """
    Reads the 15-word NX flavor of the SPLINE5 card (no FTYPE/RCORE).
    Word Name Type Description
    1 EID       I     Spline element Identification
    2 CAERO     I     Component Identification
    3 AELIST    I     AELIST Id for boxes
    4 SETG      I     SETi Id for grids
    5 DZ        RS    Smoothing Parameter
    6 DTORXY    RS    Flexibility ratio in XY Plane
    7 CID       I     Coordinate Sys. Id. for Beam CS
    8 DTHX      RS    Smoothing/Attachment Flags for X rotations
    9 DTHY      RS    Smoothing/Attachment Flags for Y rotations
    10 DTHZ     RS    Smoothing/Attachment Flags for Z rotations
    11 USAGE(2) CHAR4 Usage flag: FORCE|DISP|BOTH
    13 METHOD(2) CHAR4 Method: IPS|TPS|FPS|RIS
    15 DTORZY   RS    Flexibility ratio in ZY Plane
    """
    op2 = self.op2
    ntotal = 60 * self.factor  # 15 words
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0, ndatai % ntotal
    if self.size == 4:
        structi = Struct(op2._endian + b'4i 2f i 3f 8s8s f')
    else:
        # bug fix: the 64-bit branch was a bare-name placeholder that
        # crashed with a NameError; fail with a clear message instead
        raise NotImplementedError('64-bit SPLINE5-NX is not supported')
    splines = []
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        (eid, caero, aelist, setg, dz, dtorxy, cid, dthx, dthy, dthz,
         usage_bytes, method_bytes, dtorzy) = structi.unpack(edata)
        method = reshape_bytes_block_size(method_bytes, self.size)
        usage = reshape_bytes_block_size(usage_bytes, self.size)
        assert method in ['IPS','TPS','FPS','RIS'], method
        assert usage in ['FORCE','DISP','BOTH'], usage
        # NOTE(review): dthz and dtorxy are read but not passed through;
        # SPLINE5 only takes thx/thy and one dtor -- same as the original
        spline = SPLINE5(
            eid, caero, aelist, setg, dthx, dthy,
            dz=dz, dtor=dtorzy, cid=cid,
            usage=usage, method=method,
        )
        str(spline)  # validate that the card writes cleanly
        splines.append(spline)
        n += ntotal
    op2.to_nx(' because SPLINE5-NX was found')
    return n, splines
def _read_monpnt1(self, data: bytes, n: int) -> int:
    """
    Reads the MONPNT1 card, which comes in a 23-word (NX) and a
    24-word (MSC, adds CD) flavor; ``_read_double_card`` tries both.
    """
    op2 = self.op2
    methods = {
        92 : self._read_monpnt1_nx_92,
        96 : self._read_monpnt1_96,
    }
    # the old ``except DoubleCardError: raise`` was a no-op wrapper;
    # let the exception propagate naturally
    n = op2.reader_geom2._read_double_card(
        'MONPNT1', MONPNT1,
        op2._add_methods._add_monpnt_object,
        methods, data, n)
    return n
def _read_monpnt1_nx_92(self, monpnt1: MONPNT1, data: bytes, n: int) -> Tuple[int, List[MONPNT1]]:
    """
    Reads the 23-word NX flavor of MONPNT1 (no CD field).
    Word Name Type Description
    1 NAME(2)   CHAR4
    3 LABEL(14) CHAR4
    17 AXES     I
    18 COMP(2)  CHAR4
    20 CP       I
    21 X        RS
    22 Y        RS
    23 Z        RS
    """
    op2 = self.op2
    ntotal = 92  # 23 words * 4 bytes
    ndatai = len(data) - n
    assert ndatai % ntotal == 0
    struct_monpnt = Struct(op2._endian + b'8s 56s i 8s i 3f')  # nx
    monpnt1s = []
    for unused_icard in range(ndatai // ntotal):
        (name_bytes, label_bytes, axes, aecomp_name_bytes,
         cp, x, y, z) = struct_monpnt.unpack(data[n:n+ntotal])
        name = reshape_bytes_block_size(name_bytes, self.size)
        label = reshape_bytes_block_size(label_bytes, self.size)
        aecomp_name = reshape_bytes_block_size(aecomp_name_bytes, self.size)
        monpnt1 = MONPNT1(name, label, axes, aecomp_name,
                          [x, y, z], cp=cp)
        str(monpnt1)  # validate that the card writes cleanly
        monpnt1s.append(monpnt1)
        n += ntotal
    op2.to_nx(' because MONPNT1-NX was found')
    return n, monpnt1s
def _read_monpnt1_96(self, monpnt1: MONPNT1, data: bytes, n: int) -> Tuple[int, List[MONPNT1]]:
    """
    Reads the 24-word MSC flavor of MONPNT1 (trailing CD field).
    MSC 2018.2
    Word Name Type Description
    1 NAME(2)   CHAR4
    3 LABEL(14) CHAR4
    17 AXES     I
    18 COMP(2)  CHAR4
    20 CP       I
    21 X        RS
    22 Y        RS
    23 Z        RS
    24 CD       I
    """
    op2 = self.op2
    ntotal = 96  # 24 words * 4 bytes
    ndatai = len(data) - n
    assert ndatai % ntotal == 0
    struct_monpnt = Struct(op2._endian + b'8s 56s i 8s i 3f i')  # msc
    monpnt1s = []
    for unused_icard in range(ndatai // ntotal):
        (name_bytes, label_bytes, axes, aecomp_name_bytes,
         cp, x, y, z, cd) = struct_monpnt.unpack(data[n:n+ntotal])
        name = reshape_bytes_block_size(name_bytes, self.size)
        label = reshape_bytes_block_size(label_bytes, self.size)
        aecomp_name = reshape_bytes_block_size(aecomp_name_bytes, self.size)
        monpnt1 = MONPNT1(name, label, axes, aecomp_name,
                          [x, y, z], cp=cp, cd=cd)
        str(monpnt1)  # validate that the card writes cleanly
        monpnt1s.append(monpnt1)
        n += ntotal
    return n, monpnt1s
def _read_monpnt2(self, data: bytes, n: int) -> int:
    """
    Reads the MONPNT2 card; Record 59 - MONPNT2(8204,82,621)
    Word Name Type Description
    1 NAME(2)   CHAR4
    3 LABEL(14) CHAR4
    17 TABLE(2) CHAR4
    19 ELTYP(2) CHAR4
    21 ITEM(2)  CHAR4
    23 EID      I
    """
    op2 = self.op2
    ntotal = 92 * self.factor  # 23 words
    ndatai = len(data) - n
    assert ndatai % ntotal == 0
    struct_monpnt = Struct(op2._endian + b'8s 56s 8s8s8s i')  # msc
    for unused_icard in range(ndatai // ntotal):
        (name_bytes, label_bytes, table_bytes, eltype_bytes,
         item_bytes, eid) = struct_monpnt.unpack(data[n:n+ntotal])
        name = reshape_bytes_block_size(name_bytes, self.size)
        label = reshape_bytes_block_size(label_bytes, self.size)
        table = reshape_bytes_block_size(table_bytes, self.size)
        element_type = reshape_bytes_block_size(eltype_bytes, self.size)
        nddl_item = reshape_bytes_block_size(item_bytes, self.size)
        monpnt = MONPNT2(name, label, table, element_type, nddl_item, eid, comment='')
        op2._add_methods._add_monpnt_object(monpnt)
        str(monpnt)  # validate that the card writes cleanly
        n += ntotal
    return n
def _read_monpnt3(self, data: bytes, n: int) -> int:
    """
    Reads the MONPNT3 card.
    Record 60 - MONPNT3(8304,83,622)
    Word Name Type Description
    1 NAME(2) CHAR4
    3 LABEL(14) CHAR4
    17 AXES I Axes to compute
    18 GRIDSET I GPF Grid Set
    19 ELEMSET I GPF Elem Set
    20 CID I Coord system x,y,z input in
    21 X RS
    22 Y RS
    23 Z RS
    24 XFLAG I Exclude forces from class
    25 CD I
    """
    op2 = self.op2
    ntotal = 100 * self.factor  # 4 * 25
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(op2._endian + b'8s 56s 4i 3f 2i') # msc
    monpnts = []
    #XFLAG Exclusion flag. Exclude the indicated Grid Point Force types from summation at the
    #monitor point. Default = blank (no type excluded). See Remark 4.
    #S SPCforces
    #M MPC forces
    #A, L, or P applied loads
    #D dmig’s (and any other type not described above) at the monitored point.
    # A = L = P
    # 2 ^ 4 = 16
    # official: 0, 2, 4, 8, 16, 28, 30
    # guess: 6, 24, 26
    # NOTE(review): the integer->letter decoding below is partly inferred
    # (see the 'guess' entries above) -- confirm against the MSC DMAP guide
    xflag_map = {
        0: None,
        #1: 'A',?
        2: 'S',
        #3: 'SA',?
        4: 'M',
        #5: 'MA',
        6: 'MS',
        #7: 'MSA',?
        8: 'A',  # A = L = P
        16: 'D',
        24: 'DP',
        26: 'SDP',
        28: 'MAD',
        30: 'SMAD',
    }
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        name_bytes, label_bytes, axes, grid_set, elem_set, cp, x, y, z, xflag, cd = out
        name = reshape_bytes_block_size(name_bytes, self.size)
        label = reshape_bytes_block_size(label_bytes, self.size)
        xyz = [x, y, z]
        try:
            xflag_str = xflag_map[xflag]
        except Exception:
            # re-raise with the card's name/label so the bad flag is traceable
            raise RuntimeError((name, label, xflag))
        monpnt = MONPNT3(name, label, axes, grid_set, elem_set, xyz,
                         cp=cp, cd=cd, xflag=xflag_str, comment='')
        op2._add_methods._add_monpnt_object(monpnt)
        str(monpnt)  # validate that the card writes cleanly
        #print(monpnt)
        n += ntotal
        monpnts.append(monpnt)
    #op2.to_nx(' because MONPNT3-NX was found')
    return n # , monpnt1s
def _read_mondsp1(self, data: bytes, n: int) -> int:
    """
    Reads the MONDSP1 card.
    Record 56 - MONDSP1(8804,88,628)
    Word Name Type Description
    1 NAME(2) CHAR4
    3 LABEL(14) CHAR4
    17 AXES I
    18 COMP(2) CHAR4
    20 CP I
    21 X RS
    22 Y RS
    23 Z RS
    24 CD I
    25 INDDOF I
    """
    op2 = self.op2
    ntotal = 100 * self.factor  # 4 * 25
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(op2._endian + b'8s 56s i8s i 3f 2i') # msc
    monpnts = []
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        name_bytes, label_bytes, axes, aecomp_name_bytes, cp, x, y, z, cd, ind_dof = out
        name = reshape_bytes_block_size(name_bytes, self.size)
        label = reshape_bytes_block_size(label_bytes, self.size)
        aecomp_name = reshape_bytes_block_size(aecomp_name_bytes, self.size)
        xyz = [x, y, z]
        # NOTE(review): ind_dof is unpacked from word 25 but a hard-coded
        # '123' is passed instead; confirm the INDDOF integer encoding
        # before wiring the parsed value through
        monpnt = MONDSP1(name, label, axes, aecomp_name, xyz, cp=cp, cd=cd,
                         ind_dof='123', comment='')
        op2._add_methods._add_monpnt_object(monpnt)
        str(monpnt)  # validate that the card writes cleanly
        n += ntotal
        monpnts.append(monpnt)
    #op2.to_nx(' because MONPNT3-NX was found')
    return n # , monpnt1s
def _read_mdlprm(self, data: bytes, n: int) -> int:
    """
    Reads the MDLPRM card.
    Word Name Type Description
    1 NAME(2) CHAR4 User defined parameter
    3 VALUE I/RS parameter value (int or float depending on the name)

    strings = (b'NSGRDS4 \x14\x00\x00\x00PEXTS4  \x00\x00\x00\x00SPBLNDX \xcd\xcc\xcc\xcc',)
    MDLPRM, nsgrds4, 20, pexts4, 50., spblndx, 3.1
    """
    op2 = self.op2
    ntotal = 12 * self.factor  # 3 words
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(op2._endian + b'8s i')  # msc
    structf = Struct(op2._endian + b'8s f')  # msc
    data_dict = {}
    # parameters whose value word is a float, not an int
    MDLPRM_FLOAT_KEYS_1 = {
        'DBCTOLE', 'DELELAS', 'DELFAST', 'DELMASS', 'DELSEAM', 'DELWELD',
        'PEXTS4', 'PIVTHRSH', 'SPBLNDX'}
    # bug fix: this used ('-%8s' % name), which right-justifies and
    # prepends a literal '-' (9 chars), so no 8-byte CHAR4*2 name could
    # ever match and every float parameter was misread as an int;
    # '%-8s' left-justifies the name to exactly 8 characters
    float_names = {('%-8s' % name).encode('ascii') for name in MDLPRM_FLOAT_KEYS_1}
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        name_bytes, value = structi.unpack(edata)
        if name_bytes in float_names:
            # re-read the value word as a float
            name_bytes, value = structf.unpack(edata)
        name = reshape_bytes_block_size(name_bytes, self.size)
        if name == 'SHEARP':
            if value == 2:
                value = 'HARDER'
            else:
                raise NotImplementedError((name, value))
        elif name == 'OFFDEF':
            if value == 8:
                value = 'ELMOFF'
            elif value == 65:
                value = 'NODIFF'
            elif value in [128, 192]:
                value = 'LROFF'
            else:
                raise NotImplementedError((name, value))
        data_dict[name] = value
        n += ntotal
    if 'SPBLNDX' in data_dict:
        # the OP2 stores garbage for SPBLNDX (see the docstring example)
        raise RuntimeError(f'SPBLNDX exists and has the wrong value...{data_dict}')
    if op2.mdlprm is not None:
        # an MDLPRM was already read; keep the first one
        return n
    op2.add_mdlprm(data_dict)
    op2.to_msc(' because MDLPRM-MSC was found')
    return n # , monpnt1s
def _read_aestat(self, data: bytes, n: int) -> int:
    """
    Reads the AESTAT card (3 words).
    MSC 2018.2
    Word Name Type Description
    1 ID I
    2 LABEL(2) CHAR4
    """
    op2 = self.op2
    ntotal = 12 * self.factor  # 3 words
    struct_aestat = (Struct(op2._endian + b'i 8s') if self.size == 4
                     else Struct(op2._endian + b'q 16s'))
    ndatai = len(data) - n
    assert ndatai % ntotal == 0
    for unused_icard in range(ndatai // ntotal):
        aestat_id, label_bytes = struct_aestat.unpack(data[n:n+ntotal])
        label = reshape_bytes_block_size(label_bytes, self.size)
        aestat = op2.add_aestat(aestat_id, label)
        str(aestat)  # validate that the card writes cleanly
        n += ntotal
    return n
def _read_flutter(self, data: bytes, n: int) -> int:
    """
    Reads the FLUTTER card (3902, 39, 272).
    MSC 2018.2
    Word Name Type Description
    1 SID I
    2 METHOD(2) CHAR4
    4 DENS I
    5 MACH I
    6 RFREQ I
    7 IMETH(2) CHAR4
    SFLG=0 (std)
      9 NEIGN I nvalue
    SFLG=1 (sweep)
      9 FMAX RS maximum frequency
    End SFLG
    10 EPR RS
    11 SFLG I SWEEP FLAG
    Words 1 through max repeat until End of Record

    NX:
      sid method, d, m, k, imethod, neign, epr, sflag
    data = (30, PK, 1, 2, 3, L, 3, 0.001, -1)  # ???
    data = (30, KE, ' ', 1, 2, 3, L, ' ', 3, 0.001, 0.0, -1)  # MSC
    """
    op2 = self.op2
    # view the record both ways: EPR is the only float word
    ints = np.frombuffer(data[n:], op2.idtype).copy()
    floats = np.frombuffer(data[n:], op2.fdtype).copy()
    istart, iend = get_minus1_start_end(ints)
    for (i0, i1) in zip(istart, iend):
        sid = ints[i0]
        assert ints[i1] == -1, ints[i1]
        # METHOD is CHAR4*2 (words 2-3); pull it from the raw byte stream
        method_bytes = data[n+i0*4+4:n+i0*4+12]
        density = ints[i0+3]
        mach = ints[i0+4]
        reduced_freq_velocity = ints[i0+5]
        # IMETH is CHAR4*2 (words 7-8)
        imethod_bytes = data[n+i0*4+24:n+i0*4+32]
        nvalue = ints[i0+8] # nvalue
        epsilon = floats[i0+9]
        if ints[i0+10] == -1:
            # NX: the record ends right after EPR
            assert ints[i0+10] == -1, ints[i0:i1]
        else:
            # msc: an extra SFLG (sweep flag) word precedes the -1
            sweep_flag = ints[i0+10]
            assert ints[i0+11] == -1, ints[i0:i1+1]
            # only the non-sweep form is supported here
            assert sweep_flag == 0, sweep_flag
        method = method_bytes.rstrip().decode('ascii')
        imethod = imethod_bytes.rstrip().decode('ascii')
        op2.add_flutter(sid, method,
                        density, mach, reduced_freq_velocity,
                        imethod=imethod, # 'L'
                        nvalue=nvalue,
                        epsilon=epsilon, validate=True)
    op2.to_nx(' because FLUTTER was found')
    return len(data)
    #ntotal = 12 # 4 * 8
    #ndatai = len(data) - n
    #ncards = ndatai // ntotal
    #assert ndatai % ntotal == 0
    #structi = Struct(op2._endian + b'i 8s')
    #for unused_i in range(ncards):
        #edata = data[n:n + ntotal]
        #out = structi.unpack(edata)
        #aestat_id, label = out
        #label = label.rstrip().decode('latin1')
        #op2.add_aestat(aestat_id, label)
        #n += ntotal
    #return n
def _read_trim(self, data: bytes, n: int) -> int:
    """
    Reads the TRIM card (2402, 24, 342).
    MSC 2018.2
    Word Name Type Description
    1 ID I
    2 MACH RS
    3 Q RS
    4 AEQRATIO RS
    5 LABEL(2) CHAR4
    7 UX RS
    Words 5 through 7 repeat until (-1,-1,-1) occurs
    """
    op2 = self.op2
    ntotal1 = 16 * self.factor  # 4-word header: ID/MACH/Q/AEQRATIO
    ntotal2 = 12 * self.factor  # 3-word pair: LABEL(2)/UX
    if self.size == 4:
        struct_header = Struct(op2._endian + b'i 3f')
        struct_ux = Struct(op2._endian + b'8sf')
        struct_sentinel = Struct(op2._endian + b'3i')
    else:
        struct_header = Struct(op2._endian + b'q 3d')
        struct_ux = Struct(op2._endian + b'16sd')
        struct_sentinel = Struct(op2._endian + b'3q')
    ndata = len(data)
    while n < ndata:
        trim_id, mach, q, aeqr = struct_header.unpack(data[n:n+ntotal1])
        n += ntotal1
        labels = []
        uxs = []
        chunk = data[n:n+ntotal2]
        while struct_sentinel.unpack(chunk) != (-1, -1, -1):
            label_bytes, ux = struct_ux.unpack(chunk)
            labels.append(reshape_bytes_block_size(label_bytes, self.size))
            uxs.append(ux)
            n += ntotal2
            chunk = data[n:n+ntotal2]
        n += ntotal2  # step over the (-1,-1,-1) sentinel
        trim = op2.add_trim(trim_id, mach, q, labels, uxs, aeqr=aeqr, trim_type=1)
        str(trim)  # validate that the card writes cleanly
    return n
def _read_aesurf(self, data: bytes, n: int) -> int:
    """
    Reads the AESURF card (17 words).
    MSC 2018.2
    Word Name Type Description
    1 ID I trim variable dof id, >0, no default
    2 LABEL(2) CHAR4 Control Surface (CS) name, no default
    4 CID1 I coord with y-axis defining the hinge line of the CS
    5 ALID1 I AELIST of the aero elements in the CS
    6 CID2 I coord for the optional 2nd surface
    7 ALID2 I AELIST for the optional 2nd surface
    8 EFF RS Control surface EFFectiveness, default=1.0
    9 LDW I =0 create a linear down wash, >0 no linear downwash
    10 CREFC RS Reference Chord Length for the CS, default=1.0
    11 CREFS RS Reference area for the CS, default=1.0
    12 PLLIM RS Lower deflection Limit (rad), default=no limit
    13 PULIM RS Upper deflection Limit (rad), default=no limit
    14 HMLLIM RS Lower Hinge Moment Limit, default=no limit
    15 HMULIM RS Upper Hinge Moment Limit, default=no limit
    16 TQLLIM RS Lower deflection Limit as fct(q), default=no limit
    17 TQULIM RS Upper deflection Limit as fct(q), default=no limit
    """
    op2 = self.op2
    ntotal = 68 * self.factor  # 17 words
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    # the last 6 words are either all-blank CHAR4 fields (no limit) or
    # floats, so they are unpacked twice: once as raw bytes for the
    # blank test, once as floats (struct2 over the last 24*factor bytes)
    ntotali = -24 * self.factor
    if self.size == 4:
        struct2 = Struct(op2._endian + b'6f')
        struct1 = Struct(op2._endian + b'i 8s 4i fi 2f 4s4s 4s4s 4s4s')
        nan = b'    '
    else:
        struct2 = Struct(op2._endian + b'6d')
        struct1 = Struct(op2._endian + b'q 16s 4q dq 2d 8s8s 8s8s 8s8s')
        nan = b'        '
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out1 = struct1.unpack(edata)
        (aesurf_id, label_bytes, cid1, alid1, cid2, alid2, eff, ldw_int, crefc, crefs,
         pllim, pulim, hmllim, hmulim, tqllim, tqulim) = out1
        label = reshape_bytes_block_size(label_bytes, self.size)
        if ldw_int == 0:
            ldw = 'LDW'
        elif ldw_int == 1:
            ldw = 'NOLDW'
        else:
            raise NotImplementedError(ldw_int)
        assert isinstance(ldw, str), ldw
        if (pllim, pulim, hmllim, hmulim, tqllim, tqulim) == (nan, nan, nan, nan, nan, nan):
            # all six limit fields are blank -> no limits
            pllim = None
            pulim = None
            hmllim = None
            hmulim = None
            tqllim = None
            tqulim = None
        else:
            pllim2, pulim2, hmllim2, hmulim2, tqllim2, tqulim2 = struct2.unpack(edata[ntotali:])
            pllim = pllim2 if pllim != nan else None
            pulim = pulim2 if pulim != nan else None
            hmllim = hmllim2 if hmllim != nan else None
            # bug fix: this previously kept the raw bytes (hmulim) instead
            # of the unpacked float (hmulim2)
            hmulim = hmulim2 if hmulim != nan else None
            tqllim = tqllim2 if tqllim != nan else None
            tqulim = tqulim2 if tqulim != nan else None
        # NOTE(review): cid2/alid2 are unpacked but passed as None (same
        # as the original); confirm before wiring them through
        op2.add_aesurf(aesurf_id, label, cid1, alid1, cid2=None, alid2=None,
                       eff=eff, ldw=ldw, crefc=crefc, crefs=crefs,
                       pllim=pllim, pulim=pulim,
                       hmllim=hmllim, hmulim=hmulim, # hinge moment lower/upper limits
                       tqllim=tqllim, tqulim=tqulim)
        n += ntotal
    return n
def _read_aesurfs(self, data: bytes, n: int) -> int:
    """
    Reads the AESURFS card (5 words).
    Word Name Type Description
    1 ID I Identification of an aerodynamic trim variable degree
        of freedom >0, no default
    2 LABEL(2) CHAR4 Control Surface (CS) name, no default
    4 LIST1 I Identification of a SET1 that contains the grids ids
        associated with this control surface
    5 LIST2 I Identification of a SET1 that contains the grids ids
        associated with this control surface
    """
    op2 = self.op2
    ntotal = 20 * self.factor  # 5 words
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    if self.size == 4:
        struct1 = Struct(op2._endian + b'i 8s 2i')
    else:
        # bug fix: the old 64-bit format (b'i 16s 2i') unpacked only
        # 28 bytes of a 40-byte record; the integers must be 8-byte too
        struct1 = Struct(op2._endian + b'q 16s 2q')
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out1 = struct1.unpack(edata)
        aesid, label_bytes, list1, list2 = out1
        label = reshape_bytes_block_size(label_bytes, self.size)
        aesurfs = op2.add_aesurfs(aesid, label, list1, list2)
        str(aesurfs)  # validate that the card writes cleanly
        n += ntotal
    return n
def _read_aefact(self, data: bytes, n: int) -> int:
    """
    Reads the AEFACT card.
    MSC 2018.2
    Word Name Type Description
    1 SID I
    2 D RS
    Word 2 repeats until End of Record

    (1, 0.0, 0.1, 0.2, 1.0, -1,
     2, 0.0, 0.1, 0.2, 0.5, 1.0, -1,
    )
    """
    op2 = self.op2
    # view the same buffer as ints (for sid/sentinels) and floats (values)
    ints = np.frombuffer(data[n:], op2.idtype8).copy()
    floats = np.frombuffer(data[n:], op2.fdtype8).copy()
    istart, iend = get_minus1_start_end(ints)
    for i0, i1 in zip(istart, iend):
        sid = ints[i0]
        assert ints[i1] == -1, ints[i1]
        fractions = floats[i0+1:i1]
        aefact = op2.add_aefact(sid, fractions)
        str(aefact)  # validate that the card writes cleanly
    return len(data)
def _expand_vals(grids):
    """expands the integer sentinels in an id list into their keywords:
    > 0 explicit ID, 0 -> 'THRU', -6 -> 'BY', -7 -> 'ALL'"""
    sentinel_map = {0: 'THRU', -6: 'BY', -7: 'ALL'}
    expanded = []
    for val in grids:
        if val > 0:
            expanded.append(val)
        elif val in sentinel_map:
            expanded.append(sentinel_map[val])
        else:
            raise NotImplementedError(f'val={val} data={grids}')
    return expanded
def _read_group_elem_prop_nids(ints, i, n, size) -> Tuple[int, int, Any]:
    """
    Helper for ``_read_group``: pulls one -1 terminated id list out of
    *ints* starting at index *i* and expands the THRU/BY/ALL sentinels.

    NOTE(review): the annotation promises ``(i, n, grids)`` but only the
    expanded grid list is returned; the updated i/n are computed and then
    discarded -- confirm against the caller.
    """
    i += 1
    n += size
    # grids
    #iminus1 = minus1[minus1_count] # + 1
    #print(ints[iminus1:])
    #grids = ints[i+1:iminus1].tolist()
    #print('ints[i:]', ints[i:])
    assert ints[i:][0] > 0, ints[i:]
    # scan forward to the -1 that terminates this id list
    for j, nj in enumerate(ints[i:]):
        if nj == -1:
            break
    # NOTE(review): the slice starts at i+1, skipping ints[i] even though
    # the assert above required it to be positive -- verify this
    # off-by-one is intended
    grids = ints[i+1:i+j].tolist()
    print('grids', grids)
    grids2 = _expand_vals(grids)
    print(f'  grids = {grids2}')
    # the list must start with an explicit id, not a sentinel keyword
    assert 'THRU' != grids2[0]
    assert 'BY' != grids2[0]
    assert 'ALL' != grids2[0]
    #minus1_count += 1
    nstop = len(grids) + 2
    i += nstop
    n += nstop * size
    #i = iminus1
    #n = iminus1 * 4
    return grids2
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,657
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/cards/bdf_sets.py
|
"""
All set cards are defined in this file. This includes:
* sets
* SET1, SET2, SET3, RADSET # ??? RADSET
* asets - aset, aset1
* omits - omit, omit1
* bsets - bset, bset1
* csets - cset, cset1
* qsets - qset, qset1
* usets - uset, uset1 # USET 1 is not supported
The superelement sets start with SE:
* se_bsets - sebset, sebset1
* se_csets - secset, secset1
* se_qsets - seqset, seqset1
* se_usets - seuset, seuset1
*se_sets
* SESET
* SEQSEP
#* Set
#* SetSuper
+------------+-----------------+
| Entry Type | Equivalent Type |
+============+=================+
| SEQSETi | QSETi |
+------------+-----------------+
| SESUP | SUPORT |
+------------+-----------------+
| SECSETi | CSETi |
+------------+-----------------+
| SEBSETi | BSETi |
+------------+-----------------+
"""
from __future__ import annotations
from typing import List, Union, Optional, Any, TYPE_CHECKING
import numpy as np
from pyNastran.utils.numpy_utils import integer_types, integer_string_types
from pyNastran.bdf.cards.base_card import (
BaseCard, _node_ids, expand_thru, write_card
)
from pyNastran.bdf.cards.collpase_card import collapse_thru, condense, build_thru_packs
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.bdf_interface.assign_type import (
integer, double, double_or_blank,
integer_or_blank, integer_or_string,
parse_components, components_or_blank as fcomponents_or_blank,
fields, string, integer_string_or_blank,
)
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
class Set(BaseCard):
    """Generic Class all SETx cards inherit from"""
    def __init__(self):
        #: list of IDs in the SETx
        self.ids = []

    def clean_ids(self) -> None:
        """eliminates duplicate IDs from self.IDs and sorts self.IDs"""
        self.ids = sorted(set(self.ids))

    def repr_fields(self) -> List[Optional[Union[int, float, str]]]:
        """the fields used for printing; identical to the raw fields here"""
        return self.raw_fields()

    def __repr__(self) -> str:
        return self.comment + print_card_8(self.repr_fields())

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card in 8/16-character field format"""
        return write_card(self.comment, self.repr_fields(), size, is_double)
class SetSuper(Set):
    """Generic Class all Superelement SETx cards inherit from."""
    def __init__(self):
        super().__init__()
        #: Superelement identification number. Must be a primary superelement.
        #: (Integer >= 0)
        self.seid = None
        #: list of IDs in the SESETx
        self.ids = None
class ABCQSet(Set):
    """
    Base class that the ASET, BSET, CSET, and QSET cards inherit from.

    Defines degrees-of-freedom in the analysis set (A-set)

    +------+-----+----+-----+------+-----+----+-----+----+
    |  1   |  2  | 3  |  4  |  5   |  6  | 7  |  8  | 9  |
    +======+=====+====+=====+======+=====+====+=====+====+
    | ASET | ID1 | C1 | ID2 |  C2  | ID3 | C3 | ID4 | C4 |
    +------+-----+----+-----+------+-----+----+-----+----+
    | ASET | 16  | 2  | 23  | 3516 |  1  | 4  |     |    |
    +------+-----+----+-----+------+-----+----+-----+----+
    """
    type = 'ABCQSet'

    def _finalize_hdf5(self, encoding):
        """hdf5 helper function"""
        if isinstance(self.ids, np.ndarray):
            self.ids = self.ids.tolist()
        if isinstance(self.components, np.ndarray):
            self.components = self.components.tolist()

    def __init__(self, ids: List[int], components: List[int], comment: str='') -> None:
        Set.__init__(self)
        if comment:
            self.comment = comment
        #: Identifiers of grids points. (Integer > 0)
        self.ids = [ids] if isinstance(ids, int) else ids
        self.components = components
        self.ids_ref = None

    def validate(self) -> None:
        assert isinstance(self.ids, list), type(self.ids)
        assert isinstance(self.components, list), type(self.components)
        assert len(self.ids) == len(self.components), 'len(ids)=%s len(components)=%s' % (len(self.ids), len(self.components))

    @classmethod
    def add_card(cls, card, comment=''):
        """builds the (ID, component) pairs from a BDFCard"""
        ids = []
        components = []
        for n in range(len(card) // 2):
            ifield = 2 * n + 1
            ids.append(integer(card, ifield, 'ID' + str(n)))
            components.append(parse_components(card, ifield + 1, 'component' + str(n)))
        return cls(ids, components, comment=comment)

    @classmethod
    def add_op2_data(cls, data: List[Any], comment: str='') -> ABCQSet:
        """builds the card from a single (id, component) OP2 pair"""
        return cls([data[0]], [data[1]], comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by %s' % self.type
        self.ids_ref = model.EmptyNodes(self.node_ids, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.ids = self.node_ids
        self.ids_ref = None

    @property
    def node_ids(self):
        """the GRID/SPOINT ids"""
        if self.ids_ref is None:
            return self.ids
        msg = ', which is required by %s' % self.type
        # NOTE(review): passes self.ids (not self.ids_ref), unlike the
        # sibling SuperABCQSet.node_ids -- confirm this is intentional
        return _node_ids(self, self.ids, allow_empty_nodes=True, msg=msg)

    def raw_fields(self):
        """gets the "raw" card without any processing as a list for printing"""
        list_fields = [self.type]  # ASET, BSET
        for idi, comp in zip(self.node_ids, self.components):
            list_fields.extend([idi, comp])
        return list_fields

    def __repr__(self):
        return self.comment + print_card_8(self.raw_fields())
class SuperABCQSet(Set):
    """
    Base class that the SEBSET, SECSET, and SEQSET cards inherit from.

    Defines degrees-of-freedom in the analysis set (A-set)

    +--------+------+-----+----+-----+------+-----+-----+-----+
    |   1    |  2   |  3  | 4  |  5  |  6   |  7  |  8  |  9  |
    +========+======+=====+====+=====+======+=====+=====+=====+
    | SEBSET | SEID | ID1 | C1 | ID2 | C2   | ID3 | C3  |     |
    +--------+------+-----+----+-----+------+-----+-----+-----+
    | SEBSET | 100  | 16  | 2  | 23  | 3516 |  1  |  4  |     |
    +--------+------+-----+----+-----+------+-----+-----+-----+
    """
    type = 'SuperABCQSet'

    def _finalize_hdf5(self, encoding):
        """hdf5 helper function"""
        if isinstance(self.ids, np.ndarray):
            self.ids = self.ids.tolist()
        if isinstance(self.components, np.ndarray):
            self.components = self.components.tolist()

    def __init__(self, seid, ids, components, comment=''):
        Set.__init__(self)
        if comment:
            self.comment = comment
        #: Superelement identification number
        self.seid = seid
        #: Identifiers of grids points. (Integer > 0)
        self.ids = ids
        self.components = components
        self.ids_ref = None

    def validate(self):
        assert isinstance(self.ids, list), type(self.ids)
        assert isinstance(self.components, list), type(self.components)
        assert len(self.ids) == len(self.components), 'len(ids)=%s len(components)=%s' % (len(self.ids), len(self.components))

    @classmethod
    def add_card(cls, card, comment=''):
        """builds the card (seid + (ID, component) pairs) from a BDFCard"""
        seid = integer(card, 1, 'seid')
        nfields = len(card)
        assert nfields % 2 == 0, 'The number of fields must be even; nfields=%s\ncard=%s' % (nfields, card)
        ids = []
        components = []
        for n in range(nfields // 2 - 1):
            ifield = 2 * n + 2
            ids.append(integer(card, ifield, 'ID' + str(n)))
            components.append(parse_components(card, ifield + 1, 'component' + str(n)))
        return cls(seid, ids, components, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by %s seid=%s' % (self.type, self.seid)
        self.ids_ref = model.EmptyNodes(self.node_ids, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.ids = self.node_ids
        self.ids_ref = None

    @property
    def node_ids(self):
        """the GRID/SPOINT ids"""
        if self.ids_ref is None:
            return self.ids
        msg = ', which is required by %s seid=%s' % (self.type, self.seid)
        return _node_ids(self, self.ids_ref, allow_empty_nodes=True, msg=msg)

    def raw_fields(self):
        """gets the "raw" card without any processing as a list for printing"""
        list_fields = [self.type, self.seid]  # SEASET, SEBSET
        for idi, comp in zip(self.node_ids, self.components):
            list_fields.extend([idi, comp])
        return list_fields

    def __repr__(self):
        return self.comment + print_card_8(self.raw_fields())
class ASET(ABCQSet):
    """
    Defines degrees-of-freedom in the analysis set (A-set).

    +------+-----+----+-----+------+-----+----+-----+----+
    |  1   |  2  | 3  |  4  |  5   |  6  | 7  |  8  | 9  |
    +======+=====+====+=====+======+=====+====+=====+====+
    | ASET | ID1 | C1 | ID2 |  C2  | ID3 | C3 | ID4 | C4 |
    +------+-----+----+-----+------+-----+----+-----+----+
    | ASET | 16  | 2  | 23  | 3516 |  1  | 4  |     |    |
    +------+-----+----+-----+------+-----+----+-----+----+
    """
    type = 'ASET'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        return ASET([1, 2], ['123', '456'], comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates an ASET card, which defines the degree of freedoms that
        will be retained during an ASET modal reduction.

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : List[str]
            the degree of freedoms to be retained (e.g., '1', '123')
        comment : str; default=''
            a comment for the card

        ..note :: the length of components and ids must be the same
        """
        super().__init__(ids, components, comment)
class BSET(ABCQSet):
    """
    Defines analysis set (a-set) degrees-of-freedom to be fixed (b-set)
    during generalized dynamic reduction or component mode synthesis
    calculations.

    +------+-----+----+-----+------+-----+----+-----+----+
    |  1   |  2  | 3  |  4  |  5   |  6  | 7  |  8  | 9  |
    +======+=====+====+=====+======+=====+====+=====+====+
    | BSET | ID1 | C1 | ID2 |  C2  | ID3 | C3 | ID4 | C4 |
    +------+-----+----+-----+------+-----+----+-----+----+
    | BSET | 16  | 2  | 23  | 3516 |  1  | 4  |     |    |
    +------+-----+----+-----+------+-----+----+-----+----+
    """
    type = 'BSET'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        return BSET([1, 2], ['123', '456'], comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates a BSET card, which defines the degree of freedoms that
        will be fixed during a generalized dynamic reduction or
        component mode synthesis calculation.

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : List[str]
            the degree of freedoms to be fixed (e.g., '1', '123')
        comment : str; default=''
            a comment for the card

        ..note :: the length of components and ids must be the same
        """
        super().__init__(ids, components, comment)
class CSET(ABCQSet):
    """
    Defines the degree of freedoms that will be free during a
    generalized dynamic reduction or component mode synthesis
    calculation.

    +------+-----+----+-----+------+-----+----+-----+----+
    |  1   |  2  | 3  |  4  |  5   |  6  | 7  |  8  | 9  |
    +======+=====+====+=====+======+=====+====+=====+====+
    | CSET | ID1 | C1 | ID2 |  C2  | ID3 | C3 | ID4 | C4 |
    +------+-----+----+-----+------+-----+----+-----+----+
    | CSET | 16  | 2  | 23  | 3516 |  1  | 4  |     |    |
    +------+-----+----+-----+------+-----+----+-----+----+
    """
    type = 'CSET'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        return CSET([1, 2], ['123', '456'], comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates a CSET card, which defines the degree of freedoms that
        will be free during a generalized dynamic reduction or
        component mode synthesis calculation.

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : List[str]
            the degree of freedoms to be free (e.g., '1', '123')
        comment : str; default=''
            a comment for the card

        ..note :: the length of components and ids must be the same
        """
        super().__init__(ids, components, comment)
class QSET(ABCQSet):
    """
    Defines generalized degrees-of-freedom (q-set) to be used for
    dynamic reduction or component mode synthesis.

    +------+-----+----+-----+------+-----+----+-----+----+
    |  1   |  2  | 3  |  4  |  5   |  6  | 7  |  8  | 9  |
    +======+=====+====+=====+======+=====+====+=====+====+
    | QSET | ID1 | C1 | ID2 |  C2  | ID3 | C3 | ID4 | C4 |
    +------+-----+----+-----+------+-----+----+-----+----+
    | QSET | 16  | 2  | 23  | 3516 |  1  | 4  |     |    |
    +------+-----+----+-----+------+-----+----+-----+----+
    """
    type = 'QSET'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        return QSET([1, 2], ['123', '456'], comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates a QSET card, which defines generalized degrees of
        freedom (q-set) to be used for dynamic reduction or component
        mode synthesis.

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : List[str]
            the degree of freedoms to be created (e.g., '1', '123')
        comment : str; default=''
            a comment for the card
        """
        super().__init__(ids, components, comment)
class ABQSet1(Set):
    """
    Base class that the ASET1, BSET1, and QSET1 cards inherit from.

    Defines degrees-of-freedom in the analysis set (a-set).

    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |   1   |  2  |  3  |  4   |  5  |  6  |  7  |  8  |  9  |
    +=======+=====+=====+======+=====+=====+=====+=====+=====+
    | xSET1 |  C  | ID1 | ID2  | ID3 | ID4 | ID5 | ID6 | ID7 |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |       | ID8 | ID9 |      |     |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    | xSET1 |  C  | ID1 | THRU | ID2 |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    """
    type = 'ABQSet1'

    def _finalize_hdf5(self, encoding):
        """hdf5 helper function"""
        if isinstance(self.ids, np.ndarray):
            self.ids = self.ids.tolist()

    def __init__(self, ids, components, comment=''):
        Set.__init__(self)
        if comment:
            self.comment = comment
        #: Component number. (Integer zero or blank for scalar points or any
        #: unique combination of the Integers 1 through 6 for grid points with
        #: no embedded blanks.)
        self.components = components
        #: Identifiers of grids points. (Integer > 0)
        self.ids = expand_thru(ids)
        self.ids_ref = None
        self.use_thru = True

    @classmethod
    def add_card(cls, card, comment=''):
        """builds the xSET1 card from a BDFCard"""
        components = fcomponents_or_blank(card, 1, 'components', 0)
        ids = []
        counter = 1
        for ifield in range(2, len(card)):
            idi = integer_string_or_blank(card, ifield, 'ID%i' % counter)
            if idi:  # skip blank fields
                counter += 1
                ids.append(idi)
        return cls(ids, components, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """builds the xSET1 card from OP2 data"""
        components = str(data[0])
        thru_flag = data[1]
        if thru_flag == 0:
            # explicit list of ids
            ids = data[2:]
        elif thru_flag == 1:
            # ID1 THRU ID2 -> expand inline
            assert len(data) == 4, data
            #ids = [data[2], 'THRU', data[3]]
            ids = list(range(data[2], data[3] + 1))
        else:
            raise NotImplementedError('thru_flag=%s data=%s' % (thru_flag, data))
        return cls(ids, components, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by %s' % self.type
        self.ids_ref = model.EmptyNodes(self.node_ids, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.ids = self.node_ids
        self.ids_ref = None

    @property
    def node_ids(self):
        """the GRID/SPOINT ids"""
        if self.ids_ref is None:
            return self.ids
        msg = ', which is required by %s' % self.type
        return _node_ids(self, self.ids_ref, allow_empty_nodes=True, msg=msg)

    def raw_fields(self):
        """gets the "raw" card without any processing as a list for printing"""
        node_ids_list = collapse_thru(self.node_ids) if self.use_thru else self.node_ids
        return [self.type, self.components] + node_ids_list

    def __repr__(self):
        return self.comment + print_card_8(self.raw_fields())
class SuperABQSet1(Set):
    """
    Base class that the SEBSET1 and SEQSET1 cards inherit from.

    Defines degrees-of-freedom in the analysis set (a-set).

    +----------+------+-----+------+------+-----+-----+-----+-----+
    |    1     |  2   |  3  |  4   |  5   |  6  |  7  |  8  |  9  |
    +==========+======+=====+======+======+=====+=====+=====+=====+
    | SEBSET1  | SEID |  C  | ID1  | ID2  | ID3 | ID4 | ID5 | ID6 |
    +----------+------+-----+------+------+-----+-----+-----+-----+
    |          | ID7  | ID9 |      |      |     |     |     |     |
    +----------+------+-----+------+------+-----+-----+-----+-----+
    | SEBSET1  | SEID |  C  | ID1  | THRU | ID2 |     |     |     |
    +----------+------+-----+------+------+-----+-----+-----+-----+
    """
    type = 'SuperABQSet1'

    def _finalize_hdf5(self, encoding):
        """hdf5 helper function"""
        if isinstance(self.ids, np.ndarray):
            self.ids = self.ids.tolist()

    def __init__(self, seid, ids, components, comment=''):
        Set.__init__(self)
        if comment:
            self.comment = comment
        #: Superelement identification number
        self.seid = seid
        #: Component number. (Integer zero or blank for scalar points or any
        #: unique combination of the Integers 1 through 6 for grid points with
        #: no embedded blanks.)
        self.components = components
        #: Identifiers of grids points. (Integer > 0)
        self.ids = expand_thru(ids)
        assert None not in self.ids
        self.ids_ref = None
        self.validate()

    def validate(self):
        if not isinstance(self.components, integer_string_types):
            msg = 'type(components)=%s must be an int/string' % type(self.components)
            raise TypeError(msg)

    @classmethod
    def add_card(cls, card, comment=''):
        """builds the SExSET1 card from a BDFCard"""
        seid = integer(card, 1, 'seid')
        components = fcomponents_or_blank(card, 2, 'components', 0)
        ids = []
        counter = 1
        for ifield in range(3, len(card)):
            idi = integer_string_or_blank(card, ifield, 'ID%i' % counter)
            if idi:  # skip blank fields
                counter += 1
                ids.append(idi)
        return cls(seid, expand_thru(ids), components, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """builds the SExSET1 card from OP2 data"""
        seid, components, nids = data
        #assert None not in components, 'Type=%s components=%s' % (cls.type, components)
        assert None not in nids, 'Type=%s nids=%s' % (cls.type, nids)
        assert -1 not in nids, 'nids=%s' % (nids.tolist())
        assert 0 not in nids, 'nids=%s' % (nids.tolist())
        return cls(seid, nids, components, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by %s seid=%s' % (self.type, self.seid)
        self.ids_ref = model.EmptyNodes(self.node_ids, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.ids = self.node_ids
        self.ids_ref = None

    @property
    def node_ids(self):
        """the GRID/SPOINT ids"""
        if self.ids_ref is None:
            return self.ids
        msg = ', which is required by %s seid=%s' % (self.type, self.seid)
        return _node_ids(self, self.ids_ref, allow_empty_nodes=True, msg=msg)

    def raw_fields(self):
        """gets the "raw" card without any processing as a list for printing"""
        return [self.type, self.seid, self.components] + collapse_thru(self.node_ids)

    def __repr__(self):
        return self.comment + print_card_8(self.raw_fields())
class ASET1(ABQSet1):
    """
    Defines degrees-of-freedom in the analysis set (a-set)

    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |   1   |  2  |  3  |  4   |  5  |  6  |  7  |  8  |  9  |
    +=======+=====+=====+======+=====+=====+=====+=====+=====+
    | ASET1 |  C  | ID1 | ID2  | ID3 | ID4 | ID5 | ID6 | ID7 |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |       | ID8 | ID9 |      |     |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    | ASET1 |  C  | ID1 | THRU | ID2 |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    """
    type = 'ASET1'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        return ASET1([1, 2], '123', comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates an ASET1 card, which defines the degree of freedoms that
        will be retained during an ASET modal reduction.

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : str
            the degree of freedoms to be retained (e.g., '1', '123')
        comment : str; default=''
            a comment for the card
        """
        super().__init__(ids, components, comment)
class OMIT(ABCQSet):
    """
    Defines degrees-of-freedom to be excluded (o-set) from the
    analysis set (a-set).

    +------+-----+----+-----+------+-----+----+-----+----+
    |  1   |  2  | 3  |  4  |  5   |  6  | 7  |  8  | 9  |
    +======+=====+====+=====+======+=====+====+=====+====+
    | OMIT | ID1 | C1 | ID2 |  C2  | ID3 | C3 | ID4 | C4 |
    +------+-----+----+-----+------+-----+----+-----+----+
    | OMIT | 16  | 2  | 23  | 3516 |  1  | 4  |     |    |
    +------+-----+----+-----+------+-----+----+-----+----+
    """
    type = 'OMIT'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        ids = [1, 2]
        components = ['123', '456']
        # bug fix: this previously constructed a BSET (copy/paste error);
        # _init_from_empty must return an instance of this class
        return OMIT(ids, components, comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates an OMIT card, which defines the degree of freedoms to
        be excluded (o-set) from the analysis set (a-set).

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : List[str]
            the degree of freedoms to be omitted (e.g., '1', '123')
        comment : str; default=''
            a comment for the card

        ..note :: the length of components and ids must be the same
        """
        ABCQSet.__init__(self, ids, components, comment)
class OMIT1(ABQSet1):
    """
    Defines degrees-of-freedom to be excluded (o-set) from the analysis set (a-set).

    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |   1   |  2  |  3  |  4   |  5  |  6  |  7  |  8  |  9  |
    +=======+=====+=====+======+=====+=====+=====+=====+=====+
    | OMIT  |  C  | ID1 | ID2  | ID3 | ID4 | ID5 | ID6 | ID7 |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |       | ID8 | ID9 |      |     |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    | OMIT1 |  C  | ID1 | THRU | ID2 |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    """
    type = 'OMIT1'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        return OMIT1([1, 2], '123', comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates an OMIT1 card, which defines the degree of freedoms that
        will be excluded (o-set) from the analysis set (a-set).

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : str
            the degree of freedoms to be omitted (e.g., '1', '123')
        comment : str; default=''
            a comment for the card
        """
        super().__init__(ids, components, comment)
class BSET1(ABQSet1):
    """
    Defines analysis set (a-set) degrees-of-freedom to be fixed (b-set)
    during generalized dynamic reduction or component mode synthesis
    calculations.

    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |   1   |  2  |  3  |  4   |  5  |  6  |  7  |  8  |  9  |
    +=======+=====+=====+======+=====+=====+=====+=====+=====+
    | BSET1 |  C  | ID1 | ID2  | ID3 | ID4 | ID5 | ID6 | ID7 |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |       | ID8 | ID9 |      |     |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    | BSET1 |  C  | ID1 | THRU | ID2 |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    """
    type = 'BSET1'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        return BSET1([1, 2], '123', comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates a BSET1 card, which defines the degree of freedoms that
        will be fixed during a generalized dynamic reduction or
        component mode synthesis calculation.

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : str
            the degree of freedoms to be fixed (e.g., '1', '123')
        comment : str; default=''
            a comment for the card
        """
        super().__init__(ids, components, comment)
class CSET1(Set):
    """
    Defines the degree of freedoms that will be free during a
    generalized dynamic reduction or component model synthesis
    calculation.

    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |   1   |  2  |  3  |  4   |  5  |  6  |  7  |  8  |  9  |
    +=======+=====+=====+======+=====+=====+=====+=====+=====+
    | CSET1 |  C  | ID1 | ID2  | ID3 | ID4 | ID5 | ID6 | ID7 |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    |       | ID8 | ID9 |      |     |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    | CSET1 |  C  | ID1 | THRU | ID2 |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    | CSET1 | ,,  | ALL |      |     |     |     |     |     |
    +-------+-----+-----+------+-----+-----+-----+-----+-----+
    """
    type = 'CSET1'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        ids = [1, 2]
        components = '123'
        return CSET1(ids, components, comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates an CSET1 card, which defines the degree of freedoms that
        will be free during a generalized dynamic reduction or component
        model synthesis calculation.

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : str
            the degree of freedoms to be free (e.g., '1', '123')
        comment : str; default=''
            a comment for the card
        """
        Set.__init__(self)
        if comment:
            self.comment = comment
        #: Identifiers of grids points. (Integer > 0)
        self.ids = expand_thru(ids)
        self.components = components
        self.ids_ref = None

    @classmethod
    def add_op2_data(cls, data, comment=''):
        # not supported from OP2 data; fail loudly with the raw record
        raise RuntimeError(str(data))

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a CSET1 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        # 'CSET1,,ALL' form: a blank component field with 'ALL' in field 2
        # means all six components.
        # NOTE(review): in the ALL form the id loop below still starts at
        # field 2, so the literal 'ALL' also ends up in ids -- looks
        # suspicious; confirm against the QRG / callers before changing
        if integer_string_or_blank(card, 2, 'C') == 'ALL':
            components = '123456'
        else:
            components = parse_components(card, 1, 'components')
        ids = []
        id_count = 1
        for ifield in range(2, len(card)):
            idi = integer_or_string(card, ifield, 'ID%i' % id_count)
            ids.append(idi)
            id_count += 1
        return CSET1(ids, components, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by CSET1'
        self.ids_ref = model.EmptyNodes(self.node_ids, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.ids = self.node_ids
        self.ids_ref = None

    @property
    def node_ids(self):
        # the GRID/SPOINT ids (cross-referenced when available)
        msg = ', which is required by CSET1'
        if self.ids_ref is None:
            return self.ids
        return _node_ids(self, self.ids_ref, allow_empty_nodes=True, msg=msg)

    def raw_fields(self):
        """gets the "raw" card without any processing as a list for printing"""
        list_fields = ['CSET1', self.components] + collapse_thru(self.node_ids)
        return list_fields

    def __repr__(self):
        list_fields = self.raw_fields()
        return self.comment + print_card_8(list_fields)
class QSET1(ABQSet1):
    """
    Defines generalized degrees-of-freedom (q-set) to be used for
    dynamic reduction or component mode synthesis.
    """
    type = 'QSET1'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        return QSET1([1, 2], '123', comment='')

    def __init__(self, ids, components, comment=''):
        """
        Creates a QSET1 card, which defines generalized degrees of
        freedom (q-set) to be used for dynamic reduction or component
        mode synthesis.

        Parameters
        ----------
        ids : List[int]
            the GRID/SPOINT ids
        components : str
            the degree of freedoms to be created (e.g., '1', '123')
        comment : str; default=''
            a comment for the card
        """
        super().__init__(ids, components, comment)
class SET1(Set):
"""
Defines a list of structural grid points or element identification
numbers.
+------+--------+--------+-----+------+-----+-----+------+-----+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+======+========+========+=====+======+=====+=====+======+=====+
| SET1 | SID | ID1 | ID2 | ID3 | ID4 | ID5 | ID6 | ID7 |
+------+--------+--------+-----+------+-----+-----+------+-----+
| | ID8 | etc. | | | | | | |
+------+--------+--------+-----+------+-----+-----+------+-----+
| SET1 | 3 | 31 | 62 | 93 | 124 | 16 | 17 | 18 |
+------+--------+--------+-----+------+-----+-----+------+-----+
| | 19 | | | | | | | |
+------+--------+--------+-----+------+-----+-----+------+-----+
| SET1 | 6 | 29 | 32 | THRU | 50 | 61 | THRU | 70 |
+------+--------+--------+-----+------+-----+-----+------+-----+
| | 17 | 57 | | | | | | |
+------+--------+--------+-----+------+-----+-----+------+-----+
"""
type = 'SET1'
@classmethod
def _init_from_empty(cls):
    """builds a minimal valid SET1 (used by the hdf5 loader)"""
    return SET1(1, [1], is_skin=False, comment='')
def __init__(self, sid, ids, is_skin=False, comment=''):
    """
    Creates a SET1 card, which defines a list of structural grid
    points or element identification numbers.

    Parameters
    ----------
    sid : int
        set id
    ids : List[int, str]
        AECOMP, SPLINEx, PANEL : all grid points must exist
        XYOUTPUT : missing grid points are ignored
        The only valid string is THRU
        ``ids = [1, 3, 5, THRU, 10]``
    is_skin : bool; default=False
        if is_skin is used; ids must be empty
    comment : str; default=''
        a comment for the card
    """
    Set.__init__(self)
    if comment:
        self.comment = comment
    #: Unique identification number. (Integer > 0)
    self.sid = sid
    #: List of structural grid point or element identification numbers.
    #: (Integer > 0 or 'THRU'; for the 'THRU' option, ID1 < ID2 or 'SKIN';
    #: in field 3)
    self.ids = expand_thru(ids, set_fields=False, sort_fields=False)
    self.is_skin = is_skin
    self.xref_type = None
    self.ids_ref = None
@classmethod
def add_card(cls, card, comment=''):
"""
Adds a SET1 card from ``BDF.add_card(...)``
Parameters
----------
card : BDFCard()
a BDFCard object
comment : str; default=''
a comment for the card
"""
sid = integer(card, 1, 'sid')
ids = fields(integer_or_string, card, 'ID', i=2, j=len(card))
is_skin = False
i = 0
if len(ids) > 0:
if isinstance(ids[0], str) and ids[0] == 'SKIN':
is_skin = True
i += 1
else:
assert len(card) > 2, card
return SET1(sid, ids[i:], is_skin=is_skin, comment=comment)
#def __eq__(self, set1):
#assert self.type == set1.type, 'type=%s set1.type=%s' % (self.type, set1.type)
#self.clean_ids()
#set1.clean_ids()
#if self.get_IDs() == set1.get_IDs():
#return True
#return False
def symmetric_difference(self, set1):
ids1 = set(self.get_ids())
ids2 = set(set1.get_ids())
return ids1.symmetric_difference(ids2)
def add_set(self, set1):
self.ids += set1.get_ids()
self.clean_ids()
def raw_fields(self):
skin = []
if self.is_skin:
skin = ['SKIN']
return ['SET1', self.sid] + skin + self.get_ids()
def cross_reference_set(self, model, xref_type, msg='', allow_empty_nodes=False):
"""
Cross links the card so referenced cards can be extracted directly
Parameters
----------
model : BDF()
the BDF object
xref_type : str
{'Node', 'Point'}
allow_empty_nodes : bool; default=False
do all nodes need to exist?
SPLINEx, ACMODL, PANEL, AECOMP, XYOUTPUT
- nodes
- SPLINEx (all nodes must exist)
- PANEL (all nodes must exist)
- XYOUTPUT (missing nodes ignored)
- AECOMP
- ACMODL (optional)
- elements
- ACMODL (optional)
"""
msg = ', which is required by SET1 sid=%s%s' % (self.sid, msg)
if xref_type == 'Node':
self.ids_ref = model.Nodes(self.get_ids(), msg=msg)
elif xref_type == 'Point':
self.ids_ref = model.Points(self.get_ids(), msg=msg)
else:
raise NotImplementedError("xref_type=%r and must be ['Node', 'Point']" % xref_type)
self.xref_type = xref_type
def safe_cross_reference(self, model: BDF, xref_type, msg='', allow_empty_nodes=False):
"""
Cross links the card so referenced cards can be extracted directly
Parameters
----------
model : BDF()
the BDF object
xref_type : str
{'Node'}
allow_empty_nodes : bool; default=False
do all nodes need to exist?
SPLINEx, ACMODL, PANEL, AECOMP, XYOUTPUT
- nodes
- SPLINEx (all nodes must exist)
- PANEL (all nodes must exist)
- XYOUTPUT (missing nodes ignored)
- AECOMP
- ACMODL (optional)
- elements
- ACMODL (optional)
"""
assert msg != ''
msg = ', which is required by SET1 sid=%s%s' % (self.sid, msg)
if xref_type == 'Node':
self.ids_ref, out = model.safe_get_nodes(self.get_ids(), msg=msg)
if len(out):
model.log.warning(out)
elif xref_type == 'Point':
self.ids_ref, out = model.safe_points(self.get_ids(), msg=msg)
else:
raise NotImplementedError("xref_type=%r and must be ['Node', 'Point']" % xref_type)
self.xref_type = xref_type
def uncross_reference(self) -> None:
"""Removes cross-reference links"""
if self.xref_type in ['Node', 'Point']:
self.ids = self.get_ids()
self.xref_type = None
else:
raise NotImplementedError("xref_type=%r and must be ['Node']" % self.xref_type)
self.ids_ref = None
def get_ids(self):
if self.ids_ref is None:
return self.ids
if self.xref_type is None:
ids = self.ids
elif self.xref_type in ['Node', 'Point']:
ids = [node if isinstance(node, integer_types) else node.nid
for node in self.ids_ref]
else:
raise NotImplementedError("xref_type=%r and must be ['Node']" % self.xref_type)
return ids
def write_card(self, size: int=8, is_double: bool=False) -> str:
skin = []
if self.is_skin:
skin = ['SKIN']
# checked in NX 2014 / MSC 2005.1
card = ['SET1', self.sid] + skin + self.get_ids()
return write_card(self.comment, card, size, is_double)
# I thought this worked in the new MSC Nastran...
# Doesn't work in NX 2014 / MSC 2005.1 (multiple duplicate sids).
# It may work with one sid, with singles and doubles on one card.
#field_packs = []
#singles, doubles = collapse_thru_packs(self.get_ids())
#if singles:
#field_packs.append(['SET1', self.sid] + skin + singles)
#if doubles:
#for pack in doubles:
#field_packs.append(['SET1', self.sid] + skin + pack)
#msg = []
#for field_pack in field_packs:
#msg.append(print_card_8(field_pack))
#return ''.join(msg)
class SET2(Set):
    """
    Defines a list of structural grid points in terms of aerodynamic
    macro elements.

    +------+--------+-------+-----+------+-----+-----+------+------+
    |  1   |   2    |   3   |  4  |  5   |  6  |  7  |  8   |  9   |
    +======+========+=======+=====+======+=====+=====+======+======+
    | SET2 |  SID   | MACRO | SP1 | SP2  | CH1 | CH2 | ZMAX | ZMIN |
    +------+--------+-------+-----+------+-----+-----+------+------+
    | SET2 |   3    |  111  | 0.0 | 0.75 | 0.0 |0.667| 3.51 |      |
    +------+--------+-------+-----+------+-----+-----+------+------+
    | SET2 |   6    |  222  | 0.0 | 0.75 | 0.0 |0.667| 3.51 | -1.0 |
    +------+--------+-------+-----+------+-----+-----+------+------+

    SET2 entries are referenced by:
    - SPLINEi
    """
    type = 'SET2'

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal SET2 (used for HDF5 loading / testing)"""
        sid = 1
        macro = 1
        sp1 = 0.
        sp2 = 1.
        ch1 = 0.
        ch2 = 1.
        return SET2(sid, macro, sp1, sp2, ch1, ch2, comment='')

    def __init__(self, sid: int, macro: int,
                 sp1: float, sp2: float,
                 ch1: float, ch2: float,
                 zmax: float=0.0, zmin: float=0.0,
                 comment: str='') -> None:
        """
        Creates a SET2 card, which defines a list of structural
        grid points in terms of aerodynamic macro elements.

        Remarks:
         - Points exactly on the boundary may be missed; therefore, to
           get all the grid points within the area of the macro element,
           SP1=-0.01, SP2=1.01, etc. should be used.
         - Use DIAG 18 to print the internal grid Ids found.

        Parameters
        ----------
        sid : int
            set id
        macro : int
            the aerodynamic macro element id
        sp1 / sp2 : float
            lower/higher span division point defining the prism containing the set
        ch1 / ch2 : float
            lower/higher chord division point defining the prism containing the set
        zmax / zmin : float; default=0.0/0.0
            z-coordinate of top/bottom of the prism containing the set
            a zero value implies a value of infinity
        comment : str; default=''
            a comment for the card
        """
        Set.__init__(self)
        if comment:
            self.comment = comment
        #: Unique identification number. (Integer > 0)
        self.sid = sid
        #: Aerodynamic Macro Element ID. (Integer > 0)
        self.macro = macro
        #: Division Points spanwise and chordwise for the selection prism. (Real)
        self.sp1 = sp1
        self.sp2 = sp2
        self.ch1 = ch1
        self.ch2 = ch2
        #: Height limits for the selection prism. (Real)
        self.zmax = zmax
        self.zmin = zmin
        # xref state: None until cross_reference_set() is called
        self.xref_type = None
        self.macro_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SET2 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        macro = integer(card, 2, 'macro')
        sp1 = double(card, 3, 'sp1')
        sp2 = double(card, 4, 'sp2')
        ch1 = double(card, 5, 'ch1')
        ch2 = double(card, 6, 'ch2')
        zmax = double_or_blank(card, 7, 'zmax', 0.0)
        zmin = double_or_blank(card, 8, 'zmin', 0.0)
        return SET2(sid, macro, sp1, sp2, ch1, ch2, zmax=zmax, zmin=zmin, comment=comment)

    def raw_fields(self):
        """gets the "raw" card without any processing as a list for printing"""
        return ['SET2', self.sid, self.macro, self.sp1, self.sp2,
                self.ch1, self.ch2, self.zmax, self.zmin]

    def cross_reference_set(self, model, xref_type: str, msg=''):
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        xref_type : str
            {'MACRO'} i.e. the CAEROi elements
        msg : str; default=''
            extra context for the error message
        """
        msg = f', which is required by SET2 sid={self.sid}{msg}'
        if xref_type == 'MACRO':
            self.macro_ref = model.CAero(self.macro, msg=msg)
        else:
            raise NotImplementedError(f"xref_type={xref_type!r} and must be ['MACRO']")
        self.xref_type = xref_type

    def get_ids(self):
        """a SET2 has no explicit ids; the points are defined geometrically"""
        return []

    def safe_cross_reference(self, model: BDF, xref_type: str, msg=''):
        """like :meth:`cross_reference_set`, but logs instead of raising"""
        msg = f', which is required by SET2 sid={self.sid}{msg}'
        if xref_type == 'MACRO':
            self.macro_ref = model.CAero(self.macro, msg=msg)
        else:
            model.log.error(f"xref_type={xref_type!r} and must be ['MACRO']")
            return
        self.xref_type = xref_type

    def uncross_reference(self):
        """Removes cross-reference links"""
        if self.xref_type == 'MACRO':
            self.xref_type = None
        else:
            # bug fix: this previously referenced the undefined bare name
            # ``xref_type`` and raised NameError instead of the intended error
            raise NotImplementedError(f"xref_type={self.xref_type!r} and must be ['MACRO']")
        self.macro_ref = None
class SET3(Set):
    """
    Defines a list of grids, elements or points.

    SET3 entries are referenced by:
    - NX
      - ACMODL
      - PANEL
    - MSC
      - PBMSECT
      - PBRSECT
      - RFORCE
        - ELEM only (SOL 600)
      - DEACTEL
        - ELEM only (SOL 400)
      - RBAR, RBAR1, RBE1, RBE2, RBE2GS, RBE3, RROD,
        RSPLINE, RSSCON, RTRPLT and RTRPLT1
         - RBEin / RBEex only
      - ELSIDi / XELSIDi
         - ELEM only
      - NDSIDi
         - GRID only

    +------+-----+-------+-----+-----+-----+-----+-----+-----+
    |  1   |  2  |   3   |  4  |  5  |  6  |  7  |  8  |  9  |
    +======+=====+=======+=====+=====+=====+=====+=====+=====+
    | SET3 | SID |  DES  | ID1 | ID2 | ID3 | ID4 | ID5 | ID6 |
    +------+-----+-------+-----+-----+-----+-----+-----+-----+
    |      | ID7 |  ID8  | etc |     |     |     |     |     |
    +------+-----+-------+-----+-----+-----+-----+-----+-----+
    | SET3 |  1  | POINT |  11 |  12 |     |     |     |     |
    +------+-----+-------+-----+-----+-----+-----+-----+-----+
    """
    type = 'SET3'
    valid_descs = ['GRID', 'POINT', 'ELEMENT', 'PROP', 'RBEin', 'RBEex']

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal SET3 (used for HDF5 loading / testing)"""
        sid = 1
        desc = 'ELEM'
        ids = [1]
        return SET3(sid, desc, ids, comment='')

    def __init__(self, sid: int, desc: str, ids: List[int], comment: str=''):
        """
        Creates a SET3 card

        Parameters
        ----------
        sid : int
            set id
        desc : str
            set description; 'ELEM'/'RBEIN'/'RBEEX' are normalized to
            'ELEMENT'/'RBEin'/'RBEex' (see ``valid_descs``)
        ids : List[int]
            the grid/element/point/property ids
        comment : str; default=''
            a comment for the card
        """
        Set.__init__(self)
        if comment:
            self.comment = comment
        #: Unique identification number. (Integer > 0)
        self.sid = sid
        #: Set description (Character). Valid options are 'GRID', 'ELEM',
        #: 'POINT' and 'PROP'.
        if desc == 'ELEM':
            desc = 'ELEMENT'
        elif desc == 'RBEIN':
            desc = 'RBEin'
        elif desc == 'RBEEX':
            desc = 'RBEex'
        self.desc = desc
        #: Identifiers of grids points, elements, points or properties.
        #: (Integer > 0)
        self.ids = expand_thru(ids, set_fields=False, sort_fields=False)
        self.ids_ref = None
        self.xref_type = None

    def validate(self):
        """raises ValueError when ``desc`` is not one of ``valid_descs``"""
        if self.desc not in self.valid_descs:
            msg = 'desc=%r; valid_descs=[%s]' % (self.desc, ', '.join(self.valid_descs))
            raise ValueError(msg)

    def get_ids(self):
        """gets the plain integer ids, whether or not the card is cross-referenced"""
        if self.ids_ref is None:
            return self.ids

        if self.xref_type is None:
            ids = self.ids
        elif self.xref_type == 'Point':
            # TODO: improve this...
            ids = [point if isinstance(point, integer_types) else point.nid
                   for point in self.ids_ref]
        else:
            # 'Node',
            raise NotImplementedError("xref_type=%r and must be ['Point']" % self.xref_type)
        return ids

    def cross_reference_set(self, model, xref_type, msg=''):
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        xref_type : str
            {'GRID', 'Point'}
        msg : str; default=''
            extra context for the error message
        """
        msg = ', which is required by SET3 sid=%s%s' % (self.sid, msg)
        if xref_type == 'GRID':
            # was 'Node'; not tested relative to Nastran, seems obvious though
            # I'm not sure why Node was here vs. GRID
            # the block was disabled anyways, so probably doesn't matter
            # NOTE(review): stores GRID objects into self.ids (not ids_ref);
            # confirm against get_ids() expectations before enabling callers
            self.ids = model.Nodes(self.get_ids(), msg=msg)
        # bug fix: this was a separate ``if``, so a 'GRID' xref fell through
        # into the else branch and raised NotImplementedError
        elif xref_type == 'Point':
            self.ids_ref = model.Points(self.get_ids(), msg=msg)
        else:
            raise NotImplementedError("xref_type=%r and must be ['Point']" % xref_type)
        self.xref_type = xref_type

    def add_set(self, set3):
        """merges another SET3 with the same sid/desc into this card"""
        self.ids += set3.get_ids()
        assert self.sid == set3.sid, 'SET3.sid=%r; existing sid=%r new=%r' % (self.sid, self.sid, set3.sid)
        assert self.desc == set3.desc, 'SET3.sid=%r; existing desc=%r new=%r' % (self.sid, self.desc, set3.desc)
        self.clean_ids()

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SET3 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        desc = string(card, 2, 'desc')
        ids = fields(integer_or_string, card, 'ID', i=3, j=len(card))
        return SET3(sid, desc, ids, comment=comment)

    def union(self, set3):
        """replaces this card's ids with the union of this set and ``set3``"""
        assert self.type == set3.type, 'type=%r set3.type=%r' % (self.type, set3.type)
        assert self.desc == set3.desc, 'self.desc=%r set3.desc=%r' % (self.desc, set3.desc)
        ids1 = set(self.ids)
        ids2 = set(set3.ids)
        self.ids = list(ids1.union(ids2))

    def symmetric_difference(self, set3):
        """returns the ids in exactly one of this set and ``set3`` (as a set)"""
        assert self.type == set3.type, 'type=%r set3.type=%r' % (self.type, set3.type)
        ids1 = set(self.ids)
        ids2 = set(set3.ids)
        return ids1.symmetric_difference(ids2)

    def is_grid(self):
        """is this a GRID set?"""
        return self.desc == 'GRID'

    def is_point(self):
        """is this a POINT set?"""
        return self.desc == 'POINT'

    def is_property(self):
        """is this a PROP set?"""
        return self.desc == 'PROP'

    def is_element(self):
        """is this an ELEMENT set?"""
        return self.desc == 'ELEMENT'

    def SetIDs(self, collapse=True):
        """gets the IDs of the SETx"""
        if collapse:
            return collapse_thru(self.ids, nthru=1)
        return self.ids

    def raw_fields(self):
        """Gets the "raw" card without any processing as a list for printing"""
        list_fields = ['SET3', self.sid, self.desc] + self.SetIDs()
        return list_fields

    def __repr__(self):
        # NOTE: sorts self.ids in place as a side effect
        msg = self.comment
        self.ids.sort()
        ids = self.get_ids()
        packs = condense(ids)
        if len(packs) == 1:
            singles, doubles = build_thru_packs(packs, max_dv=1)
            # (removed a dead ``packs = collapse_thru(ids)`` assignment;
            # the result was never used)
            for pack in doubles:
                msg += print_card_8(['SET3', self.sid, self.desc] + pack)
            if singles:
                msg += print_card_8(['SET3', self.sid, self.desc] + singles)
        else:
            msg += print_card_8(['SET3', self.sid, self.desc] + ids)
        return msg

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the card (small field format only)"""
        return str(self)
class SESET(SetSuper):
    """
    Defines interior grid points for a superelement.
    """
    type = 'SESET'

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal SESET (used for HDF5 loading / testing)"""
        return SESET(1, [1, 2], comment='')

    def __init__(self, seid, ids, comment=''):
        """
        Creates an SESET card

        Parameters
        ----------
        seid : int
            the superelement id
        ids : List[int]
            grid or scalar point ids (0 < Integer < 1000000; G1 < G2)
        comment : str; default=''
            a comment for the card
        """
        SetSuper.__init__(self)
        if comment:
            self.comment = comment
        self.seid = seid
        # grid/scalar point ids; THRU ranges expanded, duplicates removed
        self.ids = expand_thru(ids)
        self.clean_ids()

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SESET card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        seid = integer_or_blank(card, 1, 'seid', 0)
        ids = fields(integer_or_string, card, 'ID', i=2, j=len(card))
        return SESET(seid, ids, comment=comment)

    def add_seset(self, seset):
        """merges another SESET's ids into this card"""
        self.ids += seset.ids
        self.clean_ids()

    def raw_fields(self):
        """gets the "raw" card without any processing as a list for printing"""
        return ['SESET', self.seid] + collapse_thru(self.ids)

    def __repr__(self):
        remaining = collapse_thru(self.ids)
        cards = []
        # peel off one (ID1, THRU, ID2) triple per card until none remain
        while 'THRU' in remaining:
            ithru = remaining.index('THRU')
            cards.append(print_card_8(
                ['SESET', self.seid] + remaining[ithru - 1:ithru + 2]))
            remaining = remaining[:ithru - 1] + remaining[ithru + 2:]
        # any leftover singles go on one final card
        if remaining:
            cards.append(print_card_8(['SESET', self.seid] + remaining))
        return ''.join(cards)

    def cross_reference(self, model: BDF) -> None:
        """SESET has nothing to cross-reference"""
        pass

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        pass
class SEBSET(SuperABCQSet):
    """
    Defines boundary degrees-of-freedom to be fixed (b-set) during
    generalized dynamic reduction or component mode calculations.

    +--------+------+-----+------+-----+----+-----+----+
    |   1    |  2   |  3  |  4   |  5  | 6  |  7  | 8  |
    +========+======+=====+======+=====+====+=====+====+
    | SEBSET | SEID | ID1 |  C1  | ID2 | C2 | ID3 | C3 |
    +--------+------+-----+------+-----+----+-----+----+
    | SEBSET |  C   | ID1 | THRU | ID2 |    |     |    |
    +--------+------+-----+------+-----+----+-----+----+
    """
    type = 'SEBSET'
    _properties = ['node_ids']

    def __init__(self, seid, ids, components, comment=''):
        """Creates a SEBSET card for superelement ``seid``; all the work
        is done by the SuperABCQSet base class."""
        SuperABCQSet.__init__(self, seid, ids, components, comment)

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal SEBSET (used for HDF5 loading / testing)"""
        return SEBSET(1, [1, 2], ['123', '456'], comment='')
class SEBSET1(SuperABQSet1):
    """
    Defines boundary degrees-of-freedom to be fixed (b-set) during
    generalized dynamic reduction or component mode synthesis
    calculations.

    +----------+------+-----+------+------+-----+-----+-----+-----+
    |    1     |  2   |  3  |  4   |  5   |  6  |  7  |  8  |  9  |
    +==========+======+=====+======+======+=====+=====+=====+=====+
    | SEBSET1  | SEID |  C  | ID1  | ID2  | ID3 | ID4 | ID5 | ID6 |
    +----------+------+-----+------+------+-----+-----+-----+-----+
    |          | ID7  | ID9 |      |      |     |     |     |     |
    +----------+------+-----+------+------+-----+-----+-----+-----+
    | SEBSET1  | SEID |  C  | ID1  | THRU | ID2 |     |     |     |
    +----------+------+-----+------+------+-----+-----+-----+-----+
    """
    type = 'SEBSET1'
    _properties = ['node_ids']

    def __init__(self, seid, ids, components, comment=''):
        """Creates a SEBSET1 card; a single component string applies to
        every id.  All the work is done by the SuperABQSet1 base class."""
        SuperABQSet1.__init__(self, seid, ids, components, comment)

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal SEBSET1 (used for HDF5 loading / testing)"""
        return SEBSET1(1, [1, 2], '123', comment='')
class SECSET(SuperABCQSet):
    """
    Defines analysis-set degrees-of-freedom to be free (c-set) for a
    superelement; (id, component) pairs are given per field pair.
    """
    type = 'SECSET'
    _properties = ['node_ids']

    def __init__(self, seid, ids, components, comment=''):
        """Creates a SECSET card; all the work is done by the
        SuperABCQSet base class."""
        SuperABCQSet.__init__(self, seid, ids, components, comment)

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal SECSET (used for HDF5 loading / testing)"""
        return SECSET(1, [1, 2], ['123', '456'], comment='')
class SECSET1(SuperABQSet1):
    """
    Defines SECSET1

    +----------+------+-----+------+------+-----+-----+-----+-----+
    |    1     |  2   |  3  |  4   |  5   |  6  |  7  |  8  |  9  |
    +==========+======+=====+======+======+=====+=====+=====+=====+
    | SECSET1  | SEID |  C  | ID1  | ID2  | ID3 | ID4 | ID5 | ID6 |
    +----------+------+-----+------+------+-----+-----+-----+-----+
    |          | ID7  | ID9 |      |      |     |     |     |     |
    +----------+------+-----+------+------+-----+-----+-----+-----+
    | SECSET1  | SEID |  C  | ID1  | THRU | ID2 |     |     |     |
    +----------+------+-----+------+------+-----+-----+-----+-----+
    """
    type = 'SECSET1'
    _properties = ['node_ids']

    def __init__(self, seid, ids, components, comment=''):
        """Creates a SECSET1 card; a single component string applies to
        every id.  All the work is done by the SuperABQSet1 base class."""
        SuperABQSet1.__init__(self, seid, ids, components, comment)

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal SECSET1 (used for HDF5 loading / testing)"""
        return SECSET1(1, [1, 2], '123', comment='')
class SEQSET(SuperABCQSet):
    """
    Defines generalized degrees-of-freedom (q-set) for a superelement;
    (id, component) pairs are given per field pair.
    """
    type = 'SEQSET'
    _properties = ['node_ids']

    def __init__(self, seid, ids, components, comment=''):
        """Creates a SEQSET card; all the work is done by the
        SuperABCQSet base class."""
        SuperABCQSet.__init__(self, seid, ids, components, comment)

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal SEQSET (used for HDF5 loading / testing)"""
        return SEQSET(1, [1, 2], ['123', '456'], comment='')
class SEQSET1(SuperABQSet1):
    """
    Defines generalized degrees-of-freedom (q-set) for a superelement;
    one component string applies to every id.
    """
    type = 'SEQSET1'
    _properties = ['node_ids']

    def __init__(self, seid, ids, components, comment=''):
        """Creates a SEQSET1 card; all the work is done by the
        SuperABQSet1 base class."""
        SuperABQSet1.__init__(self, seid, ids, components, comment)

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal SEQSET1 (used for HDF5 loading / testing)"""
        return SEQSET1(1, [1, 2], '123', comment='')
class SEQSEP(SetSuper):  # not integrated...is this an SESET ???
    """
    Used with the CSUPER entry to define the correspondence of the
    exterior grid points between an identical or mirror-image
    superelement and its primary superelement.
    """
    type = 'SEQSEP'

    def __init__(self, ssid, psid, ids, comment=''):
        """
        Creates a SEQSEP card

        Parameters
        ----------
        ssid : int
            identification number for the secondary superelement (>= 0)
        psid : int
            identification number for the primary superelement (>= 0)
        ids : List[int]
            exterior grid point ids of the primary superelement (> 0)
        comment : str; default=''
            a comment for the card
        """
        SetSuper.__init__(self)
        if comment:
            self.comment = comment
        self.ssid = ssid
        self.psid = psid
        # THRU ranges expanded, duplicates removed
        self.ids = expand_thru(ids)
        self.clean_ids()

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a SEQSEP card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        ssid = integer(card, 1, 'ssid')
        psid = integer(card, 2, 'psid')
        ids = fields(integer_or_string, card, 'ID', i=3, j=len(card))
        return SEQSEP(ssid, psid, ids, comment=comment)

    def get_ids(self) -> List[int]:
        """gets the ids"""
        return self.ids

    def raw_fields(self):
        """gets the "raw" card without any processing as a list for printing"""
        return ['SEQSEP', self.ssid, self.psid] + self.get_ids()
class RADSET(ABQSet1):
    """
    Specifies which radiation cavities are to be included for
    radiation enclosure analysis.

    +--------+----------+----------+----------+----------+----------+----------+----------+----------+
    |    1   |     2    |     3    |     4    |     5    |     6    |     7    |     8    |     9    |
    +========+==========+==========+==========+==========+==========+==========+==========+==========+
    | RADSET | ICAVITY1 | ICAVITY2 | ICAVITY3 | ICAVITY4 | ICAVITY5 | ICAVITY6 | ICAVITY7 | ICAVITY8 |
    +--------+----------+----------+----------+----------+----------+----------+----------+----------+
    |        | ICAVITY9 |          |          |          |          |          |          |          |
    +--------+----------+----------+----------+----------+----------+----------+----------+----------+
    | RADSET |     1    |     2    |     3    |     4    |          |          |          |          |
    +--------+----------+----------+----------+----------+----------+----------+----------+----------+
    """
    type = 'RADSET'

    def __init__(self, cavities, comment=''):
        """
        Creates a RADSET card

        Parameters
        ----------
        cavities : List[int]
            the RADCAV ids
        comment : str; default=''
            a comment for the card
        """
        # NOTE(review): unlike the other set cards, no base-class __init__
        # is invoked here -- confirm this is intentional
        if comment:
            self.comment = comment
        self.cavities = cavities

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal RADSET (used for HDF5 loading / testing)"""
        return RADSET([1, 2], comment='')

    def _finalize_hdf5(self, encoding):
        """hdf5 helper function"""
        # h5py hands back numpy arrays; normalize to a plain list
        if isinstance(self.cavities, np.ndarray):
            self.cavities = self.cavities.tolist()

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a RADSET card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        cavities = []
        icavity = 1
        # blank / zero fields are skipped and do not advance the counter
        for ifield in range(1, len(card)):
            cavity = integer(card, ifield, 'iCavity%i' % icavity)
            if cavity:
                icavity += 1
                cavities.append(cavity)
        return RADSET(cavities, comment=comment)

    def raw_fields(self):
        """gets the "raw" card without any processing as a list for printing"""
        return ['RADSET'] + self.cavities

    def __repr__(self):
        return self.comment + print_card_8(self.raw_fields())
class USET(Set):
    """
    Defines a degrees-of-freedom set.

    +------+-------+-----+------+-----+----+-----+----+
    |  1   |   2   |  3  |  4   |  5  | 6  |  7  | 8  |
    +======+=======+=====+======+=====+====+=====+====+
    | USET | SNAME | ID1 |  C1  | ID2 | C2 | ID3 | C3 |
    +------+-------+-----+------+-----+----+-----+----+
    | USET | JUNK  | ID1 | THRU | ID2 |    |     |    |
    +------+-------+-----+------+-----+----+-----+----+
    """
    type = 'USET'
    _properties = ['node_ids']

    @classmethod
    def _init_from_empty(cls):
        """creates a minimal USET (used for HDF5 loading / testing)"""
        name = 'SNAME'
        ids = [1, 2]
        components = ['123', '456']
        # bug fix: this previously returned ``QSET(ids, components, ...)`` --
        # the wrong class, and it dropped the required ``name`` argument
        return USET(name, ids, components, comment='')

    def __init__(self, name, ids, components, comment=''):
        """
        Creates a USET card, which defines a degrees-of-freedom set.

        Parameters
        ----------
        name : str
            SNAME Set name. (One to four characters or the word 'ZERO'
            followed by the set name.)
        ids : List[int]
            the GRID/SPOINT ids
        components : List[str]
            the degree of freedoms (e.g., '1', '123')
        comment : str; default=''
            a comment for the card
        """
        Set.__init__(self)
        if comment:
            self.comment = comment
        self.name = name
        #: Identifiers of grids points. (Integer > 0)
        self.components = components
        self.ids = ids
        self.ids_ref = None

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a USET card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        name = string(card, 1, 'name')
        components = []
        ids = []

        # fields are (id, component) pairs starting at field 2
        nsets = (len(card) - 1) // 2
        for iset in range(nsets):
            i = iset * 2 + 2
            idi = integer(card, i, 'node_id' + str(iset))
            component = parse_components(card, i + 1, 'component' + str(iset))
            components.append(component)
            ids.append(idi)
        return USET(name, ids, components, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        tested by gspc1.op2

        for some reason, the setname is an integer and has bizarre rules
        that I don't understand like:
          - the setname is 1-4 characters, except if it's 'ZERO%i' % sid
            ummm...odd
        """
        sid = data[0]
        nid = data[1]
        if sid < 0:
            name = 'ZERO'
        else:
            comment = 'sid=%s (???)' % sid
            name = 'U%i' % nid
        assert nid > 0, nid
        component = str(data[2])
        for componenti in component:
            assert componenti in '0123456', component
        return USET(name, [nid], [component], comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by USET name=%s' % (self.name)
        self.ids_ref = model.EmptyNodes(self.node_ids, msg=msg)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.ids = self.node_ids
        self.ids_ref = None

    @property
    def node_ids(self):
        """gets the GRID/SPOINT ids as plain integers"""
        if self.ids_ref is None:
            return self.ids
        msg = ', which is required by USET name=%s' % (self.name)
        return _node_ids(self, self.ids_ref, allow_empty_nodes=True, msg=msg)

    def raw_fields(self):
        """
        gets the "raw" card without any processing as a list for printing
        """
        list_fields = ['USET', self.name]
        for (component, idi) in zip(self.components, self.node_ids):
            list_fields += [idi, component]
        return list_fields
class USET1(ABQSet1):
"""
Defines a degree-of-freedom set.
+-------+-------+-----+------+------+-----+-----+-----+-----+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+=======+=======+=====+======+======+=====+=====+=====+=====+
| USET1 | SNAME | C | ID2 | ID3 | ID4 | ID5 | ID6 | ID7 |
+-------+-------+-----+------+------+-----+-----+-----+-----+
| | ID9 | | | | | | | |
+-------+-------+-----+------+------+-----+-----+-----+-----+
| USET1 | SNAME | C | ID1 | THRU | ID2 | | | |
+-------+-------+-----+------+------+-----+-----+-----+-----+
"""
type = 'USET1'
_properties = ['node_ids']
@classmethod
def _init_from_empty(cls):
name = 'SNAME'
ids = [1, 2]
components = '123'
return USET1(name, ids, components, comment='')
def __init__(self, name, ids, components, comment=''):
"""
Creates a USET1 card, which defines a degrees-of-freedom set.
Parameters
----------
name : str
SNAME Set name. (One to four characters or the word 'ZERO'
followed by the set name.)
ids : List[int]
the GRID/SPOINT ids
components : str
the degree of freedoms (e.g., '1', '123')
comment : str; default=''
a comment for the card
"""
ABQSet1.__init__(self, ids, components, comment=comment)
#if comment:
#self.comment = comment
self.name = name
#: Component number. (Integer zero or blank for scalar points or any
#: unique combination of the Integers 1 through 6 for grid points with
#: no embedded blanks.)
#self.components = components
#: Identifiers of grids points. (Integer > 0)
#self.ids = expand_thru(ids)
#self.ids_ref = None
@classmethod
def add_card(cls, card, comment=''):
"""
Adds a USET1 card from ``BDF.add_card(...)``
Parameters
----------
card : BDFCard()
a BDFCard object
comment : str; default=''
a comment for the card
"""
name = string(card, 1, 'name')
components = fcomponents_or_blank(card, 2, 'components', 0)
nfields = len(card)
ids = []
i = 1
for ifield in range(3, nfields):
idi = integer_string_or_blank(card, ifield, 'ID%i' % i)
if idi:
i += 1
ids.append(idi)
return USET1(name, ids, components, comment=comment)
@classmethod
def add_op2_data(cls, data, comment=''):
"""
tested by gspc1.op2
for some reason, the setname is an integer and has bizarre rules
that I don't understand like:
- the setname is 1-4 characters, except if it's 'ZERO%i' % sid
ummm...odd
"""
name, components, ids = data
#sid = data[0]
#nid = data[1]
#if sid < 0:
#name = 'ZERO'
#else:
#comment = 'sid=%s (???)' % sid
#name = 'U%i' % nid
#assert nid > 0, nid
#component = str(data[2])
for component in components:
assert component in '0123456', components
return USET1(name, ids, components, comment=comment)
def cross_reference(self, model: BDF) -> None:
"""
Cross links the card so referenced cards can be extracted directly
Parameters
----------
model : BDF()
the BDF object
"""
msg = ', which is required by USET1 name=%s' % (self.name)
self.ids_ref = model.EmptyNodes(self.node_ids, msg=msg)
def uncross_reference(self) -> None:
"""Removes cross-reference links"""
self.ids = self.node_ids
self.ids_ref = None
@property
def node_ids(self):
if self.ids_ref is None:
return self.ids
msg = ', which is required by USET1 name=%s' % (self.name)
return _node_ids(self, self.ids_ref, allow_empty_nodes=True, msg=msg)
def raw_fields(self):
"""gets the "raw" card without any processing as a list for printing"""
list_fields = ['USET1', self.name, self.components] + collapse_thru(self.node_ids)
return list_fields
def __repr__(self):
    """small-field card image, prefixed by the stored comment"""
    return self.comment + print_card_8(self.raw_fields())
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,658
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/cards/aero/dynamic_loads.py
|
# coding: utf-8
"""
All aero cards are defined in this file. This includes:
* AERO
* FLFACT
* FLUTTER
* GUST
* MKAERO1 / MKAERO2
All cards are BaseCard objects.
"""
from __future__ import annotations
from itertools import count
from typing import TYPE_CHECKING
import numpy as np
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.bdf.field_writer_8 import set_blank_if_default, print_card_8
from pyNastran.bdf.field_writer_16 import print_card_16
from pyNastran.bdf.cards.base_card import BaseCard
from pyNastran.utils.atmosphere import (
make_flfacts_eas_sweep, make_flfacts_alt_sweep, make_flfacts_mach_sweep,
atm_density, _velocity_factor)
from pyNastran.bdf.bdf_interface.assign_type import (
integer, integer_or_blank, double, double_or_blank, string,
fields, string_or_blank, double_string_or_blank, interpret_value)
from pyNastran.bdf.cards.utils import wipe_empty_fields
if TYPE_CHECKING: # pragma: no cover
from pyNastran.bdf.bdf import BDF
class Aero(BaseCard):
    """Base class for AERO and AEROS cards."""

    def __init__(self):
        """
        Common class for AERO, AEROS

        Attributes
        ----------
        acsid : int; default=0
            aerodyanmic coordinate system
            defines the direction of the wind
        sym_xz : int; default=0
            xz symmetry flag (+1=symmetry; -1=antisymmetric)
        sym_xy : int; default=0
            xy symmetry flag (+1=symmetry; -1=antisymmetric)
        """
        BaseCard.__init__(self)
        # subclasses fill these in; acsid_ref is set by cross_reference
        self.acsid = None
        self.sym_xz = None
        self.sym_xy = None
        self.acsid_ref = None

    def Acsid(self):
        """aero coordinate system id, preferring the cross-referenced coord"""
        if self.acsid_ref is not None:
            return self.acsid_ref.cid
        return self.acsid

    @property
    def is_symmetric_xy(self):
        """is the xy symmetry flag set to +1 (symmetric)?"""
        return self.sym_xy == 1

    @property
    def is_symmetric_xz(self):
        """is the xz symmetry flag set to +1 (symmetric)?"""
        return self.sym_xz == 1

    @property
    def is_anti_symmetric_xy(self):
        """is the xy symmetry flag set to -1 (antisymmetric)?"""
        return self.sym_xy == -1

    @property
    def is_anti_symmetric_xz(self):
        """is the xz symmetry flag set to -1 (antisymmetric)?"""
        return self.sym_xz == -1

    def set_ground_effect(self, enable):  # TODO: verify
        """toggles the xy symmetry flag used to emulate ground effect"""
        self.sym_xy = -1 if enable else 1
class AERO(Aero):
    """
    Gives basic aerodynamic parameters for unsteady aerodynamics.

    +------+-------+----------+------+--------+-------+-------+
    |  1   |   2   |    3     |  4   |   5    |   6   |   7   |
    +======+=======+==========+======+========+=======+=======+
    | AERO | ACSID | VELOCITY | REFC | RHOREF | SYMXZ | SYMXY |
    +------+-------+----------+------+--------+-------+-------+
    | AERO |   3   |   1.3+   | 100. |  1.-5  |   1   |  -1   |
    +------+-------+----------+------+--------+-------+-------+
    """
    type = 'AERO'
    _properties = ['is_anti_symmetric_xy', 'is_anti_symmetric_xz',
                   'is_symmetric_xy', 'is_symmetric_xz']
    _field_map = {
        1: 'acsid', 2: 'velocity', 3: 'cRef', 4: 'rhoRef', 5: 'symXZ',
        6: 'symXY',
    }

    @classmethod
    def _init_from_empty(cls):
        velocity = 1.
        cref = 1.
        rho_ref = 1.
        return AERO(velocity, cref, rho_ref, acsid=0, sym_xz=0, sym_xy=0, comment='')

    def __init__(self, velocity, cref, rho_ref, acsid=0, sym_xz=0, sym_xy=0, comment=''):
        """
        Creates an AERO card

        Parameters
        ----------
        velocity : float
            the airspeed
        cref : float
            the aerodynamic chord
        rho_ref : float
            FLFACT density scaling factor
        acsid : int; default=0
            aerodyanmic coordinate system
            defines the direction of the wind
        sym_xz : int; default=0
            xz symmetry flag (+1=symmetry; -1=antisymmetric)
        sym_xy : int; default=0
            xy symmetry flag (+1=symmetry; -1=antisymmetric)
        comment : str; default=''
            a comment for the card
        """
        Aero.__init__(self)
        if comment:
            self.comment = comment
        #: Aerodynamic coordinate system identification
        if acsid is None:
            acsid = 0
        self.acsid = acsid
        #: Velocity for aerodynamic force data recovery and to calculate the BOV
        #: parameter
        self.velocity = velocity
        #: Reference length for reduced frequency
        self.cref = cref
        #: Reference density
        self.rho_ref = rho_ref
        #: Symmetry key for the aero coordinate x-z plane. See Remark 6.
        #: (Integer = +1 for symmetry, 0 for no symmetry, and -1 for antisymmetry;
        #: Default = 0)
        self.sym_xz = sym_xz
        #: The symmetry key for the aero coordinate x-y plane can be used to
        #: simulate ground effect. (Integer = -1 for symmetry, 0 for no symmetry,
        #: and +1 for antisymmetry; Default = 0)
        self.sym_xy = sym_xy

    def validate(self):
        """checks that acsid/sym_xz/sym_xy are integers; raises TypeError"""
        msg = ''
        if not isinstance(self.acsid, integer_types):
            msg += 'acsid=%r must be an integer; type=%s' % (
                self.acsid, type(self.acsid))
        if not isinstance(self.sym_xz, integer_types):
            # bug fix: previously ``msg =`` here, which silently discarded
            # any acsid error collected above
            msg += 'sym_xz=%r must be an integer; type=%s' % (
                self.sym_xz, type(self.sym_xz))
        if not isinstance(self.sym_xy, integer_types):
            # bug fix: same += fix as sym_xz
            msg += 'sym_xy=%r must be an integer; type=%s' % (
                self.sym_xy, type(self.sym_xy))
        if msg:
            raise TypeError(msg + str(self))

    def cross_reference(self, model: BDF) -> None:
        """
        Cross refernece aerodynamic coordinate system.

        Parameters
        ----------
        model : BDF
            The BDF object.
        """
        msg = ', which is required by AERO'
        self.acsid_ref = model.Coord(self.acsid, msg=msg)

    def safe_cross_reference(self, model: BDF, xref_errors):
        """
        Safe cross refernece aerodynamic coordinate system.

        Parameters
        ----------
        model : BDF
            The BDF object.
        """
        msg = ', which is required by AERO'
        self.acsid_ref = model.safe_coord(self.acsid, None, xref_errors, msg=msg)

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds an AERO card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        acsid = integer_or_blank(card, 1, 'acsid', 0)
        velocity = double_or_blank(card, 2, 'velocity')
        cref = double(card, 3, 'cRef')
        rho_ref = double(card, 4, 'rho_ref')
        sym_xz = integer_or_blank(card, 5, 'symXZ', 0)
        sym_xy = integer_or_blank(card, 6, 'symXY', 0)
        assert len(card) <= 7, f'len(AERO card) = {len(card):d}\ncard={card}'
        return AERO(velocity, cref, rho_ref, acsid=acsid, sym_xz=sym_xz, sym_xy=sym_xy,
                    comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Adds an AERO card from the OP2;
        data = (acsid, velocity, cref, rho_ref, sym_xz, sym_xy)
        """
        assert len(data) == 6, 'data = %s' % data
        acsid = data[0]
        velocity = data[1]
        cref = data[2]
        rho_ref = data[3]
        sym_xz = data[4]
        sym_xy = data[5]
        # bug fix: the constructor signature is (velocity, cref, rho_ref,
        # acsid=..., sym_xz=..., sym_xy=...); previously acsid was passed
        # positionally as the velocity, shifting every field by one
        return AERO(velocity, cref, rho_ref, acsid=acsid,
                    sym_xz=sym_xz, sym_xy=sym_xy, comment=comment)

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        self.acsid_ref = None

    def update(self, maps):
        """
        maps = {
            'coord' : cid_map,
        }
        """
        cid_map = maps['coord']
        self.acsid = cid_map[self.acsid]

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : List[int/float/str]
            the fields that define the card
        """
        list_fields = ['AERO', self.Acsid(), self.velocity, self.cref,
                       self.rho_ref, self.sym_xz, self.sym_xy]
        return list_fields

    def repr_fields(self):
        """
        Gets the fields in their simplified form

        Returns
        -------
        fields : List[varies]
            the fields that define the card
        """
        sym_xz = set_blank_if_default(self.sym_xz, 0)
        sym_xy = set_blank_if_default(self.sym_xy, 0)
        list_fields = ['AERO', self.Acsid(), self.velocity, self.cref,
                       self.rho_ref, sym_xz, sym_xy]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """
        Writes the card with the specified width and precision

        Parameters
        ----------
        size : int (default=8)
            size of the field; {8, 16}
        is_double : bool (default=False)
            is this card double precision

        Returns
        -------
        msg : str
            the string representation of the card
        """
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class FLFACT(BaseCard):
    """
    Defines densities, Mach numbers, velocities, or reduced frequencies
    for flutter analysis; referenced by the FLUTTER card.

    +--------+-----+----+------+-----+----+----+----+----+
    |   1    |  2  | 3  |  4   |  5  | 6  | 7  | 8  | 9  |
    +========+=====+====+======+=====+====+====+====+====+
    | FLFACT | SID | F1 |  F2  | F3  | F4 | F5 | F6 | F7 |
    +--------+-----+----+------+-----+----+----+----+----+
    |        | F8  | F9 | etc. |     |    |    |    |    |
    +--------+-----+----+------+-----+----+----+----+----+
    | FLFACT | 97  | .3 |  .7  | 3.5 |    |    |    |    |
    +--------+-----+----+------+-----+----+----+----+----+

    # delta quantity approach

    +--------+-----+-------+------+-------+----+--------+
    |   1    |  2  |   3   |  4   |   5   | 6  |   7    |
    +========+=====+=======+======+=======+====+========+
    | FLFACT | SID |  F1   | THRU |  FNF  | NF |  FMID  |
    +--------+-----+-------+------+-------+----+--------+
    | FLFACT | 201 | 0.200 | THRU | 0.100 | 11 | 0.1333 |
    +--------+-----+-------+------+-------+----+--------+
    """
    type = 'FLFACT'

    @classmethod
    def _init_from_empty(cls):
        # minimal valid card (used when building a blank card programmatically)
        sid = 1
        factors = [1.]
        return FLFACT(sid, factors, comment='')

    def __init__(self, sid, factors, comment=''):
        """
        Creates an FLFACT card, which defines factors used for flutter
        analysis.  These factors define either:
         - density
         - mach
         - velocity
         - reduced frequency
        depending on the FLUTTER method chosen (e.g., PK, PKNL, PKNLS)

        Parameters
        ----------
        sid : int
            the id of a density, reduced_frequency, mach, or velocity table
            the FLUTTER card defines the meaning
        factors : varies
            values : List[float, ..., float]
                list of factors
            List[f1, THRU, fnf, nf, fmid]
                f1 : float
                    first value
                THRU : str
                    the word THRU
                fnf : float
                    second value
                nf : int
                    number of values
                fmid : float; default=(f1 + fnf) / 2.
                    the mid point to bias the array
                TODO: does f1 need be be greater than f2/fnf???
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.sid = sid
        #self.f1 = f1
        #self.fnf = fnf
        #self.nf = nf
        #self.fmid = fmid

        # the dumb string_types thing is because we also get floats;
        # only the [f1, 'THRU', fnf, nf(, fmid)] form triggers expansion
        if len(factors) > 1 and isinstance(factors[1], str) and factors[1] == 'THRU':
            #msg = 'embedded THRUs not supported yet on FLFACT card\n'
            nfactors = len(factors)
            if nfactors == 4:
                (f1, _thru, fnf, nf) = factors
                fmid = (f1 + fnf) / 2.
            elif nfactors == 5:
                (f1, _thru, fnf, nf, fmid) = factors
                #assert _thru.upper() == 'THRU', 'factors=%s' % str(factors)
            else:
                raise RuntimeError('factors must be length 4/5; factors=%s' % factors)
            # i = 1..nf; the fmid term biases the spacing of the sweep so
            # points cluster toward one end (see the FLFACT QRG remarks);
            # fmid == (f1+fnf)/2 gives a uniform distribution
            i = np.linspace(0, nf, nf, endpoint=False) + 1
            factors = (
                (f1*(fnf - fmid) * (nf-i) + fnf * (fmid - f1) * (i-1)) /
                (   (fnf - fmid) * (nf-i) +       (fmid - f1) * (i-1))
            )
        self.factors = np.asarray(factors)

    def validate(self):
        # an empty factor table is meaningless and would break min()/max()
        if len(self.factors) == 0:
            raise ValueError('FLFACT sid=%s is empty; factors=%s' % (self.sid, str(self.factors)))

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds an FLFACT card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        assert len(card) > 2, 'len(FLFACT card)=%s; card=%s' % (len(card), card)
        # field 3 disambiguates the two card forms:
        #   None  -> single-factor card;  float -> plain list of factors;
        #   'THRU' -> delta-quantity (interpolated) form
        field3 = double_string_or_blank(card, 3, 'THRU')
        if field3 is None:
            f1 = double(card, 2, 'f1')
            factors = [f1]
            assert len(card) == 3, 'len(FLFACT card)=%s; card=%s' % (len(card), card)
        elif isinstance(field3, float):
            factors = fields(double, card, 'factors', i=2, j=len(card))
        elif isinstance(field3, str) and field3 == 'THRU':
            f1 = double(card, 2, 'f1')
            fnf = double(card, 4, 'fnf')
            nf = integer(card, 5, 'nf')
            fmid_default = (f1 + fnf) / 2.
            fmid = double_or_blank(card, 6, 'fmid', fmid_default)
            assert len(card) <= 7, 'len(FLFACT card)=%s; card=%s' % (len(card), card)
            # expansion into actual values happens in __init__
            factors = [f1, 'THRU', fnf, nf, fmid]
        else:
            raise SyntaxError('expected a float or string for FLFACT field 3; value=%r' % field3)
        return FLFACT(sid, factors, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        # data = [sid, f1, f2, ...]
        sid = data[0]
        factors = data[1:]
        return FLFACT(sid, factors, comment=comment)

    def max(self):
        return self.factors.max()

    def min(self):
        return self.factors.min()

    #def uncross_reference(self) -> None:
        #pass

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        list_fields = ['FLFACT', self.sid] + list(self.factors)
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        # repr_fields is not defined here; presumably inherited from
        # BaseCard (falls back to raw_fields) — TODO confirm
        card = self.repr_fields()
        if size == 8:
            return self.comment + print_card_8(card)
        return self.comment + print_card_16(card)
FLUTTER_MSG = """
+---------+-----+--------+------+------+-------+-------+-------------+------+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+=========+=====+========+======+======+=======+=======+=============+======+
| FLUTTER | SID | METHOD | DENS | MACH | RFREQ | IMETH | NVALUE/OMAX | EPS |
+---------+-----+--------+------+------+-------+-------+-------------+------+
| FLUTTER | 19 | K | 119 | 219 | 319 | S | 5 | 1.-4 |
+---------+-----+--------+------+------+-------+-------+-------------+------+""".strip()
class FLUTTER(BaseCard):
    """
    Defines data needed to perform flutter analysis.

    +---------+-----+--------+------+------+-------+-------+-------------+------+
    |    1    |  2  |   3    |  4   |  5   |   6   |   7   |      8      |  9   |
    +=========+=====+========+======+======+=======+=======+=============+======+
    | FLUTTER | SID | METHOD | DENS | MACH | RFREQ | IMETH | NVALUE/OMAX | EPS  |
    +---------+-----+--------+------+------+-------+-------+-------------+------+
    | FLUTTER | 19  |   K    | 119  | 219  |  319  |   S   |      5      | 1.-4 |
    +---------+-----+--------+------+------+-------+-------+-------------+------+
    """
    type = 'FLUTTER'
    _field_map = {
        1: 'sid', 2: 'method', 3: 'density', 4: 'mach',
        5: 'reduced_freq_velocity', 6: 'imethod',
        8: 'epsilon',
    }
    _properties = ['_field_map', 'headers', ]

    @classmethod
    def _init_from_empty(cls):
        sid = 1
        method = 'PKNL'
        density = 1
        mach = 1
        reduced_freq_velocity = 1
        return FLUTTER(sid, method, density, mach, reduced_freq_velocity,
                       imethod='L', nvalue=None, omax=None, epsilon=1.0e-3, comment='')

    def _get_field_helper(self, n):
        """
        Gets complicated parameters on the FLUTTER card

        Parameters
        ----------
        n : int
            the field number to update

        Returns
        -------
        value : int/float/str
            the value for the appropriate field
        """
        if n == 7:
            # field 7 is NVALUE for K/KE/PK/PKNL but OMAX for PKS/PKNLS
            if self.method in ['K', 'KE']:
                value = self.nvalue
            elif self.method in ['PKS', 'PKNLS']:
                value = self.omax
            else:
                value = self.nvalue
            return value
        else:
            raise KeyError('Field %r is an invalid FLUTTER entry.' % (n))

    def _update_field_helper(self, n, value):
        """
        Updates complicated parameters on the FLUTTER card

        Parameters
        ----------
        n : int
            the field number to update
        value : int/float/str
            the value for the appropriate field
        """
        if n == 7:
            if self.method in ['K', 'KE']:
                self.nvalue = value
            elif self.method in ['PKS', 'PKNLS']:
                self.omax = value
            else:
                self.nvalue = value
        else:
            raise KeyError('Field %r=%r is an invalid FLUTTER entry.' % (n, value))

    def __init__(self, sid: int, method, density, mach, reduced_freq_velocity,
                 imethod: str='L', nvalue=None, omax=None, epsilon: float=1.0e-3, comment='',
                 validate: bool=False):
        """
        Creates a FLUTTER card, which is required for a flutter (SOL 145)
        analysis.

        Parameters
        ----------
        sid : int
            flutter id
        method : str
            valid methods = [K, KE,
                             PKS, PKNLS, PKNL, PKE]
        density : int
            defines a series of air densities in units of mass/volume
            PARAM,WTMASS does not affect this
            AERO affects this
            references an FLFACT id
        mach : int
            defines a series of the mach numbers
            references an FLFACT id
        reduced_freq_velocity : int
            Defines a series of either:
               1) reduced frequencies - K, KE
               2) velocities - PK, PKNL, PKS, PKNLS
            depending on the method chosen.
            references an FLFACT id
        imethod : str; default='L'
            Choice of interpolation method for aerodynamic matrix interpolation.
            imethods :
               1) L - linear
               2) S - surface
               3) TCUB - termwise cubic
        nvalue : int
            Number of eigenvalues beginning with the first eigenvalue for
            output and plots
        omax : float
            For the PKS and PKNLS methods, OMAX specifies the maximum frequency, in
            Hz., to be used in he flutter sweep.
            MSC only.
        epsilon : float; default=1.0e-3
            Convergence parameter for k. Used in the PK and PKNL methods only
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.sid = sid
        # the PK-family methods always interpolate linearly
        if method in ['PK', 'PKNL', 'PKNLS']:
            imethod = 'L'
        #else:
            #assert imethod in ['S', 'L', None], imethod
        self.method = method
        self.density = density
        self.mach = mach

        # KFREQ - K, KE
        # VEL - PK, PKNL, PKS, PKNLS
        self.reduced_freq_velocity = reduced_freq_velocity

        #
        self.imethod = imethod
        self.nvalue = nvalue
        self.omax = omax
        self.epsilon = epsilon
        self.density_ref = None
        self.mach_ref = None
        self.reduced_freq_velocity_ref = None
        if validate:
            self.validate()

    def validate(self):
        """checks method/imethod against their allowed values"""
        msg = ''
        if self.method not in {'K', 'KE', 'PK', 'PKNL', 'PKS', 'PKNLS'}:
            msg += f'method = {self.method!r}; allowed=[K, KE, PKS, PKNLS, PKNL, PK]\n'
        if self.imethod not in {'L', 'S', 'TCUB'}:
            msg += f'imethod = {self.imethod!r}; allowed=[L, S, TCUB]\n'
        if msg:
            raise ValueError(msg + str(self))

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a FLUTTER card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        # NOTE(review): 'L' is not a valid METHOD; a blank field 2 will hit
        # the NotImplementedError below — confirm the intended default
        method = string_or_blank(card, 2, 'method (K, KE, PKS, PKNLS, PKNL, PK)', default='L')
        density_id = integer(card, 3, 'density')
        mach_id = integer(card, 4, 'mach')
        reduced_freq_velocity_id = integer(card, 5, 'reduced_freq_velocity')

        omax = None
        imethod = string_or_blank(card, 6, 'imethod', 'L')
        if method in ['K', 'KE']:
            nvalue = integer_or_blank(card, 7, 'nvalue')
            assert imethod in ['L', 'S', 'TCUB'], 'imethod = %s' % imethod  # linear-surface
        elif method in ['PKS', 'PKNLS']:
            nvalue = None
            omax = double_or_blank(card, 7, 'omax')
        elif method == 'PKNL':
            nvalue = integer_or_blank(card, 7, 'nvalue')
        elif method == 'PK':
            nvalue = integer_or_blank(card, 7, 'nvalue')
        else:
            raise NotImplementedError('FLUTTER method=%r' % method)

        assert method in ['K', 'KE', 'PK', 'PKS', 'PKNL', 'PKNLS', None], method
        epsilon = double_or_blank(card, 8, 'epsilon', 1e-3)  # not defined in QRG
        assert len(card) <= 9, f'len(FLUTTER card) = {len(card):d}\ncard={card}'
        return FLUTTER(sid, method, density_id, mach_id, reduced_freq_velocity_id,
                       imethod=imethod, nvalue=nvalue, omax=omax,
                       epsilon=epsilon, comment=comment)

    def make_flfacts_eas_sweep(self, model: BDF,
                               alt: float, eass: list[float],
                               alt_units: str='m',
                               velocity_units: str='m/s',
                               density_units: str='kg/m^3',
                               eas_units: str='m/s') -> None:
        """
        Makes a sweep across equivalent airspeed for a constant altitude.

        Adds FLFACT cards (sid+1..sid+4) for density/mach/velocity/EAS
        to the model.

        Parameters
        ----------
        model : BDF
            the BDF model object
        alt : float
            Altitude in alt_units
        eass : List[float]
            Equivalent airspeed in eas_units
        alt_units : str; default='m'
            the altitude units; ft, kft, m
        velocity_units : str; default='m/s'
            the velocity units; ft/s, m/s, in/s, knots
        density_units : str; default='kg/m^3'
            the density units; slug/ft^3, slinch/in^3, kg/m^3
        eas_units : str; default='m/s'
            the equivalent airspeed units; ft/s, m/s, in/s, knots
        """
        # bug fix: eass is documented as a List[float], which has no
        # .min()/.max(); normalize to an ndarray (sort is in-place either way)
        eass = np.asarray(eass)
        eass.sort()
        rho, mach, velocity = make_flfacts_eas_sweep(
            alt, eass,
            alt_units=alt_units, velocity_units=velocity_units,
            density_units=density_units, eas_units=eas_units)
        flfact_rho = self.sid + 1
        flfact_mach = self.sid + 2
        flfact_velocity = self.sid + 3
        flfact_eas = self.sid + 4

        comment = ' density: min=%.3e max=%.3e %s' % (
            rho.min(), rho.max(), density_units,
        )
        model.add_flfact(flfact_rho, rho, comment=comment)
        model.add_flfact(flfact_mach, mach, comment=' Mach: %s' % mach.min())
        comment = ' velocity: min=%.3f max=%.3f %s' % (
            velocity.min(), velocity.max(), velocity_units)
        model.add_flfact(flfact_velocity, velocity, comment=comment)

        # eas in velocity units
        comment = ' EAS: min=%.3f max=%.3f %s' % (
            eass.min(), eass.max(), eas_units)
        model.add_flfact(flfact_eas, eass, comment=comment)

    def make_flfacts_alt_sweep(self, model: BDF, mach, alts, eas_limit: float=1000.,
                               alt_units: str='m',
                               velocity_units: str='m/s',
                               density_units: str='kg/m^3',
                               eas_units: str='m/s') -> None:
        """makes an altitude sweep; adds FLFACT cards (sid+1..sid+5)"""
        # bug fix: alts may be a plain list (no .min()/.max()); normalize;
        # the sweep runs from high altitude to low (descending)
        alts = np.asarray(alts)
        alts.sort()
        alts = alts[::-1]
        rho, mach, velocity = make_flfacts_alt_sweep(
            mach, alts, eas_limit=eas_limit,
            alt_units=alt_units,
            velocity_units=velocity_units,
            density_units=density_units,
            eas_units=eas_units)
        flfact_rho = self.sid + 1
        flfact_mach = self.sid + 2
        flfact_velocity = self.sid + 3
        flfact_eas = self.sid + 4
        flfact_alt = self.sid + 5

        # the helper may clip the sweep (eas_limit), so re-trim the altitudes
        alts2 = alts[:len(rho)]
        assert len(rho) == len(alts2)
        comment = ' density: min=%.3e max=%.3e %s; alt min=%.0f max=%.0f %s' % (
            rho.min(), rho.max(), density_units,
            alts2.min(), alts2.max(), alt_units,
        )
        model.add_flfact(flfact_rho, rho, comment=comment)
        model.add_flfact(flfact_mach, mach, comment=' Mach: %s' % mach.min())
        comment = ' velocity: min=%.3f max=%.3f %s' % (
            velocity.min(), velocity.max(), velocity_units)
        model.add_flfact(flfact_velocity, velocity, comment=comment)

        # eas in velocity units
        rho0 = atm_density(0., alt_units=alt_units, density_units=density_units)
        eas = velocity * np.sqrt(rho / rho0)
        kvel = _velocity_factor(velocity_units, eas_units)
        eas_in_eas_units = eas * kvel
        comment = ' EAS: min=%.3f max=%.3f %s' % (
            eas_in_eas_units.min(), eas_in_eas_units.max(), eas_units)
        model.add_flfact(flfact_eas, eas_in_eas_units, comment=comment)

        comment = ' Alt: min=%.3f max=%.3f %s' % (alts2.min(), alts2.max(), alt_units)
        model.add_flfact(flfact_alt, alts2, comment=comment)

    def make_flfacts_mach_sweep(self, model, alt, machs, eas_limit=1000., alt_units='m',
                                velocity_units='m/s',
                                density_units='kg/m^3',
                                eas_units='m/s'):
        """makes a mach sweep; adds FLFACT cards (sid+1..sid+4)"""
        # normalize for consistency with the other sweeps; descending order
        machs = np.asarray(machs)
        machs.sort()
        machs = machs[::-1]
        rho, mach, velocity = make_flfacts_mach_sweep(
            alt, machs, eas_limit=eas_limit,
            alt_units=alt_units,
            velocity_units=velocity_units,
            density_units=density_units,
            eas_units=eas_units)
        machs2 = machs[:len(rho)]
        assert len(rho) == len(machs2)
        flfact_rho = self.sid + 1
        flfact_mach = self.sid + 2
        flfact_velocity = self.sid + 3
        flfact_eas = self.sid + 4

        comment = ' density: min=%.3e max=%.3e %s; alt %.0f %s' % (
            rho.min(), rho.max(), density_units,
            alt, alt_units,
        )
        model.add_flfact(flfact_rho, rho, comment=comment)
        comment = ' Mach: min=%s max=%s' % (mach.min(), mach.max())
        model.add_flfact(flfact_mach, mach, comment=comment)
        comment = ' velocity: min=%.3f max=%.3f %s' % (
            velocity.min(), velocity.max(), velocity_units)
        model.add_flfact(flfact_velocity, velocity, comment=comment)

        # eas in velocity units
        rho0 = atm_density(0., alt_units=alt_units, density_units=density_units)
        eas = velocity * np.sqrt(rho / rho0)
        kvel = _velocity_factor(velocity_units, eas_units)
        eas_in_eas_units = eas * kvel
        comment = ' EAS: min=%.3f max=%.3f %s' % (
            eas_in_eas_units.min(), eas_in_eas_units.max(), eas_units)
        model.add_flfact(flfact_eas, eas_in_eas_units, comment=comment)

    @property
    def headers(self):
        """labels for the FLFACT tables referenced by this card"""
        headers = ['density', 'mach']
        if self.method in ['PK', 'PKS', 'PKNL', 'PKNLS']:
            headers.append('velocity')
        elif self.method in ['K', 'KE']:
            headers.append('reduced_frequency')
        else:
            raise NotImplementedError('FLUTTER method=%r' % self.method)
        return headers

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """
        Builds a FLUTTER from OP2 data.

        NOTE(review): the original asserted ``len(data) == 8`` but then read
        ``data[8]``, which can never both succeed; the assert now matches the
        9 fields actually consumed.  ``data[1]`` was read into ``method`` and
        immediately overwritten by ``data[5]``; the dead store was dropped.
        The field layout should be confirmed against the OP2 reader.
        """
        assert len(data) == 9, 'FLUTTER = %s' % data
        sid = data[0]
        density = data[2]
        mach = data[3]
        reduced_freq_velocity = data[4]
        method = data[5]
        imethod = data[6]
        nvalue = data[7]
        omax = data[8]
        epsilon = None
        return FLUTTER(sid, method, density, mach, reduced_freq_velocity,
                       imethod, nvalue, omax,
                       epsilon, comment=comment)

    def cross_reference(self, model: BDF) -> None:
        """
        Cross links the card so referenced cards can be extracted directly

        Parameters
        ----------
        model : BDF()
            the BDF object
        """
        msg = ', which is required by FLUTTER sid=%s' % self.sid
        self.density_ref = model.FLFACT(self.density, msg=msg)
        self.mach_ref = model.FLFACT(self.mach, msg=msg)
        self.reduced_freq_velocity_ref = model.FLFACT(self.reduced_freq_velocity, msg=msg)

    def safe_cross_reference(self, model, xref_errors=None):
        """like cross_reference, but missing FLFACTs are skipped

        xref_errors is accepted (and currently unused) for signature
        consistency with the other cards' safe_cross_reference methods
        """
        msg = ', which is required by FLUTTER sid=%s' % self.sid
        try:
            self.density_ref = model.FLFACT(self.density, msg=msg)
        except KeyError:
            pass
        try:
            self.mach_ref = model.FLFACT(self.mach, msg=msg)
        except KeyError:
            pass
        try:
            self.reduced_freq_velocity_ref = model.FLFACT(self.reduced_freq_velocity, msg=msg)
        except KeyError:
            pass

    def uncross_reference(self) -> None:
        """Removes cross-reference links"""
        # collapse the refs back to ids before clearing them
        self.density = self.get_density()
        self.mach = self.get_mach()
        self.reduced_freq_velocity = self.get_rfreq_vel()
        self.density_ref = None
        self.mach_ref = None
        self.reduced_freq_velocity_ref = None

    def get_density(self):
        """density FLFACT id (from the xref when available)"""
        if self.density_ref is not None:
            return self.density_ref.sid
        return self.density

    def get_mach(self):
        """mach FLFACT id (from the xref when available)"""
        if self.mach_ref is not None:
            return self.mach_ref.sid
        return self.mach

    def get_rfreq_vel(self):
        """reduced frequency / velocity FLFACT id (from the xref when available)"""
        if self.reduced_freq_velocity_ref is not None:
            return self.reduced_freq_velocity_ref.sid
        return self.reduced_freq_velocity

    def _get_raw_nvalue_omax(self):
        """(imethod, field-7 value) without blanking defaults"""
        if self.method in ['K', 'KE']:
            #assert self.imethod in ['L', 'S'], 'imethod = %s' % self.imethod
            return self.imethod, self.nvalue
        elif self.method in ['PKS', 'PKNLS']:
            return self.imethod, self.omax
        # PK, PKNL
        return self.imethod, self.nvalue

    def _get_repr_nvalue_omax(self):
        """(imethod, field-7 value) with default imethod blanked"""
        if self.method in ['K', 'KE']:
            imethod = set_blank_if_default(self.imethod, 'L')
            #assert self.imethod in ['L', 'S'], 'imethod = %s' % self.imethods
            return imethod, self.nvalue
        elif self.method in ['PKS', 'PKNLS']:
            return self.imethod, self.omax
        # PK, PKNL
        return self.imethod, self.nvalue

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        (imethod, nvalue) = self._get_raw_nvalue_omax()
        list_fields = ['FLUTTER', self.sid, self.method, self.get_density(),
                       self.get_mach(), self.get_rfreq_vel(), imethod, nvalue, self.epsilon]
        return list_fields

    def repr_fields(self):
        """gets the fields in their simplified (defaults-blanked) form"""
        (imethod, nvalue) = self._get_repr_nvalue_omax()
        epsilon = set_blank_if_default(self.epsilon, 0.001)
        list_fields = ['FLUTTER', self.sid, self.method, self.get_density(), self.get_mach(),
                       self.get_rfreq_vel(), imethod, nvalue, epsilon]
        return list_fields

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the small-field card image"""
        card = self.repr_fields()
        return self.comment + print_card_8(card)
class GUST(BaseCard):
    """
    Defines a stationary vertical gust for use in aeroelastic response
    analysis.

    +------+-----+-------+-----+-----+------+
    |  1   |  2  |   3   |  4  |  5  |  6   |
    +======+=====+=======+=====+=====+======+
    | GUST | SID | DLOAD | WG  | X0  |  V   |
    +------+-----+-------+-----+-----+------+
    | GUST | 133 |  61   | 1.0 | 0.  | 1.+4 |
    +------+-----+-------+-----+-----+------+
    """
    type = 'GUST'
    _field_map = {1: 'sid', 2: 'dload', 3: 'wg', 4: 'x0', 5: 'V'}

    @classmethod
    def _init_from_empty(cls):
        # a minimal, valid GUST for programmatic construction
        return GUST(1, 1, 1., 0., V=None, comment='')

    def __init__(self, sid, dload, wg, x0, V=None, comment=''):
        """
        Creates a GUST card, which defines a stationary vertical gust
        for use in aeroelastic response analysis.

        Parameters
        ----------
        sid : int
            gust load id
        dload : int
            TLOADx or RLOADx entry that defines the time/frequency
            dependence
        wg : float
            Scale factor (gust velocity/forward velocity) for gust
            velocity
        x0 : float
            Streamwise location in the aerodynamic coordinate system of
            the gust reference point.
        V : float; default=None
            float : velocity of the vehicle (must be the same as the
                    velocity on the AERO card)
            None : ???
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        self.sid = sid
        self.dload = dload
        self.wg = wg
        self.x0 = x0
        self.V = V

    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds a GUST card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        sid = integer(card, 1, 'sid')
        dload = integer(card, 2, 'dload')
        wg = double(card, 3, 'wg')
        x0 = double(card, 4, 'x0')
        V = double_or_blank(card, 5, 'V')
        assert len(card) <= 6, f'len(GUST card) = {len(card):d}\ncard={card}'
        return GUST(sid, dload, wg, x0, V=V, comment=comment)

    @classmethod
    def add_op2_data(cls, data, comment=''):
        """builds a GUST from (sid, dload, wg, x0, V) OP2 data"""
        sid, dload, wg, x0, V = data[0], data[1], data[2], data[3], data[4]
        assert len(data) == 5, 'data = %s' % data
        return GUST(sid, dload, wg, x0, V, comment=comment)

    def _verify(self, model, xref):
        if model.aero:
            pass
            #assert model.aero.V == self.V

    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        return ['GUST', self.sid, self.dload, self.wg, self.x0, self.V]

    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes the small-field card image"""
        return self.comment + print_card_8(self.repr_fields())
class MKAERO1(BaseCard):
    """
    Provides a table of Mach numbers (m) and reduced frequencies (k) for
    aerodynamic matrix calculation.
    +---------+----+----+----+----+----+----+----+----+
    | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
    +=========+====+====+====+====+====+====+====+====+
    | MKAERO1 | m1 | m2 | m3 | m4 | m5 | m6 | m7 | m8 |
    +---------+----+----+----+----+----+----+----+----+
    | | k1 | k2 | k3 | k4 | k5 | k6 | k7 | k8 |
    +---------+----+----+----+----+----+----+----+----+
    """
    type = 'MKAERO1'
    @classmethod
    def _init_from_empty(cls):
        # minimal instance used by the card-creation test infrastructure
        machs = [1.]
        reduced_freqs = [1.]
        return MKAERO1(machs, reduced_freqs, comment='')
    def __init__(self, machs, reduced_freqs, comment=''):
        """
        Creates an MKAERO1 card, which defines a set of mach and
        reduced frequencies.
        Parameters
        ----------
        machs : List[float]
            series of Mach numbers
        reduced_freqs : List[float]
            series of reduced frequencies
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        # np.unique both sorts the values and drops duplicates
        self.machs = np.unique(machs)
        self.reduced_freqs = np.unique(reduced_freqs)
    def validate(self):
        # accumulate every problem into a single message before raising
        msg = ''
        if None in self.machs:
            msg += 'MKAERO1; None in machs=%s\n' % (self.machs)
        if None in self.reduced_freqs:
            msg += 'MKAERO1; None in rfreqs=%s\n' % (self.reduced_freqs)
        if len(self.machs) == 0:
            msg += 'MKAERO1; nmachs=%s machs=%s\n' % (len(self.machs), self.machs)
        if len(self.reduced_freqs) == 0:
            msg += 'MKAERO1; nrfreqs=%s rfreqs=%s' % (len(self.reduced_freqs), self.reduced_freqs)
        if msg:
            raise ValueError(msg.rstrip())
    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds an MKAERO1 card from ``BDF.add_card(...)``
        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        list_fields = [interpret_value(field, card) for field in card[1:]]
        # fields 1-8 hold the machs and fields 9-16 hold the reduced
        # frequencies, so the number of mach slots is the total minus the 8 k slots
        nfields = len(list_fields) - 8
        machs = []
        reduced_freqs = []
        for i in range(1, 1 + nfields):
            machs.append(double_or_blank(card, i, 'mach'))
            reduced_freqs.append(double_or_blank(card, i + 8, 'rFreq'))
        # strip the blank (None) fields; the two lists may end up different lengths
        machs = wipe_empty_fields(machs)
        reduced_freqs = wipe_empty_fields(reduced_freqs)
        return MKAERO1(machs, reduced_freqs, comment=comment)
    def mklist(self):
        # full cartesian product of machs x reduced frequencies
        mklist = []
        for mach in self.machs:
            for kfreq in self.reduced_freqs:
                mklist.append([mach, kfreq])
        return mklist
    def raw_fields(self):
        """
        Gets the fields in their unmodified form
        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        #list_fields = ['MKAERO1']
        #for (i, mach, rfreq) in zip(count(), self.machs, self.reduced_freqs):
        # list_fields += [mach, rfreq]
        # kind of a hack because there isn't a good way to do this for
        # duplicately-defined MKAERO1s
        # pad both lists out to at least 8 slots so the k values start on field 9
        machs = [None] * max(8, len(self.machs))
        freqs = [None] * max(8, len(self.reduced_freqs))
        for i, mach in enumerate(self.machs):
            machs[i] = mach
        for i, freq in enumerate(self.reduced_freqs):
            freqs[i] = freq
        list_fields = ['MKAERO1'] + machs + freqs
        return list_fields
    def write_card(self, size: int=8, is_double: bool=False) -> str:
        nmachs = len(self.machs)
        nreduced_freqs = len(self.reduced_freqs)
        if nmachs > 8 or nreduced_freqs > 8:
            # a single MKAERO1 card only holds 8 machs and 8 ks;
            # emit one card per (8-mach chunk, 8-k chunk) combination
            mach_sets = []
            rfreq_sets = []
            imach = 0
            ifreq = 0
            while imach < nmachs:
                mach_sets.append(self.machs[imach:imach+8])
                imach += 8
            while ifreq < nreduced_freqs:
                rfreq_sets.append(self.reduced_freqs[ifreq:ifreq+8])
                ifreq += 8
            msg = self.comment
            #print('mach_sets = %s' % mach_sets)
            #print('rfreq_sets = %s' % rfreq_sets)
            for mach_set in mach_sets:
                for rfreq_set in rfreq_sets:
                    msg += MKAERO1(mach_set, rfreq_set).write_card(
                        size=size, is_double=is_double)
            return msg
        machs = [None] * 8
        reduced_freqs = [None] * 8
        if not 0 < len(self.machs) <= 8:
            msg = 'MKAERO1; nmachs=%s machs=%s' % (len(self.machs), self.machs)
            raise ValueError(msg)
        if not 0 < len(self.reduced_freqs) <= 8:
            msg = 'MKAERO1; nrfreqs=%s rfreqs=%s' % (len(self.reduced_freqs), self.reduced_freqs)
            raise ValueError(msg)
        # copy the values into fixed 8-slot lists; trailing slots stay blank
        for i, mach in zip(count(), self.machs):
            machs[i] = mach
        for i, rfreq in zip(count(), self.reduced_freqs):
            reduced_freqs[i] = rfreq
        return self.comment + print_card_8(['MKAERO1'] + machs + reduced_freqs)
    def __repr__(self):
        return self.write_card()
class MKAERO2(BaseCard):
    """
    Provides a table of Mach number / reduced frequency *pairs*
    (m1, k1), (m2, k2), ... for aerodynamic matrix calculation.

    +---------+----+----+----+----+----+----+----+----+
    | 1       | 2  | 3  | 4  | 5  | 6  | 7  | 8  | 9  |
    +=========+====+====+====+====+====+====+====+====+
    | MKAERO2 | m1 | k1 | m2 | k2 | m3 | k3 | m4 | k4 |
    +---------+----+----+----+----+----+----+----+----+
    """
    type = 'MKAERO2'
    @classmethod
    def _init_from_empty(cls):
        """creates a minimal MKAERO2 for the card-creation infrastructure"""
        return MKAERO2([1.], [1.], comment='')
    def __init__(self, machs, reduced_freqs, comment=''):
        """
        Creates an MKAERO2 card, which defines a set of mach and
        reduced frequency pairs.

        Parameters
        ----------
        machs : List[float]
            series of Mach numbers (paired 1:1 with reduced_freqs)
        reduced_freqs : List[float]
            series of reduced frequencies
        comment : str; default=''
            a comment for the card
        """
        BaseCard.__init__(self)
        if comment:
            self.comment = comment
        # unlike MKAERO1, the values are paired, so they are stored as-is
        # (no sorting / deduplication)
        self.machs = machs
        self.reduced_freqs = reduced_freqs
    def validate(self):
        """checks that machs/reduced_freqs are non-empty and paired 1:1"""
        nmachs = len(self.machs)
        nfreqs = len(self.reduced_freqs)
        if nmachs == 0:
            msg = 'MKAERO2; nmachs=%s machs=%s' % (nmachs, self.machs)
            raise ValueError(msg)
        if nfreqs == 0:
            msg = 'MKAERO2; nrfreqs=%s rfreqs=%s' % (nfreqs, self.reduced_freqs)
            raise ValueError(msg)
        if nmachs != nfreqs:
            msg = 'MKAERO2; len(machs)=%s len(rfreqs)=%s; should be the same' % (
                nmachs, nfreqs)
            raise ValueError(msg)
    @classmethod
    def add_card(cls, card, comment=''):
        """
        Adds an MKAERO2 card from ``BDF.add_card(...)``

        Parameters
        ----------
        card : BDFCard()
            a BDFCard object
        comment : str; default=''
            a comment for the card
        """
        nfields = len(card.fields(1))
        machs = []
        reduced_freqs = []
        # fields alternate: m1, k1, m2, k2, ...
        for ifield in range(1, 1 + nfields, 2):
            machs.append(double(card, ifield, 'mach'))
            reduced_freqs.append(double(card, ifield + 1, 'rFreq'))
        return MKAERO2(machs, reduced_freqs, comment=comment)
    def mklist(self):
        """returns the (mach, k) pairs as a list of [m, k] lists"""
        return [[mach, kfreq]
                for mach, kfreq in zip(self.machs, self.reduced_freqs)]
    def raw_fields(self):
        """
        Gets the fields in their unmodified form

        Returns
        -------
        fields : list[varies]
            the fields that define the card
        """
        list_fields = ['MKAERO2']
        for pair in zip(self.machs, self.reduced_freqs):
            list_fields.extend(pair)
        return list_fields
    def write_card(self, size: int=8, is_double: bool=False) -> str:
        """writes one card line per 4 (m, k) pairs"""
        card_lines = []
        fields = ['MKAERO2']
        npairs_on_line = 0
        for mach, rfreq in zip(self.machs, self.reduced_freqs):
            fields.extend((mach, rfreq))
            npairs_on_line += 1
            if npairs_on_line == 4:
                # a card line is full (4 pairs = 8 fields); flush it
                card_lines.append(print_card_8(fields))
                fields = ['MKAERO2']
                npairs_on_line = 0
        if npairs_on_line:
            card_lines.append(print_card_8(fields))
        else:
            # nothing was left over; only valid if the data was paired & non-empty
            if len(self.machs) != len(self.reduced_freqs) or len(self.machs) == 0:
                msg = 'MKAERO2: len(machs)=%s len(reduced_freqs)=%s' % (
                    len(self.machs), len(self.reduced_freqs))
                raise ValueError(msg)
        return self.comment + ''.join(card_lines)
    def __repr__(self):
        return self.write_card()
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
32,985,659
|
benaoualia/pyNastran
|
refs/heads/main
|
/pyNastran/bdf/mesh_utils/bdf_equivalence.py
|
"""
defines:
model = bdf_equivalence_nodes(bdf_filename, bdf_filename_out, tol,
renumber_nodes=False, neq_max=4, xref=True,
node_set=None,
size=8, is_double=False,
remove_collapsed_elements=False,
avoid_collapsed_elements=False,
crash_on_collapse=False, log=None, debug=True)
"""
from __future__ import annotations
#import warnings
from itertools import combinations
from typing import Tuple, List, Set, Union, Optional, Any, TYPE_CHECKING
import numpy as np
from numpy import (array, unique, arange, searchsorted,
setdiff1d, intersect1d, asarray)
from numpy.linalg import norm # type: ignore
import scipy
import scipy.spatial
from pyNastran.nptyping import NDArrayNint, NDArrayN3float
from pyNastran.utils.numpy_utils import integer_types
from pyNastran.bdf.bdf import BDF
from pyNastran.bdf.mesh_utils.internal_utils import get_bdf_model
if TYPE_CHECKING: # pragma: no cover
from cpylog import SimpleLogger
from pyNastran.bdf.bdf import GRID
def bdf_equivalence_nodes(bdf_filename: str, bdf_filename_out: str, tol: float,
                          renumber_nodes: bool=False, neq_max: int=4, xref: bool=True,
                          node_set: Optional[Union[List[int], NDArrayNint]]=None,
                          size: int=8, is_double: bool=False,
                          remove_collapsed_elements: bool=False,
                          avoid_collapsed_elements: bool=False,
                          crash_on_collapse: bool=False,
                          log: Optional[SimpleLogger]=None,
                          debug: bool=True, method: str='new') -> BDF:
    """
    Equivalences nodes; keeps the lower node id; creates two nodes with the same

    Parameters
    ----------
    bdf_filename : str / BDF
        str : bdf file path
        BDF : a BDF model that is fully valid (see xref)
    bdf_filename_out : str
        a bdf_filename to write; None -> don't write
    tol : float
        the spherical tolerance
    renumber_nodes : bool; default=False
        should the nodes be renumbered
    neq_max : int; default=4
        the number of "close" points
    xref : bool; default=True
        does the model need to be cross_referenced
        (only applies to the model option)
    node_set : List[int] / (n, ) ndarray; default=None
        the list/array of nodes to consider
        (not supported with renumber_nodes=True)
    size : int; {8, 16}; default=8
        the bdf write precision
    is_double : bool; default=False
        the field precision to write
    remove_collapsed_elements : bool; default=False (unsupported)
        True : 1D/2D/3D elements will not be collapsed;
               CELASx/CDAMP/MPC/etc. are not considered
        False : no elements will be removed
    avoid_collapsed_elements : bool; default=False (unsupported)
        True : only collapses that don't break 1D/2D/3D elements will be
               considered; CELASx/CDAMP/MPC/etc. are considered
        False : element can be collapsed
    crash_on_collapse : bool; default=False
        stop if nodes have been collapsed
        False: blindly move on
        True: rereads the BDF which catches doubled nodes (temporary)
    debug : bool; default=True
        bdf debugging
    method : str; default='new'
        'new': doesn't require neq_max; new in v1.3
        'old': use neq_max; used in v1.2
    log : logger(); default=None
        bdf logging

    Returns
    -------
    model : BDF()
        The BDF model corresponding to bdf_filename_out

    .. warning:: I doubt SPOINTs/EPOINTs work correctly
    .. warning:: xref not fully implemented (assumes cid=0)
    .. todo:: node_set still runs over all the nodes in the big kdtree loop,
              which is very inefficient
    .. todo:: remove_collapsed_elements is not supported
    .. todo:: avoid_collapsed_elements is not supported

    """
    tol = float(tol)
    node_set = _simplify_node_set(node_set)
    model, nid_pairs = _bdf_equivalence_nodes(
        bdf_filename, tol,
        renumber_nodes=renumber_nodes, neq_max=neq_max,
        xref=xref, node_set=node_set, log=log, debug=debug,
        method=method,
        idtype='int32', fdtype='float64')
    model.log.debug(f'equivalence {len(nid_pairs):d} nodes')

    if bdf_filename_out is None:
        model.log.debug('skipping equivalence write')
    else:
        model.write_bdf(bdf_filename_out, size=size, is_double=is_double)

    if crash_on_collapse:
        # lazy way to make sure there aren't any collapsed nodes
        model2 = BDF(log=log, debug=debug)
        model2.read_bdf(bdf_filename_out)
    return model
#def _simplify_node_set_old(node_set: Optional[Union[List[int], List[NDArrayNint]]],
#idtype: str='int32') -> Optional[List[NDArrayNint]]:
#if node_set is None:
#return
#if isinstance(node_set, np.ndarray):
#return node_set
## list
#assert isinstance(node_set, list), type(node_set)
#node = node_set[0]
#if isinstance(node, integer_types):
#return np.array(node_set, dtype=idtype)
#raise NotImplementedError(type(node))
# list of ndarrays
#return node_set
def _simplify_node_set(node_set: Optional[Union[List[int], Set[int], List[NDArrayNint]]],
idtype: str='int32') -> Optional[List[NDArrayNint]]: # pragma: no cover
"""
accepts multiple forms of the node_set parameter
- list[int]
- set[int]
- list[int ndarray]
- int ndarray
"""
if node_set is None:
return
if isinstance(node_set, np.ndarray):
return [node_set]
elif isinstance(node_set, set):
node_set_array = asarray(list(node_set), dtype=idtype)
node_set_array.sort()
return [node_set_array]
assert isinstance(node_set, list), type(node_set)
node = node_set[0]
if isinstance(node, integer_types):
# list
node_set_array = np.array(node_set, dtype=idtype)
return [node_set_array]
else:
# list of lists
# list of numpy arrays
node_set_list = []
for node_seti in node_set:
if isinstance(node_seti, list):
node_set_array = np.array(node_seti, dtype=idtype)
elif isinstance(node_seti, set):
node_set_array = np.array(list(node_seti), dtype=idtype)
else:
assert isinstance(node_seti, np.ndarray), type(node_seti)
node_set_array = node_seti
node_set_array.sort()
node_set_list.append(node_set_array)
# list of ndarrays
return node_set_list
def _bdf_equivalence_nodes(bdf_filename: str, tol: float,
                           renumber_nodes: bool=False, neq_max: int=4, xref: bool=True,
                           node_set: Optional[List[NDArrayNint]]=None,
                           log: Optional[SimpleLogger]=None,
                           debug: bool=True, method: str='new',
                           idtype: str='int32', fdtype: str='float64') -> Tuple[BDF,
                                                                                List[Tuple[int, int]]]:
    """helper for bdf_equivalence_nodes; returns (model, collapsed nid pairs)"""
    all_node_set = get_all_node_set(node_set)
    # load the model and gather the candidate node ids / locations
    nodes_xyz, model, nids, inew = _eq_nodes_setup(
        bdf_filename, renumber_nodes=renumber_nodes,
        xref=xref, node_set=node_set, log=log, debug=debug,
        idtype=idtype, fdtype=fdtype)
    model.log.debug(f'bdf_equivalence_nodes; tol={tol}')

    # find the pairs of nodes within tol of each other
    nid_pairs = _nodes_xyz_nids_to_nid_pairs(
        nodes_xyz, nids, all_node_set,
        tol, model.log, inew,
        node_set=node_set, neq_max=neq_max, method=method, debug=debug)

    # collapse those pairs onto the lower node id
    _eq_nodes_final(nid_pairs, model, tol, all_node_set, debug=debug)
    return model, nid_pairs
def _eq_nodes_setup(bdf_filename,
                    renumber_nodes=False, xref=True,
                    node_set: Optional[List[NDArrayNint]]=None,
                    log: Optional[SimpleLogger]=None,
                    debug: bool=True,
                    idtype: str='int32',
                    fdtype: str='float64') -> Tuple[NDArrayN3float, BDF,
                                                    NDArrayNint, NDArrayNint]:
    """helper function for ``bdf_equivalence_nodes``"""
    if node_set is not None:
        if renumber_nodes:
            raise NotImplementedError('node_set is not None & renumber_nodes=True')
        assert len(node_set) > 0, node_set
        assert isinstance(node_set, list), type(node_set)
    all_node_set = get_all_node_set(node_set)

    model = get_bdf_model(bdf_filename, xref=xref, log=log, debug=debug)

    # NOTE(review): renumbering is forced off here regardless of the
    # renumber_nodes argument -- presumably intentional; confirm
    renumber_nodes = False

    # map the node ids to the slot in the nids array
    if node_set is None:
        nids, all_nids = _eq_nodes_setup_node(
            model, renumber_nodes=renumber_nodes, idtype=idtype)
    else:
        nids, all_nids = _eq_nodes_setup_node_set(
            model, node_set, all_node_set,
            renumber_nodes=renumber_nodes, idtype=idtype)

    nodes_xyz = _get_xyz_cid0(model, nids, fdtype=fdtype)
    inew = _check_for_referenced_nodes(model, node_set, nids, all_nids, nodes_xyz)
    return nodes_xyz, model, nids, inew
def _get_xyz_cid0(model: BDF, nids: NDArrayNint, fdtype: str='float32') -> NDArrayN3float:
"""gets xyz_cid0"""
coord_ids = model.coord_ids
needs_get_position = (coord_ids == [0])
if needs_get_position:
nodes_xyz = array([model.nodes[nid].get_position()
for nid in nids], dtype=fdtype)
else:
nodes_xyz = array([model.nodes[nid].xyz
for nid in nids], dtype=fdtype)
return nodes_xyz
def _eq_nodes_setup_node_set(model: BDF,
node_set: List[NDArrayNint],
all_node_set: NDArrayNint,
renumber_nodes: bool=False,
idtype:str='int32') -> Tuple[NDArrayNint, NDArrayNint]:
"""helper function for ``_eq_nodes_setup`` that handles node_sets"""
if len(node_set) > 1:
model.log.warning(f'multi node_sets; n={len(node_set)}')
node_list = list(model.nodes.keys())
all_nids = array(node_list, dtype=idtype)
#all_nids.sort()
# B - A
# these are all the nodes that are requested from all_node_set that are missing
# thus len(diff_nodes) == 0
diff_nodes = setdiff1d(all_node_set, all_nids)
if len(diff_nodes) != 0:
msg = ('The following nodes cannot be found, but are included'
' in the reduced set; nids=%s' % diff_nodes)
raise RuntimeError(msg)
# A & B
# the nodes to analyze are the union of all the nodes and the desired set
# which is basically the same as:
# nids = unique(all_node_set)
nids = intersect1d(all_nids, all_node_set, assume_unique=True) # the new values
if renumber_nodes:
raise NotImplementedError('node_set is not None & renumber_nodes=True')
#nids = array([node.nid for nid, node in sorted(model.nodes.items())
#if nid in node_set], dtype='int32')
return nids, all_nids
def _eq_nodes_setup_node(model: BDF, renumber_nodes: bool=False,
idtype: str='int32') -> Tuple[NDArrayNint, NDArrayNint]:
"""helper function for ``_eq_nodes_setup`` that doesn't handle node sets"""
inode = 0
if renumber_nodes:
model.log.info('renumbering nodes')
for nid, node in sorted(model.nodes.items()):
node.nid = inode + 1
inode += 1
nnodes = len(model.nodes)
nids = arange(1, inode + 1, dtype=idtype)
assert nids[-1] == nnodes
else:
nodes_list = list(model.nodes.keys())
nids = array(nodes_list, dtype=idtype)
nids.sort()
all_nids = nids
return nids, all_nids
def _check_for_referenced_nodes(model: BDF,
                                node_set: Optional[NDArrayNint],
                                nids: NDArrayNint,
                                all_nids: NDArrayNint,
                                nodes_xyz: NDArrayN3float) -> Optional[NDArrayNint]:
    """helper function for ``_eq_nodes_setup``"""
    # NOTE(review): with the referenced-node check below disabled (if 0:),
    # this function only runs the shape assert and always returns inew=None
    if node_set is not None:
        assert nodes_xyz.shape[0] == len(nids)
    # disabled legacy check; kept for reference (see the explanation below)
    if 0:
        # I forget entirely what this block of code is for, but my general
        # recollection was that it checked that all the nodes that were
        # referenced were included in the nids list. I'd rather break that
        # check in order to support nodes_set.
        #
        # It's also possible that it's here, so you only consider nodes that
        # are associated...
        # there is some set of points that are used on the elements that
        # will be considered.
        #
        # Presumably this is enough to capture all the node ids and NOT
        # spoints, but I doubt it...
        spoint_epoint_nid_set = set()
        for unused_eid, element in sorted(model.elements.items()):
            spoint_epoint_nid_set.update(element.node_ids)
        for unused_eid, element in sorted(model.masses.items()):
            spoint_epoint_nid_set.update(element.node_ids)
        nids_new = spoint_epoint_nid_set - set(model.spoints) - set(model.epoints)
        if None in nids_new:
            nids_new.remove(None)
        # autosorts the data
        nids_new = unique(list(nids_new))
        assert isinstance(nids_new[0], integer_types), type(nids_new[0])
        missing_nids = list(set(nids_new) - set(all_nids))
        if missing_nids:
            missing_nids.sort()
            # NOTE(review): the first msg assignment is immediately overwritten;
            # it was probably meant to be += (dead code anyway)
            msg = 'There are missing nodes...\n'  # TODO: in what???
            msg = 'missing nids=%s' % str(missing_nids)
            raise RuntimeError(msg)
        # get the node_id mapping for the kdtree
        inew = searchsorted(nids, nids_new, side='left')
        # print('nids_new =', nids_new)
    else:
        inew = None
    #assert np.array_equal(nids[inew], nids_new), 'some nodes are not defined'
    return inew
def get_all_node_set(node_set: Optional[List[NDArrayNint]]) -> NDArrayNint:
    """flattens the list of node-id arrays; empty array when node_set is None"""
    if node_set is None:
        return np.array([])
    return np.hstack(node_set)
def _eq_nodes_find_pairs(nids: NDArrayNint,
slots, ieq,
log: SimpleLogger,
all_node_set: NDArrayNint,
node_set: Optional[List[NDArrayNint]]=None) -> List[Tuple[int, int]]:
"""helper function for `bdf_equivalence_nodes`"""
irows, icols = slots
all_node_set = get_all_node_set(node_set)
if node_set is not None and len(node_set) > 1:
log.warning(f'multi node_sets; n={len(node_set)}')
#replacer = unique(ieq[slots]) ## TODO: turn this back on?
#skip_nodes = []
nid_pairs = []
if node_set is None:
for (irow, icol) in zip(irows, icols):
inid2 = ieq[irow, icol]
nid1 = nids[irow]
nid2 = nids[inid2]
if nid1 == nid2:
continue
nid_pairs.append((nid1, nid2))
return nid_pairs
for (irow, icol) in zip(irows, icols):
inid2 = ieq[irow, icol]
nid1 = nids[irow]
nid2 = nids[inid2]
if nid1 == nid2:
continue
if node_set is not None:
if nid1 not in all_node_set and nid2 not in all_node_set:
continue
for seti in node_set:
if nid1 in seti and nid2 in seti:
nid_pairs.append((nid1, nid2))
#print(f'({nid1}, {nid2})')
break
return nid_pairs
def _eq_nodes_final(nid_pairs, model: BDF, tol: float,
                    all_node_set: NDArrayNint, debug: bool=False) -> None:
    """applies nodal equivalencing to the model: collapses each candidate
    pair that is actually within tol onto the first node's id"""
    check_sets = len(all_node_set) != 0
    for nid1, nid2 in nid_pairs:
        node1 = model.nodes[nid1]
        node2 = model.nodes[nid2]

        # the kd-tree pairs are only candidates; re-check the real distance
        distance = norm(node1.get_position() - node2.get_position())
        if distance > tol:
            continue

        if check_sets:
            # when node sets were given, both nodes must belong to the union
            assert nid1 in all_node_set, 'nid1=%s all_node_set=%s' % (nid1, all_node_set)
            assert nid2 in all_node_set, 'nid2=%s all_node_set=%s' % (nid2, all_node_set)

        _update_grid(node1, node2)
    return
def _update_grid(node1: GRID, node2: GRID):
"""helper method for _eq_nodes_final"""
node2.nid = node1.nid
node2.xyz = node1.xyz
node2.cp = node1.cp
assert node2.cd == node1.cd
assert node2.ps == node1.ps
assert node2.seid == node1.seid
#node1.cp_ref = None
#node2.cp_ref = None
def _nodes_xyz_nids_to_nid_pairs(nodes_xyz: NDArrayN3float,
                                 nids: NDArrayNint,
                                 all_node_set: NDArrayNint,
                                 tol: float,
                                 log: SimpleLogger,
                                 inew: NDArrayNint,
                                 node_set: Optional[NDArrayNint]=None,
                                 neq_max: int=4, method: str='new',
                                 debug: bool=False) -> List[Tuple[int, int]]:
    """
    Helper for equivalencing

    Returns
    -------
    nid_pairs : List[Tuple[int, int]]
        a series of (nid1, nid2) pairs

    """
    if tol < 0.0:
        # a negative tolerance can never match anything
        return []
    _unused_kdt, nid_pairs = _eq_nodes_build_tree(
        nodes_xyz, nids, all_node_set,
        tol, log,
        inew=inew, node_set=node_set,
        neq_max=neq_max, method=method, debug=debug)
    return nid_pairs
def _nodes_xyz_nids_to_nid_pairs_new(kdt: scipy.spatial.cKDTree,
nids: NDArrayNint,
all_node_set: NDArrayNint,
node_set: Optional[NDArrayNint], tol: float):
"""
helper function for `bdf_equivalence_nodes`
"""
ieq3 = kdt.query_ball_tree(kdt, tol)
nid_pairs = []
if node_set is None:
for pair in ieq3:
if len(pair) == 1:
continue
# the combinations should be paired with 2 nodes in each group
for inid1, inid2 in combinations(pair, 2):
nid1 = nids[inid1]
nid2 = nids[inid2]
pair = (nid1, nid2)
if pair in nid_pairs:
continue
nid_pairs.append(pair)
return nid_pairs
nsets = len(node_set)
#if nsets > 1:
#warnings.warn('multiple node_sets not handled in _nodes_xyz_nids_to_nid_pairs_new')
for pair in ieq3:
if len(pair) == 1:
continue
# the combinations should be paired with 2 nodes in each group
for inid1, inid2 in combinations(pair, 2):
nid1 = nids[inid1]
nid2 = nids[inid2]
#if nid1 == nid2:
#continue
if nid1 not in all_node_set and nid2 not in all_node_set:
continue
pair = (nid1, nid2)
if pair in nid_pairs:
continue
nid_pairs.append(pair)
if nsets > 1:
# nid_pairs was simply the set of all potential connections
# now we filter connections that aren't part of an explicit set
nid_pairs2 = []
for pair in nid_pairs:
for seti in node_set:
nid1, nid2 = pair
if nid1 in seti and nid2 in seti:
nid_pairs2.append(pair)
break
return nid_pairs2
return nid_pairs
def _eq_nodes_build_tree(nodes_xyz: NDArrayN3float,
                         nids: NDArrayNint,
                         all_node_set: NDArrayNint,
                         tol: float,
                         log: SimpleLogger,
                         inew=None,
                         node_set: Optional[NDArrayNint]=None,
                         neq_max: int=4, method: str='new', msg: str='',
                         debug: bool=False) -> Tuple[scipy.spatial.cKDTree,
                                                     List[Tuple[int, int]]]:
    """
    helper function for `bdf_equivalence_nodes`
    Parameters
    ----------
    nodes_xyz : (nnodes, 3) float ndarray
        the xyzs to equivalence
    nids : (nnodes,) int ndarray
        the node ids
    tol : float
        the spherical equivalence tolerance
    inew : int ndarray; default=None -> slice(None)
        a slice on nodes_xyz to exclude some nodes from the equivalencing
    node_set : List[int] / (n, ) ndarray; default=None
        the list/array of nodes to consider
    neq_max : int; default=4
        the number of nodes to consider for equivalencing
    msg : str; default=''
        custom message used for errors
    Returns
    -------
    kdt : cKDTree()
        the kdtree object
    nid_pairs : List[Tuple[int, int]]
        a series of (nid1, nid2) pairs
    """
    nnodes = len(nids)
    if inew is None:
        inew = slice(None)
    assert isinstance(tol, float), 'tol=%r' % tol
    kdt = _get_tree(nodes_xyz, msg=msg)
    # NOTE(review): 'inew is None' is always False here (inew was just
    # defaulted to slice(None) above); only the slice comparison matters
    is_not_node_set = inew is None or inew == slice(None)
    # check the closest 10 nodes for equality
    if method == 'new' and is_not_node_set:
        # 'new' method: ball query + cross-check against the k-nearest method
        kdt, nid_pairs = _eq_nodes_build_tree_new(
            kdt, nodes_xyz,
            nids, all_node_set,
            nnodes, is_not_node_set,
            tol, log,
            inew=inew, node_set=node_set, neq_max=neq_max, msg=msg,
            debug=debug)
    else:
        if method == 'new':
            # the 'new' method doesn't support node_set yet
            log.warning(f'setting method to "old" because node_set is specified')
        #ieq : (nnodes, neq) int ndarray
        #    the node indices that are close
        #slots : (nnodes, neq) int ndarray
        #    the location of where
        deq, ieq = kdt.query(nodes_xyz[inew, :], k=neq_max, distance_upper_bound=tol)
        if node_set is not None:
            assert len(deq) == len(nids)
        # get the ids of the duplicate nodes
        # (indices == nnodes mark "no neighbor found within tol")
        slots = np.where(ieq[:, :] < nnodes)
        if ieq[:, -1].max() == nnodes:
            # the farthest slot was filled -> there may be more close nodes
            log.warning(f'neq_max={neq_max} and should be increased')
        nid_pairs = _eq_nodes_find_pairs(nids, slots, ieq, log,
                                         all_node_set, node_set=node_set)
    assert isinstance(nid_pairs, list), nid_pairs
    return kdt, nid_pairs
def _eq_nodes_build_tree_new(kdt: scipy.spatial.cKDTree,
                             nodes_xyz: NDArrayN3float,
                             nids: NDArrayNint,
                             all_node_set: NDArrayNint,
                             nnodes: int,
                             is_not_node_set: bool,
                             tol: float,
                             log: SimpleLogger,
                             inew=None, node_set=None, neq_max: int=4, msg: str='',
                             debug: float=False) -> Tuple[Any, List[Tuple[int, int]]]:
    """
    Finds the equivalent node pairs with the 'new' (ball-query) method and,
    in debug mode, cross-checks them against the 'old' (k-nearest) method.

    Returns
    -------
    kdt : cKDTree()
        the kdtree object
    nid_pairs : List[Tuple[int, int]]
        a series of (nid1, nid2) pairs

    """
    assert isinstance(nnodes, int), nnodes
    # old method: k-nearest query; used as the reference answer
    unused_deq, ieq = kdt.query(nodes_xyz[inew, :], k=neq_max, distance_upper_bound=tol)
    slots = np.where(ieq[:, :] < nnodes)
    nid_pairs_expected = _eq_nodes_find_pairs(
        nids, slots, ieq, log, all_node_set, node_set=node_set)

    if is_not_node_set:
        nid_pairs = _nodes_xyz_nids_to_nid_pairs_new(kdt, nids, all_node_set,
                                                     node_set, tol)
    else:
        raise NotImplementedError(f'node_set = {node_set}')

    snid_pairs = set(nid_pairs)
    snid_pairs_expected = set(nid_pairs_expected)
    diff_bad = snid_pairs - snid_pairs_expected
    # bug fix: "missed" pairs are the ones the old method found and the new
    # method did not (expected - actual); previously this recomputed diff_bad
    diff_missed = snid_pairs_expected - snid_pairs
    # bug fix: parenthesized so the warnings only fire in debug mode;
    # 'a and b or c' bound as '(a and b) or c' and ignored the debug flag
    if debug and (len(diff_bad) or len(diff_missed)):  # pragma: no cover
        #log.warning(f'nid_pairs = {nid_pairs}')
        #log.warning(f'nid_pairs_expected = {nid_pairs_expected}')
        log.warning(f'diff_bad = {diff_bad}')
        log.warning(f'diff_missed = {diff_missed}')
    return kdt, nid_pairs
def _get_tree(nodes_xyz: NDArrayN3float, msg: str='') -> scipy.spatial.cKDTree:
"""gets the kdtree"""
assert isinstance(nodes_xyz, np.ndarray), type(nodes_xyz)
assert nodes_xyz.shape[0] > 0, 'nnodes=0%s' % msg
# build the kdtree
try:
kdt = scipy.spatial.cKDTree(nodes_xyz)
except RuntimeError:
print(nodes_xyz)
raise RuntimeError(nodes_xyz)
return kdt
|
{"/pyNastran/bdf/cards/base_card.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/qt_files/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized/bdf.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/zona.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/dynamic_loads.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/utils/version.py": ["/pyNastran/__init__.py"], "/pyNastran/op2/tables/oes_stressStrain/oes.py": ["/pyNastran/op2/tables/oes_stressStrain/real/oes_plates.py"], "/pyNastran/op4/test/test_op4.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/cards/loads/static_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/converters/tecplot/tecplot.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_loader.py": ["/pyNastran/bdf/bdf_interface/hdf5_exporter.py"], "/pyNastran/gui/qt_files/gui_attributes.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/qt_files/load_actions.py", "/pyNastran/bdf/cards/base_card.py", "/pyNastran/utils/__init__.py"], "/pyNastran/dev/bdf_vectorized2/test/test_bdf.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/utils/__init__.py": ["/pyNastran/__init__.py"], "/pyNastran/bdf/bdf_interface/attributes.py": ["/pyNastran/utils/__init__.py", "/pyNastran/bdf/cards/aero/zona.py"], "/pyNastran/op2/op2_geom.py": ["/pyNastran/op2/tables/geom/edt.py"], "/pyNastran/converters/nastran/gui/nastran_io.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/cards/aero/zona.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/gui/gui_objects/settings.py"], "/pyNastran/bdf/cards/superelements.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/gui/gui_objects/settings.py": 
["/pyNastran/utils/__init__.py"], "/pyNastran/op2/test/test_op2.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/loads.py": ["/pyNastran/bdf/cards/loads/static_loads.py"], "/pyNastran/gui/dev/gui2/load_actions.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/bdf/bdf_interface/hdf5_exporter.py": ["/pyNastran/utils/__init__.py"], "/pyNastran/gui/gui_common.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/mesh_utils/test/test_cutting_plane.py": ["/pyNastran/__init__.py", "/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/loads/dloads.py": ["/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/mesh_utils/utils.py": ["/pyNastran/__init__.py", "/pyNastran/bdf/mesh_utils/bdf_equivalence.py"], "/pyNastran/bdf/cards/deqatn.py": ["/pyNastran/bdf/cards/base_card.py"], "/pyNastran/gui/dev/gui2/gui2.py": ["/pyNastran/__init__.py", "/pyNastran/gui/gui_objects/settings.py", "/pyNastran/gui/dev/gui2/load_actions.py"], "/pyNastran/op2/op2_interface/op2_scalar.py": ["/pyNastran/__init__.py", "/pyNastran/utils/__init__.py"], "/pyNastran/bdf/cards/constraints.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/op2/tables/geom/edt.py": ["/pyNastran/op2/op2_geom.py"], "/pyNastran/bdf/cards/bdf_sets.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"], "/pyNastran/bdf/cards/aero/dynamic_loads.py": ["/pyNastran/bdf/cards/base_card.py", "/pyNastran/bdf/bdf_interface/assign_type.py"]}
|
33,010,135
|
wdssmq/userscript
|
refs/heads/main
|
/bin/gm_md_build.py
|
""" 读取和处理脚本文件 """
import os
import re
import datetime
from bin.base import fnGetFilesInDir2, fnLog
# Front-matter template for the generated markdown pages; the
# placeholders are filled in by gm_md_build() via str.format().
md_head_tpl = """---
title: {title}
description: {description}
pubDate: {pubDate}
updateDate: {updateDate}
# heroImage: {heroImage}
gitUrl: {gitUrl}
gitUrlRaw: {gitUrlRaw}
cdnUrl: {cdnUrl}
docUrl: {docUrl}
tags: []
---\n"""
def gm_read_js(file_js, file_name):
    """Parse a userscript file and return its metadata as a dict.

    Extracts the @name and @description header values and derives the
    script's base name from *file_name* (".user.js" stripped).
    """
    with open(file_js, "r", encoding="UTF-8") as js_fh:
        source = js_fh.read()
    names = re.findall(r"@name\s+([^\n]+)", source)
    descriptions = re.findall(r"@description\s+([^\n]+)", source)
    return {
        "name": names[0],
        "desc": descriptions[0],
        "file_gm": file_name.replace(".user.js", ""),
        "file_full": file_name,
        "body": "",
    }
def gm_read_doc(file_doc):
    """Return the full text of a script's introduction markdown file."""
    with open(file_doc, "r", encoding="UTF-8") as doc_fh:
        return doc_fh.read()
def gm_build_link(branch, gm_info):
    """Build the GitHub / raw / CDN / docs URLs for one userscript.

    *branch* selects the git ref for the dist links; the docs link
    always points at the main branch's packages folder.
    """
    dist_file = gm_info["file_full"]
    base_git = f"https://github.com/wdssmq/userscript/blob/{branch}/dist/{dist_file}"
    return {
        "gitUrl": base_git,
        "gitUrlRaw": f"{base_git}?raw=true",
        "cdnUrl": f"https://cdn.jsdelivr.net/gh/wdssmq/userscript@{branch}/dist/{dist_file}",
        "docUrl": f"https://github.com/wdssmq/userscript/tree/main/packages/{gm_info['file_gm']}#readme",
    }
# Timestamp captured at import time; used for updateDate always and for
# pubDate when the markdown page does not exist yet.
now_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")


def gm_md_time(gm_md_file):
    """Return (pub_time, up_time) for the generated markdown file.

    updateDate is always "now"; pubDate is preserved from an existing
    file so re-builds do not overwrite the original publish date.
    Fix: an existing file without a "pubDate:" line previously raised
    IndexError on findall(...)[0]; it now falls back to now_time.
    """
    pub_time = now_time
    up_time = now_time
    if os.path.exists(gm_md_file):
        with open(gm_md_file, "r", encoding="UTF-8") as f:
            con_md = f.read()
        matches = re.findall(r"pubDate: ([^\n]+)", con_md)
        if matches:
            pub_time = matches[0]
    return (pub_time, up_time)
def gm_read_dist(path, changed=()):
    """Read every .js file under *path* and return their metadata dicts.

    changed: optional collection of absolute file paths; when non-empty,
    files not in it are skipped (incremental builds).
    Fix: the default was a mutable list (`changed=[]`), shared across
    calls; an empty tuple is immutable and behaves identically.
    """
    gm_info_list = []
    for file_name in fnGetFilesInDir2(path, ".js"):
        file_path = os.path.join(path, file_name)
        # Incremental mode: only rebuild files reported as modified.
        if changed and file_path not in changed:
            fnLog(f"跳过:{file_path}")
            continue
        gm_info_list.append(gm_read_js(file_path, file_name))
    return gm_info_list
def gm_md_build(gob_config):
    """Generate one markdown introduction page per script in dist/.

    For each script: keep the existing pubDate (if any), refresh the
    updateDate, rebuild the links, and write the page to gm_md_path.
    """
    for gm_info in gm_read_dist(gob_config["gm_dist_path"], gob_config["changed"]):
        # Target markdown path for this script.
        md_path = os.path.join(gob_config["gm_md_path"], gm_info["file_gm"] + ".md")
        # Publish date survives rebuilds; update date is always "now".
        pub_time, up_time = gm_md_time(md_path)
        link_info = gm_build_link("main", gm_info)
        page = md_head_tpl.format(
            title=gm_info["name"],
            description=gm_info["desc"],
            pubDate=pub_time,
            updateDate=up_time,
            heroImage='""',
            **link_info,
        )
        page += "\n" + gm_info["body"]
        with open(md_path, "w", encoding="UTF-8") as md_fh:
            md_fh.write(page)
|
{"/bin/gm_md_build.py": ["/bin/base.py"], "/main.py": ["/bin/base.py", "/bin/gm_md_build.py"]}
|
33,010,136
|
wdssmq/userscript
|
refs/heads/main
|
/main.py
|
""" 项目入口文件 """
import os
from bin.base import fnLog
from bin.gm_md_build import gm_md_build
# Global build configuration shared with bin.gm_md_build.
gob_config = {
    # README.md at the repository root.
    "readme_file": os.path.join(os.getcwd(), "README.md"),
    # Folder containing the built userscripts.
    "gm_dist_path": os.path.join(os.getcwd(), "dist"),
    # Output folder for the generated markdown pages.
    "gm_md_path": os.path.join(os.getcwd(), "site-astro", "src", "content", "gm_md"),
    # Script source folder, used to read per-script README.md files.
    "gm_src_path": os.path.join(os.getcwd(), "packages"),
    # Absolute paths of files changed in this build (filled by fnInit).
    "changed": [],
}
def fnInit():
    """Initialize global config from the environment.

    Reads GIT_CHANGED_FILES (space-separated relative paths) and stores
    each entry as an absolute path under the current directory in
    gob_config["changed"].  Leaves the config untouched when the
    variable is unset or blank.
    """
    # os.environ.get replaces the old try/except KeyError dance; the
    # strip() also avoids turning a whitespace-only value into [""].
    changed = os.environ.get("GIT_CHANGED_FILES", "").strip()
    if changed:
        cwd = os.getcwd()
        gob_config["changed"] = [
            os.path.join(cwd, name) for name in changed.split(" ")
        ]
def fnMain():
    """Entry point: build the per-script markdown pages."""
    gm_md_build(gob_config)


if __name__ == "__main__":
    # Guard so importing main.py (e.g. from tests) has no side effects;
    # running it as a script behaves exactly as before.
    fnInit()
    fnMain()
|
{"/bin/gm_md_build.py": ["/bin/base.py"], "/main.py": ["/bin/base.py", "/bin/gm_md_build.py"]}
|
33,010,137
|
wdssmq/userscript
|
refs/heads/main
|
/bin/base.py
|
""" 通用函数封装 """
import os
import time
# pylint: disable=invalid-name
def fnEmpty(arg):
    """No-op passthrough used to silence unused-variable lint warnings."""
    return arg
def fnLog(msg="", tip=None, log_type=""):
    """Print a log line formatted as "_{log_type}{msg}{tip}".

    msg may be a str, an int, or a list (joined with commas); tip, when
    given, is appended as " ← tip".  An empty msg with no tip prints a
    blank line.
    Idiom fixes: `tip is not None` instead of `not tip is None`,
    plain string truthiness instead of any(<str>), and ",".join
    instead of a manual accumulator loop.
    """
    tip = f" ← {tip}" if tip is not None else ""
    if isinstance(msg, list):
        msg = ",".join(str(x) for x in msg)
    elif isinstance(msg, int):
        msg = str(msg)
    if not msg and not tip:
        print("")
    else:
        print(f"_{log_type}{msg}{tip}")
def fnBug(msg, tip=None, debug=True):
    """Emit a debug-level log line when debug is enabled."""
    if not debug:
        return
    fnLog(msg, tip, "[debug]")
def fnErr(msg, tip=None):
    """Emit an error-level log line via fnLog."""
    fnLog(msg, tip, "_[err]")
def fnGetTimeStr(time_stamp):
""" 时间戳转换 """
return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time_stamp))
# 时间戳转换
def fnGetDirsInDir(path):
    """Return the names of the immediate sub-directories of *path*.

    Bug fix: each entry name must be joined with *path* before the
    isdir() check; the old code tested bare names against the current
    working directory, so it only worked when cwd == path.
    """
    return [x for x in os.listdir(path)
            if os.path.isdir(os.path.join(path, x))]
def fnGetFilesInDir(path):
    """Return the names of non-directory entries directly under *path*.

    Bug fix: joins each entry with *path* before the isdir() check;
    the old code tested bare names against the current working
    directory, so it only worked when cwd == path.
    """
    return [x for x in os.listdir(path)
            if not os.path.isdir(os.path.join(path, x))]
def fnGetFilesInDir2(path, ext):
    """Return the names of files directly under *path* whose extension
    equals *ext* (e.g. ".js", exact case-sensitive match).

    Bug fix: joins each entry with *path* before the isdir() check;
    the old code tested bare names against the current working
    directory, so it only worked when cwd == path.
    """
    return [x for x in os.listdir(path)
            if not os.path.isdir(os.path.join(path, x))
            and os.path.splitext(x)[1] == ext]
def fnGetFileTime(file):
    """Return (mtime, ctime) of *file* as integer Unix timestamps."""
    st = os.stat(file)  # single stat() call instead of two syscalls
    return (int(st.st_mtime), int(st.st_ctime))
|
{"/bin/gm_md_build.py": ["/bin/base.py"], "/main.py": ["/bin/base.py", "/bin/gm_md_build.py"]}
|
33,020,980
|
vshaladhav97/employee_management
|
refs/heads/master
|
/emp_manage/emp/admin.py
|
from django.contrib import admin
from .models import Employees, AddressDetails
# Register your models here.
# Expose the Employees table in the Django admin with default options.
admin.site.register(Employees)
# @admin.register(Employees)
# class EmployeesAdmin(admin.ModelAdmin):
#     def has_change_permission(self, request, obj=None):
#         return False
#     def has_delete_permission(self, request, obj=None):
#         return False
#     # to disable view and add you can do this
#     def has_view_permission(self, request, obj=None):
#         return True
#     def has_add_permission(self, request):
#         return False
# admin.site.register(Documents)
# Expose the AddressDetails table in the Django admin.
admin.site.register(AddressDetails)
# admin.site.register(EmployeeStatus)
# admin.site.register(Roles)
# admin.site.register(DocumentVersions)
# admin.site.register(EmployeeDocument)
# admin.site.register(DocumentFolder)
|
{"/emp_manage/emp/views.py": ["/emp_manage/emp/serializers.py"]}
|
33,020,981
|
vshaladhav97/employee_management
|
refs/heads/master
|
/emp_manage/emp/views.py
|
from django.shortcuts import render, HttpResponseRedirect, redirect
from django.http.response import Http404, JsonResponse
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth import authenticate, login, logout
from .models import Employees, AddressDetails
from rest_framework.views import APIView
from .serializers import AddressDetailsSerializer, GetEmployeesSerializer, AddressDetailupdateSerializer, EmployeesSerializer1
from django.contrib import messages
from .forms import SignUpForm
from rest_framework import status
from rest_framework.response import Response
from django.contrib.auth.decorators import login_required
from .decorators import unauthenticated_user, allowed_users
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import Group, Permission
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import Permission
# Create your views here.
# signup function
@unauthenticated_user
def sign_up(request):
    """Create a new account via SignUpForm and put it in the 'employee' group.

    POST with a valid form: saves the user, queues a success message and
    redirects to '/'.  GET (or an invalid POST): renders the signup
    template.  NOTE(review): the form is bound to request.POST even on
    GET; the trailing else replaces it with an unbound form in that case.
    """
    form = SignUpForm(request.POST)
    if request.method == "POST":
        if form.is_valid():
            user = form.save()
            username = form.cleaned_data.get('username')
            # Every self-registered account lands in the 'employee' group.
            group = Group.objects.get(name='employee')
            user.groups.add(group)
            messages.success(request, 'Account was created for ' + username)
            return HttpResponseRedirect('/')
    else:
        form = SignUpForm()
    return render(request, 'enroll/signup.html', {'form': form})
# login function
@unauthenticated_user
def user_login(request):
    """Authenticate a user and return their group permission codenames.

    On a successful POST login this responds with JSON
    {'perm': [<codename>, ...]} (no redirect); on failure it queues an
    error message and re-renders the login template.
    """
    if request.method == 'POST':
        fm = AuthenticationForm(request=request, data=request.POST)
        if fm.is_valid():
            uname = fm.cleaned_data['username']
            upass = fm.cleaned_data['password']
            user = authenticate(username=uname, password=upass)
            if user is not None:
                login(request, user)
                # Collect the codenames of all permissions granted via
                # the user's groups.
                group_permissions = list(Permission.objects.filter(
                    group__user=request.user).values("codename"))
                perm = []
                for group_permission in group_permissions:
                    perm.append(group_permission["codename"])
                return JsonResponse({'perm': perm}, status=200)
            else:
                messages.info(request, 'Username OR password is incorrect')
    else:
        fm = AuthenticationForm()
    return render(request, 'enroll/userlogin.html', {'form': fm})
# logout function
def logoutUser(request):
    """Log the current user out and send them to the login page."""
    logout(request)
    return redirect('/login/')
class Management(APIView):
    """List all employees (GET) and create employee+address rows (POST)."""
    def get(self, request):
        """Return every Employees row serialized with its address fields."""
        employee = Employees.objects.all()
        serializer = GetEmployeesSerializer(employee, many=True)
        return Response(serializer.data)
    # permissions for user and admin.
    # NOTE(review): method_decorator(..., name='dispatch') is intended
    # for decorating a class-based view, not an individual method —
    # confirm these guards actually take effect here.
    @method_decorator(login_required(login_url='login'), name='dispatch')
    @method_decorator(allowed_users(allowed_roles=['admin']), name='dispatch')
    # for posting data to data base.
    def post(self, request):
        """Create one AddressDetails row plus its nested Employees row.

        Re-shapes the flat request payload into the nested structure
        AddressDetailsSerializer expects (address + list of employees).
        """
        json_data = request.data
        details = {"employees": [{"first_name": json_data["first_name"], "last_name":json_data["last_name"], "username": json_data["username"],
                                  "date_of_birth":json_data["date_of_birth"], "gender": json_data["gender"], "email_address":json_data["email_address"], "contact_number":json_data["contact_number"], "deleted": json_data["deleted"]}], "address_line_1": json_data["address_line_1"], "address_line_2": json_data["address_line_2"], "city": json_data["city"], "country": json_data["country"], "pincode": json_data["pincode"]}
        address = AddressDetailsSerializer(data=details)
        if address.is_valid():
            address.save()
            return Response(status=200)
        else:
            return Response(status=400)
class ManagementDetails(APIView):
    """Retrieve (GET), update (PUT) and delete (DELETE) a single employee."""
    def get_object(self, id):
        """Return the Employees row with the given id or raise Http404."""
        try:
            return Employees.objects.get(id=id)
        except Employees.DoesNotExist:
            raise Http404
    def get(self, request, id):
        # Serialize one employee together with its flattened address fields.
        addressdetails = self.get_object(id)
        serializer1 = GetEmployeesSerializer(addressdetails)
        return Response(serializer1.data)
    # NOTE(review): name='dispatch' on a per-method decorator is suspect
    # (see Management.post) — confirm these guards actually apply.
    @method_decorator(login_required(login_url='login'), name='dispatch')
    @method_decorator(allowed_users(allowed_roles=['admin']), name='dispatch')
    def put(self, request, id):
        """Update the AddressDetails row and Employees row sharing this id.

        NOTE(review): the address is looked up by the URL id while the
        employee payload carries its own "id" field — confirm the two
        always refer to the same record.
        """
        json_data = request.data
        address_data = AddressDetails.objects.get(id=id)
        address_details_data = {
            "id": json_data["id"],
            "address_line_1": json_data["address_line_1"],
            "address_line_2": json_data["address_line_2"],
            "city": json_data["city"],
            "country": json_data["country"],
            "pincode": json_data["pincode"]
        }
        addressserializer = AddressDetailupdateSerializer(
            data=address_details_data, instance=address_data)
        emp_data = Employees.objects.get(id=id)
        emp_details_data = {
            "id": json_data["id"],
            "first_name": json_data["first_name"],
            "last_name": json_data["last_name"],
            "username": json_data["username"],
            "date_of_birth": json_data["date_of_birth"],
            "gender": json_data["gender"],
            "email_address": json_data["email_address"],
            "contact_number": json_data["contact_number"],
            "addressdetails": json_data["addressdetails"],
            "deleted": json_data["deleted"]
        }
        empserializer = EmployeesSerializer1(
            data=emp_details_data, instance=emp_data)
        if addressserializer.is_valid() and empserializer.is_valid():
            addressserializer.save()
            empserializer.save()
            return Response(addressserializer.data and empserializer.data)
        return Response(addressserializer.errors and empserializer.errors, status=status.HTTP_400_BAD_REQUEST)
    # NOTE(review): import inside the class body — it works (and binds
    # csrf_exempt as a class attribute) but belongs at module top.
    from django.views.decorators.csrf import csrf_exempt
    @method_decorator(login_required(login_url='login'), name='dispatch')
    @method_decorator(allowed_users(allowed_roles=['admin']), name='dispatch')
    @csrf_exempt
    def delete(self, request, id):  # to delete record from table
        """Delete the AddressDetails row with this id.

        NOTE(review): this deletes the address record, not the Employees
        row directly — confirm the FK cascade removes the employee too.
        """
        addressdetails = AddressDetails.objects.get(id=id)
        addressdetails.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
def clients1(request):
    """Render the add-employee page for the signed-in user."""
    context = {'user_name': request.user.username}
    return render(request, "enroll/addemp.html", context)
def clients2(request):
    """Render the show-employees page for the signed-in user."""
    context = {'user_name': request.user.username}
    return render(request, "enroll/showemp.html", context)
def clients3(request):
    """Render the employee update template (no extra context)."""
    return render(request, "enroll/update.html")
def save_data_test(request, id):
    """Update an address row (by URL id) and an employee row (by posted id).

    On POST, validates both serializers and saves them together; responds
    with a small JSON status.  NOTE(review): unlike ManagementDetails.put
    this view has no login/role guards — confirm that is intentional.
    Returns None implicitly for non-POST requests.
    """
    if request.method == "POST":
        json_data = request.POST
        # Address looked up by the URL id; employee by the posted id —
        # NOTE(review): confirm the two ids always match.
        address = AddressDetails.objects.get(id=id)
        data = {
            "id": json_data["id"],
            "address_line_1": json_data["address_line_1"],
            "address_line_2": json_data["address_line_2"],
            "city": json_data["city"],
            "country": json_data["country"],
            "pincode": json_data["pincode"]
        }
        # "music" is just the address serializer (name left over from a
        # tutorial).
        music = AddressDetailupdateSerializer(data=data, instance=address)
        employee = Employees.objects.get(id=json_data["id"])
        emp_data = {
            "id": json_data["id"],
            "first_name": json_data["first_name"],
            "last_name": json_data["last_name"],
            "username": json_data["username"],
            "date_of_birth": json_data["date_of_birth"],
            "gender": json_data["gender"],
            "email_address": json_data["email_address"],
            "contact_number": json_data["contact_number"],
            "deleted": json_data["deleted"]
        }
        empserializer = EmployeesSerializer1(data=emp_data, instance=employee)
        if music.is_valid() and empserializer.is_valid():
            music.save()
            empserializer.save()
            return JsonResponse({"success": "success"}, status=status.HTTP_201_CREATED)
        return JsonResponse({"error": "error"}, status=status.HTTP_400_BAD_REQUEST)
|
{"/emp_manage/emp/views.py": ["/emp_manage/emp/serializers.py"]}
|
33,020,982
|
vshaladhav97/employee_management
|
refs/heads/master
|
/emp_manage/emp/urls.py
|
from django.contrib import admin
from django.urls import path
from django.contrib.auth.decorators import login_required
from emp.views import sign_up, user_login, Management, clients1, clients2, clients3, ManagementDetails, logoutUser, save_data_test
urlpatterns = [
    path('admin/', admin.site.urls),
    # Authentication.
    path('signup/', sign_up, name='signup'),
    path('login/', user_login, name='login'),
    path('logout/', logoutUser, name="logout"),
    # Employee list/detail API and the pages that consume it.
    path('rest_client1/classproduct/', Management.as_view()),
    path('',login_required(clients2), name='show'),
    path('<int:id>', clients2),
    path('classproduct/', login_required(Management.as_view())),
    path('classproduct/<int:id>', ManagementDetails.as_view()),
    # Create flow.
    path('create/', login_required(clients1)),
    path('create/classproduct/', Management.as_view()),
    path('create/classproduct/<int:addressdetails>', ManagementDetails.as_view()),
    # Update flow.
    path('update/', clients3),
    path('update/<int:id>', clients3),
    path('update/classproduct/', Management.as_view()),
    path('update/classproduct/<int:id>', ManagementDetails.as_view()),
    path('update-employee/<int:id>',save_data_test, name='update-employee'),
    path('test/',Management.as_view(), name='post'),
]
|
{"/emp_manage/emp/views.py": ["/emp_manage/emp/serializers.py"]}
|
33,020,983
|
vshaladhav97/employee_management
|
refs/heads/master
|
/emp_manage/emp/migrations/0006_auto_20210107_1639.py
|
# Generated by Django 3.1.3 on 2021-01-07 11:09
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the unused Album and Musician models."""
    dependencies = [
        ('emp', '0005_album_musician'),
    ]
    operations = [
        migrations.DeleteModel(
            name='Album',
        ),
        migrations.DeleteModel(
            name='Musician',
        ),
    ]
|
{"/emp_manage/emp/views.py": ["/emp_manage/emp/serializers.py"]}
|
33,020,984
|
vshaladhav97/employee_management
|
refs/heads/master
|
/emp_manage/emp/serializers.py
|
from rest_framework import serializers
from .models import Employees, AddressDetails
class EmployeesSerializer(serializers.ModelSerializer):
    """Flat write serializer for Employees (used nested inside
    AddressDetailsSerializer; no explicit id field)."""
    class Meta:
        model = Employees
        fields = ('first_name', 'last_name', 'username', 'date_of_birth', 'gender',
                  'email_address', 'contact_number', 'deleted', 'addressdetails')
class GetEmployeesSerializer(serializers.ModelSerializer):
    """Read serializer for Employees with flattened address fields.

    The addressdetails__* fields are read-only projections of the
    related AddressDetails row.
    """
    addressdetails__address_line_1 = serializers.ReadOnlyField(
        source='addressdetails.address_line_1')
    addressdetails__address_line_2 = serializers.ReadOnlyField(
        source='addressdetails.address_line_2')
    addressdetails__city = serializers.ReadOnlyField(
        source='addressdetails.city')
    addressdetails__country = serializers.ReadOnlyField(
        source='addressdetails.country')
    addressdetails__pincode = serializers.ReadOnlyField(
        source='addressdetails.pincode')
    class Meta:
        model = Employees
        read_only_fields = ('id', 'addressdetails__address_line_1', 'addressdetails__address_line_2',
                            'addressdetails__city', 'addressdetails__country', 'addressdetails__pincode')
        fields = ('id', 'first_name', 'last_name', 'username', 'date_of_birth', 'gender', 'email_address', 'contact_number', 'deleted', 'addressdetails',
                  'addressdetails__address_line_1', 'addressdetails__address_line_2', 'addressdetails__city', 'addressdetails__country', 'addressdetails__pincode')
class GetEmployeesSerializer1(serializers.ModelSerializer):
    """Variant of GetEmployeesSerializer without the 'deleted' field."""
    addressdetails__address_line_1 = serializers.ReadOnlyField(
        source='addressdetails.address_line_1')
    addressdetails__address_line_2 = serializers.ReadOnlyField(
        source='addressdetails.address_line_2')
    addressdetails__city = serializers.ReadOnlyField(
        source='addressdetails.city')
    addressdetails__country = serializers.ReadOnlyField(
        source='addressdetails.country')
    addressdetails__pincode = serializers.ReadOnlyField(
        source='addressdetails.pincode')
    class Meta:
        model = Employees
        read_only_fields = ('id', 'addressdetails__address_line_1', 'addressdetails__address_line_2',
                            'addressdetails__city', 'addressdetails__country', 'addressdetails__pincode')
        fields = ('id', 'first_name', 'last_name', 'username', 'date_of_birth', 'gender', 'email_address', 'contact_number', 'addressdetails',
                  'addressdetails__address_line_1', 'addressdetails__address_line_2', 'addressdetails__city', 'addressdetails__country', 'addressdetails__pincode')
    def update(self, instance, validated_data):
        # NOTE(review): this sets plain attributes named
        # "addressdetails__*" on the instance and never calls save() —
        # it does not appear to persist anything; confirm before use.
        addressdetails = validated_data.pop('addressdetails')
        instance.addressdetails__address_line_1 = addressdetails.address_line_1
        instance.addressdetails__address_line_2 = addressdetails.address_line_2
        instance.addressdetails__city = addressdetails.city
        instance.addressdetails__country = addressdetails.country
        instance.addressdetails__pincode = addressdetails.pincode
        # ... plus any other fields you may want to update
        return instance
class AddressDetailsSerializer(serializers.ModelSerializer):
    """Nested serializer: one AddressDetails plus a list of Employees.

    create() writes the address first, then each nested employee with
    its FK pointing at the new address.
    """
    employees = EmployeesSerializer(many=True)
    class Meta:
        model = AddressDetails
        fields = ("id", "address_line_1", "address_line_2",
                  "city", "country", "pincode", "employees")
    def create(self, validated_data):
        # Split off the nested employee payloads, create the address,
        # then create each employee linked to it.
        employees_data = validated_data.pop('employees')
        addressdetails = AddressDetails.objects.create(**validated_data)
        for employee_data in employees_data:
            Employees.objects.create(
                addressdetails=addressdetails, **employee_data)
        return addressdetails
    def update(self, instance, validated_data):
        # NOTE(review): the field mapping here looks inverted — it reads
        # employee-style fields (first_name, ...) from validated_data of
        # an AddressDetails instance, and address-style fields from the
        # nested employees.  Confirm the intended behavior before reuse.
        employees_data = validated_data.pop('employees')
        employees = (instance.employees).all()
        employees = list(employees)
        instance.first_name = validated_data.get(
            'first_name', instance.first_name)
        instance.last_name = validated_data.get(
            'last_name', instance.last_name)
        instance.username = validated_data.get('username', instance.username)
        instance.date_of_birth = validated_data.get(
            'date_of_birth', instance.date_of_birth)
        instance.gender = validated_data.get('gender', instance.gender)
        instance.email_address = validated_data.get(
            'email_address', instance.email_address)
        instance.contact_number = validated_data.get(
            'contact_number', instance.contact_number)
        instance.deleted = validated_data.get('deleted', instance.deleted)
        instance.save()
        for employee_data in employees_data:
            employee = employees.pop(0)
            employee.address_line_1 = employee_data.get(
                'address_line_1', employee.address_line_1)
            employee.address_line_2 = employee_data.get(
                'address_line_2', employee.address_line_2)
            employee.city = employee_data.get('city', employee.city)
            employee.country = employee_data.get('country', employee.country)
            employee.pincode = employee_data.get('pincode', employee.pincode)
            employee.save()
        return instance
class EmployeesSerializer1(serializers.ModelSerializer):
    """Flat Employees serializer including the id field (used for updates)."""
    class Meta:
        model = Employees
        fields = ('id', 'first_name', 'last_name', 'username', 'date_of_birth',
                  'gender', 'email_address', 'contact_number', 'deleted', 'addressdetails')
class AddressDetailsSerializer1(serializers.ModelSerializer):
    """Nested serializer for AddressDetails with its related employees
    exposed read-only via 'employees_address'."""
    employees_address = EmployeesSerializer1(read_only=True)
    class Meta:
        model = AddressDetails
        fields = ("id", "address_line_1", "address_line_2", "city",
                  "country", "pincode", "employees_address")
    def update(self, instance, validated_data):
        # NOTE(review): 'employees_address' is declared read_only above,
        # so validated_data.pop('employees_address') would raise KeyError
        # if this update() ever runs — confirm this path is exercised.
        employees_data = validated_data.pop('employees_address')
        employees = (instance.employees_address).all()
        employees = list(employees)
        instance.address_line_1 = validated_data.get(
            'address_line_1', instance.address_line_1)
        instance.address_line_2 = validated_data.get(
            'address_line_2', instance.address_line_2)
        instance.city = validated_data.get('city', instance.city)
        instance.country = validated_data.get('country', instance.country)
        instance.pincode = validated_data.get('pincode', instance.pincode)
        instance.save()
        for employee_data in employees_data:
            employee = employees.pop(0)
            employee.first_name = employee_data.get(
                'first_name', employee.first_name)
            employee.last_name = employee_data.get(
                'last_name', employee.last_name)
            employee.username = employee_data.get(
                'username', employee.username)
            employee.date_of_birth = employee_data.get(
                'date_of_birth', employee.date_of_birth)
            employee.gender = employee_data.get('gender', employee.gender)
            employee.email_address = employee_data.get(
                'email_address', employee.email_address)
            employee.contact_number = employee_data.get(
                'contact_number', employee.contact_number)
            employee.deleted = employee_data.get('deleted', employee.deleted)
            employee.save()
        return instance
class AddressDetailupdateSerializer(serializers.ModelSerializer):
    """Plain AddressDetails serializer (all model fields) used for updates."""
    class Meta:
        model = AddressDetails
        fields = "__all__"
|
{"/emp_manage/emp/views.py": ["/emp_manage/emp/serializers.py"]}
|
33,020,985
|
vshaladhav97/employee_management
|
refs/heads/master
|
/emp_manage/emp/migrations/0007_auto_20210113_1412.py
|
# Generated by Django 3.1.3 on 2021-01-13 08:42
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the document/role/status models and their foreign keys.

    FKs are removed first so the model deletions below do not trip
    dependency checks.
    """
    dependencies = [
        ('emp', '0006_auto_20210107_1639'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='documentfolder',
            name='documentversions',
        ),
        migrations.RemoveField(
            model_name='documentfolder',
            name='parent',
        ),
        migrations.RemoveField(
            model_name='documentversions',
            name='documents',
        ),
        migrations.RemoveField(
            model_name='documentversions',
            name='uploaded_by',
        ),
        migrations.RemoveField(
            model_name='employeedocument',
            name='documentversion',
        ),
        migrations.RemoveField(
            model_name='employeedocument',
            name='employees',
        ),
        migrations.DeleteModel(
            name='EmployeeStatus',
        ),
        migrations.DeleteModel(
            name='Roles',
        ),
        migrations.DeleteModel(
            name='DocumentFolder',
        ),
        migrations.DeleteModel(
            name='Documents',
        ),
        migrations.DeleteModel(
            name='DocumentVersions',
        ),
        migrations.DeleteModel(
            name='EmployeeDocument',
        ),
    ]
|
{"/emp_manage/emp/views.py": ["/emp_manage/emp/serializers.py"]}
|
33,071,239
|
wuyue92tree/service_manager
|
HEAD
|
/service_manager/apps/Ansible/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from service_manager.libs.accounts.models import AccountUser
# Create your models here.
from service_manager.middleware import threadlocals
class Supplier(models.Model):
    """A hosting provider that Config hosts belong to."""
    # Provider name; must be unique.
    name = models.CharField(max_length=255, unique=True, verbose_name="主机服务商")
    description = models.TextField(verbose_name="服务商描述")
    def __unicode__(self):
        return "%s" % self.name
    class Meta:
        verbose_name = "主机服务商配置"
        verbose_name_plural = verbose_name
class Config(models.Model):
    """SSH/host configuration for a managed machine."""
    # OS platform choices for a host.
    PLATFORM_CHOICE = (
        ("LINUX", "LINUX"),
        ("WINDOW", "WINDOWS"),
        ("MAC", "MAC")
    )
    host = models.CharField(max_length=255, verbose_name=u"主机地址")
    port = models.IntegerField(default=22, verbose_name=u"端口")
    username = models.CharField(max_length=255, blank=True, null=True,
                                verbose_name=u"登录用户")
    password = models.CharField(max_length=255, blank=True, null=True,
                                verbose_name=u"登录密码")
    supplier = models.ForeignKey("Supplier", verbose_name=u"主机服务商")
    platform = models.CharField(max_length=255, choices=PLATFORM_CHOICE,
                                verbose_name=u"系统平台")
    owner = models.ForeignKey(AccountUser, verbose_name=u"创建者",
                              related_name='ansible_config')
    create_time = models.DateTimeField(auto_now=True, verbose_name=u"加入时间")
    def __unicode__(self):
        return u"%s:%d" % (self.host, self.port)
    def save(self, *args, **kwargs):
        # Auto-fill the owner from the request's thread-local user when
        # it was not set explicitly.
        if not self.owner_id:
            self.owner = threadlocals.get_current_user()
        super(Config, self).save(*args, **kwargs)
    class Meta:
        unique_together = ('host', 'port', 'owner')
        verbose_name = "主机配置"
        verbose_name_plural = verbose_name
|
{"/service_manager/libs/accounts/forms.py": ["/service_manager/libs/accounts/models.py", "/service_manager/apps/Ansible/models.py"], "/service_manager/libs/accounts/views.py": ["/service_manager/libs/accounts/forms.py", "/service_manager/libs/accounts/models.py", "/service_manager/apps/Ansible/models.py"], "/service_manager/apps/Ansible/models.py": ["/service_manager/libs/accounts/models.py"], "/service_manager/libs/accounts/urls.py": ["/service_manager/libs/accounts/views.py"], "/service_manager/apps/Ansible/admin.py": ["/service_manager/apps/Ansible/models.py"]}
|
33,078,374
|
kishanshetty1991/expensewebsite
|
refs/heads/master
|
/expenses/migrations/0002_auto_20210814_1456.py
|
# Generated by Django 3.2.6 on 2021-08-14 14:56
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Rename Category's plural in the admin and default Expense.date
    to the current time."""
    dependencies = [
        ('expenses', '0001_initial'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='category',
            options={'verbose_name_plural': 'Categories'},
        ),
        migrations.AlterField(
            model_name='expense',
            name='date',
            field=models.DateField(blank=True, default=django.utils.timezone.now, null=True),
        ),
    ]
|
{"/auth_app/urls.py": ["/auth_app/views.py"], "/auth_app/views.py": ["/auth_app/utils.py", "/user_profile/models.py"], "/user_profile/views.py": ["/user_profile/models.py", "/user_profile/utils.py"], "/expense_app/admin.py": ["/expense_app/models.py"], "/income_app/api.py": ["/income_app/models.py"], "/income_app/views.py": ["/income_app/models.py", "/user_profile/models.py"], "/income_app/admin.py": ["/income_app/models.py"], "/generate_data.py": ["/expense_app/models.py", "/income_app/models.py", "/user_profile/models.py"], "/expense_project/views.py": ["/expense_app/models.py", "/income_app/models.py", "/user_profile/models.py"], "/expense_app/views.py": ["/expense_app/models.py", "/user_profile/models.py", "/expense_app/utils.py"], "/expense_app/utils.py": ["/expense_app/models.py", "/auth_app/utils.py", "/user_profile/models.py"], "/expense_app/api.py": ["/expense_app/models.py"], "/user_profile/migrations/0002_auto_20210302_1614.py": ["/user_profile/models.py"]}
|
33,078,375
|
kishanshetty1991/expensewebsite
|
refs/heads/master
|
/expenses/views.py
|
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from .models import Expense, Category
from django.contrib import messages
from django.contrib.auth.models import User
from django.core.paginator import Paginator
import json
from django.utils.timezone import localtime
from django.http import JsonResponse, HttpResponse
from userpreferences.models import UserPreference
import datetime
import csv,xlwt
from django.template.loader import render_to_string
from weasyprint import HTML
import tempfile
from django.db.models import Sum
def search_expenses(request):
    """AJAX search over the current user's expenses.

    Matches the search text as an amount/date prefix or as a substring
    of description/category, and returns matching rows as a JSON list.
    NOTE(review): returns None implicitly on non-POST, and has no
    login_required guard — confirm both are intentional.
    """
    if request.method == 'POST':
        search_str = json.loads(request.body).get('searchText')
        expenses = Expense.objects.filter(amount__istartswith=search_str,owner=request.user) | Expense.objects.filter(
            date__istartswith=search_str,owner=request.user) | Expense.objects.filter(
            description__icontains=search_str,owner=request.user) | Expense.objects.filter(
            category__icontains=search_str,owner=request.user)
        data = expenses.values()
        return JsonResponse(list(data), safe=False)
# Create your views here.
@login_required(login_url='/authentication/login')
def index(request):
    """Render the paginated expense list with the user's currency."""
    # NOTE(review): 'categories' is queried but never used below.
    categories = Category.objects.all()
    expenses = Expense.objects.filter(owner=request.user)
    # Page size of 2 is hard-coded here.
    paginator = Paginator(expenses, 2)
    page_number = request.GET.get('page')
    # Unbound call; equivalent to paginator.get_page(page_number).
    page_obj = Paginator.get_page(paginator, page_number)
    # Fall back to INR when the user never saved a preference.
    if UserPreference.objects.filter(user = request.user).exists():
        currency = UserPreference.objects.get(user = request.user).currency
    else:
        currency = 'INR - Indian Rupee'
    context = {
        'expenses': expenses,
        'page_obj': page_obj,
        'currency': currency
    }
    return render(request, 'expenses/index.html', context)
@login_required(login_url='/authentication/login')
def add_expense(request):
    """Create one expense for the current user.

    GET renders the form; POST validates amount/description/category,
    defaults a missing date to now, saves the row and redirects to the
    expense list.
    """
    categories = Category.objects.all()
    context = {
        'categories': categories,
        'values': request.POST
    }
    if request.method == 'GET':
        return render(request, 'expenses/add_expense.html', context)
    if request.method == 'POST':
        amount = request.POST['amount']
        if not amount:
            messages.error(request, "Amount is required")
            return render(request,'expenses/add_expense.html', context)
        description = request.POST['description']
        date = request.POST['expense_date']
        category = request.POST['category']
        if not description:
            messages.error(request, "Description is required")
            return render(request,'expenses/add_expense.html', context)
        if category == '':
            messages.error(request,'ExpenseCategory cannot be empty')
            return render(request,'expenses/add_expense.html', context)
        if date == '':
            # No date supplied: default to the current local time.
            date = localtime()
        Expense.objects.create(owner=request.user, amount=amount, date=date, category=category, description=description)
        messages.success(request, 'Expense saved successfully')
        return redirect('expenses')
@login_required(login_url='/authentication/login')
def expense_edit(request, id):
    """Edit an existing expense.

    GET renders the pre-filled form; POST validates and saves the row,
    then redirects to the expense list.
    NOTE(review): no owner check — any logged-in user can edit any
    expense by id; confirm that is acceptable.
    """
    expense = Expense.objects.get(pk=id)
    categories = Category.objects.all()
    context = {
        'expense': expense,
        'values': expense,
        'categories': categories
    }
    if request.method == 'GET':
        return render(request, 'expenses/edit-expense.html', context)
    if request.method == 'POST':
        amount = request.POST['amount']
        if not amount:
            messages.error(request, "Amount is required")
            return render(request,'expenses/edit-expense.html', context)
        description = request.POST['description']
        date = request.POST['expense_date']
        category = request.POST['category']
        if not description:
            messages.error(request, "Description is required")
            return render(request,'expenses/edit-expense.html', context)
        expense.owner=request.user
        expense.amount=amount
        expense.date=date
        expense.category=category
        expense.description=description
        expense.save()
        messages.success(request, 'Expense updated successfully')
        return redirect('expenses')
    # NOTE(review): unreachable — both the GET and POST branches above
    # always return before this point.
    messages.info(request, 'Handling post form')
    return render(request, 'expenses/edit-expense.html', context)
@login_required(login_url='/authentication/login')
def delete_expense(request, id):
    """Delete one expense and return to the expense list.

    Consistency/security fix: this was the only expense view without
    @login_required, so anonymous visitors could delete records by URL.
    NOTE(review): still no owner filter — consider get(pk=id, owner=request.user).
    """
    expense = Expense.objects.get(pk=id)
    expense.delete()
    messages.success(request, 'Expense Removed')
    return redirect('expenses')
def expense_category_summary(request):
    """Return JSON mapping each expense category to its total over the
    last ~6 months (30*6 days) for the current user."""
    todays_date = datetime.date.today()
    six_months_ago = todays_date - datetime.timedelta(days=30 * 6)
    expenses = Expense.objects.filter(
        owner=request.user, date__gte=six_months_ago, date__lte=todays_date)

    def get_expense_category_amount(category):
        # Sum the amounts of this user's expenses in one category.
        amount = 0
        for item in expenses.filter(category=category):
            amount += item.amount
        return amount

    # Performance fix: the original nested `for x in expenses` around the
    # category loop, recomputing every category total len(expenses) times.
    # One pass over the distinct categories produces the same dict.
    finalrep = {}
    for category in set(expense.category for expense in expenses):
        finalrep[category] = get_expense_category_amount(category)
    return JsonResponse({'expense_category_data': finalrep}, safe=False)
def stats_view(request):
    """Render the expense statistics page."""
    return render(request, 'expenses/stats.html')
def export_csv(request):
    """Download the current user's expenses as a CSV attachment."""
    response = HttpResponse(content_type='text/csv')
    # Bug fix: str(datetime.now()) contains spaces and colons, which break an
    # unquoted Content-Disposition filename. Use a safe timestamp and quote it.
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    response['Content-Disposition'] = \
        'attachment; filename="Expenses' + timestamp + '.csv"'
    writer = csv.writer(response)
    writer.writerow(['Amount', 'Description', 'Category', 'Date'])
    expenses = Expense.objects.filter(owner=request.user)
    for expense in expenses:
        writer.writerow([expense.amount, expense.description,
                         expense.category, expense.date])
    return response
def export_excel(request):
    """Download the current user's expenses as an .xls workbook (xlwt)."""
    response = HttpResponse(content_type='application/ms-excel')
    # Same Content-Disposition fix as export_csv: space/colon-free, quoted.
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    response['Content-Disposition'] = \
        'attachment; filename="Expenses' + timestamp + '.xls"'
    wb = xlwt.Workbook(encoding='utf-8')
    ws = wb.add_sheet('Expenses')
    row_num = 0
    # Bold style for the header row only; body rows get a plain style.
    header_style = xlwt.XFStyle()
    header_style.font.bold = True
    columns = ['Amount', 'Description', 'Category', 'Date']
    for col_num, title in enumerate(columns):
        ws.write(row_num, col_num, title, header_style)
    body_style = xlwt.XFStyle()
    rows = Expense.objects.filter(owner=request.user).values_list(
        'amount', 'description', 'category', 'date')
    for row in rows:
        row_num += 1
        for col_num, value in enumerate(row):
            ws.write(row_num, col_num, str(value), body_style)
    wb.save(response)
    return response
def export_pdf(request):
    """Render the user's expenses to PDF (WeasyPrint) and return it inline."""
    response = HttpResponse(content_type='application/pdf')
    # Bug fix: 'inline; attachment;' is a contradictory Content-Disposition;
    # keep 'inline' and use a quoted, space-free filename.
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    response['Content-Disposition'] = \
        'inline; filename="Expenses' + timestamp + '.pdf"'
    response['Content-Transfer-Encoding'] = 'binary'
    expenses = Expense.objects.filter(owner=request.user)
    total_sum = expenses.aggregate(Sum('amount'))
    html_string = render_to_string(
        'expenses/pdf-output.html',
        {'expenses': expenses, 'total': total_sum['amount__sum']})
    # Bug fix: the original wrote the PDF bytes to a NamedTemporaryFile, then
    # re-opened it by name (fails on Windows, leaked the second handle). The
    # bytes are already in memory — write them straight into the response.
    response.write(HTML(string=html_string).write_pdf())
    return response
|
{"/auth_app/urls.py": ["/auth_app/views.py"], "/auth_app/views.py": ["/auth_app/utils.py", "/user_profile/models.py"], "/user_profile/views.py": ["/user_profile/models.py", "/user_profile/utils.py"], "/expense_app/admin.py": ["/expense_app/models.py"], "/income_app/api.py": ["/income_app/models.py"], "/income_app/views.py": ["/income_app/models.py", "/user_profile/models.py"], "/income_app/admin.py": ["/income_app/models.py"], "/generate_data.py": ["/expense_app/models.py", "/income_app/models.py", "/user_profile/models.py"], "/expense_project/views.py": ["/expense_app/models.py", "/income_app/models.py", "/user_profile/models.py"], "/expense_app/views.py": ["/expense_app/models.py", "/user_profile/models.py", "/expense_app/utils.py"], "/expense_app/utils.py": ["/expense_app/models.py", "/auth_app/utils.py", "/user_profile/models.py"], "/expense_app/api.py": ["/expense_app/models.py"], "/user_profile/migrations/0002_auto_20210302_1614.py": ["/user_profile/models.py"]}
|
33,083,049
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/blog/migrations/0003_auto_20210719_1711.py
|
# Generated by Django 3.2.5 on 2021-07-19 14:11
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: rename Essay.essay -> content, Essay.essayTitle -> title."""
    dependencies = [
        ('blog', '0002_alter_essay_essay'),
    ]
    operations = [
        migrations.RenameField(
            model_name='essay',
            old_name='essay',
            new_name='content',
        ),
        migrations.RenameField(
            model_name='essay',
            old_name='essayTitle',
            new_name='title',
        ),
    ]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,083,050
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/users/migrations/0007_alter_profile_image.py
|
# Generated by Django 3.2.5 on 2021-07-21 15:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: alter Profile.image.

    NOTE(review): ImageField's height_field/width_field are meant to name
    *model fields* that receive the image dimensions; '100px' is a CSS value,
    not a field name, and likely fails at runtime. The fix belongs in the
    model (then re-generate this migration).
    """
    dependencies = [
        ('users', '0006_auto_20210721_1857'),
    ]
    operations = [
        migrations.AlterField(
            model_name='profile',
            name='image',
            field=models.ImageField(default='default.png', height_field='100px', upload_to='profile_pics', width_field='100px'),
        ),
    ]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,083,051
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/blog/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.urls import reverse
from ckeditor.fields import RichTextField
class Essay(models.Model):
    """A user-authored essay with a rich-text (CKEditor) body.

    author is nullable, but on_delete=CASCADE still removes a user's
    essays when the user row is deleted.
    """
    author = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
    title = models.CharField(max_length=150)
    content = RichTextField(blank=True, null=True)
    def __str__(self):
        # Shown wherever the object is stringified (e.g. the admin).
        return self.title
    # def get_absolute_url(self):
    #     return reverse("blog-paper", kwargs={"id":str(self.id)})
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,083,052
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/blog/migrations/0005_alter_essay_content.py
|
# Generated by Django 3.2.5 on 2021-07-20 13:39
from django.db import migrations
import ckeditor.fields
class Migration(migrations.Migration):
    """Auto-generated: switch Essay.content to a CKEditor RichTextField."""
    dependencies = [
        ('blog', '0004_alter_essay_author'),
    ]
    operations = [
        migrations.AlterField(
            model_name='essay',
            name='content',
            field=ckeditor.fields.RichTextField(blank=True, null=True),
        ),
    ]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,083,053
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/users/migrations/0005_auto_20210721_1857.py
|
# Generated by Django 3.2.5 on 2021-07-21 15:57
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: add Instructor (subclass of Profile) and Student
    models, and alter Profile.image.

    NOTE(review): these models are removed again by 0006; the pair of
    migrations is effectively a no-op but must both be kept for history.
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('users', '0004_alter_profile_image'),
    ]
    operations = [
        migrations.CreateModel(
            name='Instructor',
            fields=[
                ('profile_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='users.profile')),
            ],
            options={
                'permissions': [('can_create_assignments', 'Can create assignments')],
            },
            bases=('users.profile',),
        ),
        migrations.AlterField(
            model_name='profile',
            name='image',
            field=models.ImageField(default='default.png', height_field='100', upload_to='profile_pics', width_field='100'),
        ),
        migrations.CreateModel(
            name='Student',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'permissions': [('can_post_assignments', 'Can post assignments')],
            },
        ),
    ]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,083,054
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/blog/views.py
|
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from .models import Essay
from django.contrib import messages
from .forms import EssayForm
import ast
# Create your views here.
def home(request):
    """Render the public landing page."""
    context = {}
    return render(request, "blog/home.html", context)
@login_required
def Papers(request):
    """List the logged-in user's essays, newest first."""
    author = request.user
    # Robustness fix: the original indexed values_list() tuples positionally
    # (x[2], x[3]), which silently breaks if model fields are reordered.
    # Request the fields by name instead.
    rows = Essay.objects.filter(author=author).values("id", "title", "content")
    papers = [{"title": r["title"], "content": r["content"], "id": r["id"]}
              for r in rows]
    # reverses list
    papers = list(reversed(papers))
    context = {"papers": papers}
    return render(request, "blog/papers.html", context)
@login_required
def createPaper(request):
    """Create a new essay owned by the current user."""
    if request.method == "POST":
        form = EssayForm(request.POST)
        if form.is_valid():
            essay = form.save(commit=False)
            essay.author = request.user
            # Clarity fix: save the instance we just set the author on,
            # rather than calling form.save() a second time.
            essay.save()
            messages.success(request, "Essay Posted!")
            return redirect("blog-papers")
    else:
        form = EssayForm()
    # Re-render with a bound (invalid) form on POST, or a fresh one on GET.
    context = {"form": form}
    return render(request, "blog/create.html", context)
@login_required
def deletePaper(request, id):
    """Delete an essay by id.

    Security fix: restrict the lookup to the current user's essays so one
    user cannot delete another's paper by guessing ids (IDOR). Deleting a
    non-owned/missing id is now a silent no-op, as it was for missing ids.
    """
    Essay.objects.filter(id=id, author=request.user).delete()
    messages.info(request, "Essay Deleted!")
    return redirect("blog-papers")
@login_required
def getPaper(request, id):
    """Show a single essay.

    Robustness fix: the original indexed values_list() tuples positionally
    and raised IndexError (500) for a missing id; use get_object_or_404
    (already imported in this module) and named attributes instead.
    """
    essay = get_object_or_404(Essay, id=id)
    paper = {"title": essay.title, "content": essay.content, "id": essay.id}
    return render(request, "blog/paper.html", {"paper": paper})
@login_required
def editPaper(request, id):
    """Edit an existing essay.

    NOTE(review): no ownership check — any logged-in user can edit any
    essay; and obj is None for a missing id, so obj.id would raise after a
    valid POST. Consider get_object_or_404(Essay, id=id, author=request.user).
    """
    obj = Essay.objects.filter(id=id).first()
    if request.method == "POST":
        form = EssayForm(request.POST or None, instance=obj)
        if form.is_valid():
            # Fix: form.save(commit=False) immediately followed by
            # form.save() was redundant — a single save persists the edit.
            form.save()
            messages.success(request, "Essay Edited!")
            return redirect("blog-paper", id=obj.id)
    else:
        form = EssayForm(instance=obj)
    context = {"form": form, "id": id}
    return render(request, "blog/edit.html", context)
def about(request):
    """Render the static about page."""
    template_name = "blog/about.html"
    return render(request, template_name)
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,083,055
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/users/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Profile(models.Model):
    """Per-user profile (1:1 with auth.User) holding an avatar image."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    image = models.ImageField(default="default.png", upload_to = "profile_pics", editable = True, blank=True)
    # image_height = models.PositiveIntegerField(null=True, blank=True, editable=False, default="100")
    # image_width = models.PositiveIntegerField(null=True, blank=True, editable=False, default="100")
    # def __unicode__(self):
    #     return "{0}".format(self.image)
    # def save(self):
    #     if not self.image:
    #         return
    #     super(Profile, self).save()
    #     image = Image.open(self.photo)
    #     (width, height) = image.size
    #     size = ( 100, 100)
    #     image = image.resize(size, image.ANTIALIAS)
    #     image.save(self.photo.path)
    def __str__(self):
        # Shown in the admin and templates.
        return f"{self.user.username} Profile"
# class Student(models.Model):
# user_id = models.ForeignKey(User, on_delete=models.CASCADE)
# class Meta:
# permissions = [("can_post_assignments", "Can post assignments")]
# class Instructor(Profile):
# class Meta:
# permissions = [("can_create_assignments", "Can create assignments")]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,083,056
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/blog/forms.py
|
from django import forms
from .models import Essay
# creating a form
class EssayForm(forms.ModelForm):
    """ModelForm exposing the user-editable Essay fields (author is set by the view)."""
    class Meta:
        model = Essay
        fields = ["title", "content"]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,083,057
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/users/migrations/0006_auto_20210721_1857.py
|
# Generated by Django 3.2.5 on 2021-07-21 15:57
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: drop the Student and Instructor models added in 0005."""
    dependencies = [
        ('users', '0005_auto_20210721_1857'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='student',
            name='user_id',
        ),
        migrations.DeleteModel(
            name='Instructor',
        ),
        migrations.DeleteModel(
            name='Student',
        ),
    ]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,083,058
|
ZaidKaraymeh/Essay.io
|
refs/heads/master
|
/blog/urls.py
|
from django.urls import path
from .views import about, createPaper, editPaper, getPaper, home, Papers, deletePaper
# Blog URL routes; the names are referenced by redirect()/reverse() calls
# in blog/views.py (e.g. "blog-papers", "blog-paper").
urlpatterns = [
    path("", home, name="blog-home" ),
    path("papers/", Papers, name="blog-papers"),
    path("about/", about, name="blog-about"),
    path("create/", createPaper, name="blog-create"),
    path("paper/<int:id>/delete", deletePaper, name="blog-paper-delete"),
    path("paper/<int:id>/", getPaper, name="blog-paper"),
    path("paper/<int:id>/edit", editPaper, name="blog-paper-edit"),
]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/forms.py": ["/blog/models.py"]}
|
33,204,186
|
hayatoVTA/manga_walker
|
refs/heads/main
|
/backend/manga_walker_backend/migrations/0003_alter_storebook_category.py
|
# Generated by Django 3.2 on 2021-07-21 06:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: add related_name='category' to StoreBook.category."""
    dependencies = [
        ('manga_walker_backend', '0002_bookcomponent_cover_img'),
    ]
    operations = [
        migrations.AlterField(
            model_name='storebook',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='category', to='manga_walker_backend.bookcomponent'),
        ),
    ]
|
{"/backend/web-back/manga_walker_back/views.py": ["/backend/web-back/manga_walker_back/models.py"], "/backend/web-back/manga_walker_back/serializers.py": ["/backend/web-back/manga_walker_back/models.py"]}
|
33,204,187
|
hayatoVTA/manga_walker
|
refs/heads/main
|
/backend/manga_walker_backend/admin.py
|
from django.contrib import admin
from .models import StoreBook, BookComponent
# Expose both models in the Django admin with the default ModelAdmin.
admin.site.register(StoreBook)
admin.site.register(BookComponent)
|
{"/backend/web-back/manga_walker_back/views.py": ["/backend/web-back/manga_walker_back/models.py"], "/backend/web-back/manga_walker_back/serializers.py": ["/backend/web-back/manga_walker_back/models.py"]}
|
33,204,188
|
hayatoVTA/manga_walker
|
refs/heads/main
|
/backend/manga_walker_backend/migrations/0004_auto_20210721_1531.py
|
# Generated by Django 3.2 on 2021-07-21 06:31
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: make cover_img optional with a static upload path,
    and drop the related_name from StoreBook.category."""
    dependencies = [
        ('manga_walker_backend', '0003_alter_storebook_category'),
    ]
    operations = [
        migrations.AlterField(
            model_name='bookcomponent',
            name='cover_img',
            field=models.ImageField(blank=True, null=True, upload_to='cover'),
        ),
        migrations.AlterField(
            model_name='storebook',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='manga_walker_backend.bookcomponent'),
        ),
    ]
|
{"/backend/web-back/manga_walker_back/views.py": ["/backend/web-back/manga_walker_back/models.py"], "/backend/web-back/manga_walker_back/serializers.py": ["/backend/web-back/manga_walker_back/models.py"]}
|
33,204,189
|
hayatoVTA/manga_walker
|
refs/heads/main
|
/backend/manga_walker_backend/migrations/0002_bookcomponent_cover_img.py
|
# Generated by Django 3.2 on 2021-07-21 06:23
from django.db import migrations, models
import manga_walker_backend.models
class Migration(migrations.Migration):
    """Auto-generated: add BookComponent.cover_img with a callable upload path."""
    dependencies = [
        ('manga_walker_backend', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='bookcomponent',
            name='cover_img',
            field=models.ImageField(blank=True, null=True, upload_to=manga_walker_backend.models.upload_BookCoverImage_path),
        ),
    ]
|
{"/backend/web-back/manga_walker_back/views.py": ["/backend/web-back/manga_walker_back/models.py"], "/backend/web-back/manga_walker_back/serializers.py": ["/backend/web-back/manga_walker_back/models.py"]}
|
33,204,190
|
hayatoVTA/manga_walker
|
refs/heads/main
|
/backend/manga_walker_backend/migrations/0005_alter_storebook_category.py
|
# Generated by Django 3.2 on 2021-07-23 12:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: relax StoreBook.category from PROTECT to nullable SET_NULL."""
    dependencies = [
        ('manga_walker_backend', '0004_auto_20210721_1531'),
    ]
    operations = [
        migrations.AlterField(
            model_name='storebook',
            name='category',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='manga_walker_backend.bookcomponent'),
        ),
    ]
|
{"/backend/web-back/manga_walker_back/views.py": ["/backend/web-back/manga_walker_back/models.py"], "/backend/web-back/manga_walker_back/serializers.py": ["/backend/web-back/manga_walker_back/models.py"]}
|
33,204,191
|
hayatoVTA/manga_walker
|
refs/heads/main
|
/backend/manga_walker_backend/migrations/0001_initial.py
|
# Generated by Django 3.2 on 2021-07-21 05:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema: BookComponent and StoreBook tables."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='BookComponent',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='StoreBook',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('url', models.URLField()),
                ('stored_at', models.DateTimeField(auto_now_add=True)),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='manga_walker_backend.bookcomponent')),
            ],
        ),
    ]
|
{"/backend/web-back/manga_walker_back/views.py": ["/backend/web-back/manga_walker_back/models.py"], "/backend/web-back/manga_walker_back/serializers.py": ["/backend/web-back/manga_walker_back/models.py"]}
|
33,230,403
|
viktorzhelev/dev-ops-training
|
refs/heads/master
|
/app/pages/views.py
|
from flask import render_template, current_app, request, redirect, flash
from . import pages
from boto3 import client
from dotenv import load_dotenv
# Load AWS credentials/region from .env *before* constructing the SNS client.
load_dotenv()
sns = client('sns')
def send_sms(phone, msg):
    """Publish *msg* as a direct SMS to *phone* via the module-level SNS client."""
    # 'TopicArn='arn:aws:sns:us-east-1:750385577863:test',
    sns.publish(
        PhoneNumber=phone,
        Message=msg,
    )
@pages.route('/', methods=['GET', 'POST'])
def index_stores():
    """Landing page: on POST, send the submitted SMS then redirect (PRG)."""
    if request.method == 'POST':
        phone = request.form.get('phonenumber')
        msg = request.form.get('msg')
        # Bug fix: send first, flash afterwards — the original flashed the
        # success message *before* sns.publish ran, so a failed send still
        # reported success. (Debug print() calls also removed.)
        send_sms(phone, msg)
        flash(f'Successfully sent a text message to <strong>{phone}</strong>')
        return redirect('/')
    return render_template('index.html.jinja')
|
{"/app/pages/views.py": ["/app/pages/__init__.py"], "/app/__init__.py": ["/app/helpers/memoize.py", "/app/pages/__init__.py"]}
|
33,230,404
|
viktorzhelev/dev-ops-training
|
refs/heads/master
|
/app/helpers/memoize.py
|
import hashlib
from datetime import datetime, timedelta
from functools import wraps
from flask import current_app
import logging
class _Memoizer(object):
    """
    In-memory cache with per-entry expiry.

    memoize_decorator() wraps a function so repeated calls with identical
    kwargs return the cached result until the entry is older than
    ``expires_at``. The decorated function must be called with kwargs only.
    TODO - work with *args too
    """
    def __init__(self, expires: timedelta = timedelta(hours=1)):
        # {"hashed_request": {"inserted_at": datetime, "content": Any}}
        self._cache_storage = {}
        self.expires_at = expires

    def init_app(self, app):
        """Override the default expiry from MEMOIZE_EXPIRE_AFTER_HOURS, if set."""
        config_expires_hours = app.config.get("MEMOIZE_EXPIRE_AFTER_HOURS")
        if config_expires_hours:
            self.expires_at = timedelta(hours=config_expires_hours)

    @staticmethod
    def _get_request_hash(**kwargs):
        # Sort items so the digest is independent of keyword order.
        # (sorted(kwargs.items()) replaces the redundant list comprehension.)
        sorted_kwargs = sorted(kwargs.items())
        return hashlib.md5(str(sorted_kwargs).encode('utf-8')).hexdigest()

    def is_in_cache(self, **kwargs):
        """
        first will invalidate stale entries, then will check if we have a
        valid entry for the given kwargs
        """
        request_hash = self._get_request_hash(**kwargs)
        self._clean_stale()
        return request_hash in self._cache_storage

    def _clean_stale(self):
        # Idiom fix: the original shadowed the builtin `hash` as its loop
        # variable and built a throwaway list of pop() results.
        now = datetime.utcnow()
        stale = [key for key, item in self._cache_storage.items()
                 if (now - item['inserted_at']) > self.expires_at]
        for key in stale:
            self._cache_storage.pop(key)

    def get_from_cache(self, **kwargs):
        return self._cache_storage[self._get_request_hash(**kwargs)]['content']

    def set_cache(self, content, **kwargs):
        now = datetime.utcnow()
        self._cache_storage[self._get_request_hash(**kwargs)] = {
            'inserted_at': now, 'content': content}

    def memoize_decorator(self, func):
        @wraps(func)
        def inner(**kwargs):
            # Include the function name in the key so two decorated
            # functions called with identical kwargs don't collide.
            inner_kwargs = {**kwargs, '__func_name': func.__name__}
            if self.is_in_cache(**inner_kwargs):
                logging.debug(f'cache hit for {str(inner_kwargs)[:100]}')
                return self.get_from_cache(**inner_kwargs)
            logging.debug(f'cache miss for {str(inner_kwargs)[:100]}')
            response = func(**kwargs)
            self.set_cache(content=response, **inner_kwargs)
            return response
        return inner


memoizer = _Memoizer()


def memoized():
    """Return the shared memoizer's decorator (use as @memoized())."""
    return memoizer.memoize_decorator
|
{"/app/pages/views.py": ["/app/pages/__init__.py"], "/app/__init__.py": ["/app/helpers/memoize.py", "/app/pages/__init__.py"]}
|
33,230,405
|
viktorzhelev/dev-ops-training
|
refs/heads/master
|
/app/__init__.py
|
from flask import Flask
from flask_bootstrap import Bootstrap
import logging
from logging.config import dictConfig
from app.helpers.memoize import memoizer
def _base_app(app_config):
    """
    Build a barebone Flask app configured from *app_config*.

    :arg app_config - a config object; it is also given a chance to run
    init_app(app) for environment-specific setup.
    """
    configure_logging(app_config)
    flask_app = Flask(__name__)
    flask_app.config.from_object(app_config)
    app_config.init_app(flask_app)
    flask_app.static_folder = 'templates/static'
    flask_app.static_url_path = '/static'
    Bootstrap(flask_app)
    return flask_app
def create_app(app_config):
    """
    The factory function that creates the Flask app.
    Register all blueprints here.
    """
    # Fix: the module already imports logging at the top; the local
    # `import logging as log` alias was redundant shadowing.
    logging.info(f"Creating an app for environment: {app_config.__class__.__name__}")
    app = _base_app(app_config)
    memoizer.init_app(app)
    # Blueprints are imported lazily to avoid circular imports at module load.
    from .api import api as api_blueprint
    app.register_blueprint(api_blueprint, url_prefix="/api")
    from .pages import pages as main_blueprint
    app.register_blueprint(main_blueprint)
    return app
def configure_logging(app_config):
    """
    Configure root logging via dictConfig before the Flask app exists.

    Per http://flask.pocoo.org/docs/1.0/logging/ : "If possible, configure
    logging before creating the application object." The botocore logger is
    raised to CRITICAL because boto3 is very chatty at lower levels.
    """
    dictConfig({
        'version': 1,
        'formatters': {'default': {
            'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
        }},
        'handlers': {'wsgi': {
            'class': 'logging.StreamHandler',
            'stream': 'ext://flask.logging.wsgi_errors_stream',
            'formatter': 'default'
        }},
        'root': {
            'level': app_config.LOG_LEVEL,
            'handlers': ['wsgi']
        }
    })
    logging.getLogger('botocore').setLevel(logging.CRITICAL)
|
{"/app/pages/views.py": ["/app/pages/__init__.py"], "/app/__init__.py": ["/app/helpers/memoize.py", "/app/pages/__init__.py"]}
|
33,230,406
|
viktorzhelev/dev-ops-training
|
refs/heads/master
|
/app/pages/__init__.py
|
from flask import Blueprint
# Blueprint for the page-serving routes; views is imported afterwards so its
# @pages.route decorators register against this blueprint object.
pages = Blueprint('pages', __name__)
from . import views
|
{"/app/pages/views.py": ["/app/pages/__init__.py"], "/app/__init__.py": ["/app/helpers/memoize.py", "/app/pages/__init__.py"]}
|
33,270,510
|
akashnandi0/crypto
|
refs/heads/master
|
/serializervalidation/myapp2.py
|
# Create New Student
import requests
import json
URL = "http://127.0.0.1:8000/stucreate/"
data = {
'name' : 'Maria',
'roll' : '03',
'state': 'TamilNadu',
'city' : 'Chennai'
}
json_data = json.dumps(data)
r = requests.post(url = URL, data = json_data)
data = r.json()
print(data)
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,511
|
akashnandi0/crypto
|
refs/heads/master
|
/book/book/urls.py
|
from django.contrib import admin
from django.urls import path,include
from api import views
# from rest_framework.routers import DefaultRouter
# router = DefaultRouter()
# router.register('bookapi',views.BookModelViewSet, basename='book')
# Project-level routes; the book API is mounted under /bookstore/.
urlpatterns = [
    path('admin/', admin.site.urls),
    # path('',include(router.urls)),
    path('bookstore/',include('api.urls'))
    # path('auth/',include('rest_framework.urls',
    # namespace='rest_framework'))
]
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,512
|
akashnandi0/crypto
|
refs/heads/master
|
/serializervalidation/gs1/serializers.py
|
from rest_framework import serializers
from .models import Student
# Validators
def name_caps(value):
    """Serializer validator: require the first character to be uppercase.

    Robustness fix: guard the empty string — value[0] raised IndexError
    (a 500) instead of a clean validation error.
    """
    if not value or value[0] != value[0].upper():
        raise serializers.ValidationError('Start with Capital Letter')
class StudentSerializers(serializers.Serializer):
    """Plain (non-Model) serializer for Student with custom validation."""
    name = serializers.CharField(max_length=100, validators=[name_caps])
    roll = serializers.IntegerField()
    state = serializers.CharField(max_length=100, validators=[name_caps])
    city = serializers.CharField(max_length=100, validators=[name_caps])

    # Create Instance
    def create(self, validate_data):
        return Student.objects.create(**validate_data)

    # Update Instance
    def update(self, instance, validate_data):
        # Fix: removed leftover print() debugging of instance.name.
        instance.name = validate_data.get('name', instance.name)
        instance.roll = validate_data.get('roll', instance.roll)
        instance.state = validate_data.get('state', instance.state)
        instance.city = validate_data.get('city', instance.city)
        instance.save()
        return instance

    # Field Level Validation
    def validate_roll(self, value):
        if value >= 200:
            raise serializers.ValidationError('Seat Full')
        return value

    # Object Level Validation
    def validate(self, data):
        nm = data.get('name')
        ct = data.get('city')
        if nm.lower() == 'rohit' and ct.lower() != 'ranchi':
            raise serializers.ValidationError('City must be ranchi')
        return data
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,513
|
akashnandi0/crypto
|
refs/heads/master
|
/gs13/api/views.py
|
# CLASS-BASED VIEWSETS WITH BASIC AUTHENTICATION AND ISADMIN PERMISSION
from .models import Student
from .serializers import StudentSerializers
from rest_framework import viewsets
from rest_framework.authentication import BasicAuthentication, SessionAuthentication
from rest_framework.permissions import IsAuthenticated,AllowAny,IsAdminUser,IsAuthenticatedOrReadOnly,DjangoModelPermissions,DjangoModelPermissionsOrAnonReadOnly
class StudentModelViewSet(viewsets.ModelViewSet):
    """Full CRUD for Student via router URLs; HTTP Basic auth, admin-only."""
    queryset = Student.objects.all()
    serializer_class = StudentSerializers
    authentication_classes = [BasicAuthentication]
    permission_classes = [IsAdminUser]
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,514
|
akashnandi0/crypto
|
refs/heads/master
|
/serializervalidation/serializervalidation/urls.py
|
from django.contrib import admin
from django.urls import path
from gs1 import views
# Route table mapping onto the function-based views in gs1.views.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('stuinfo/<int:pk>/',views.student_detail),
    path('stulist/',views.student_list),
    path('stucreate/', views.student_create),
    path('studentapi/',views.student_api),
]
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,515
|
akashnandi0/crypto
|
refs/heads/master
|
/serializervalidation/myapp3.py
|
# GET, POST, UPDATE(Partial and Full), DELETE
import requests
import json
URL = "http://127.0.0.1:8000/studentapi/"
# Get Data
def get_data(id = None):
    # Fetch all students, or a single one when *id* is given.
    # NOTE(review): the id is sent in the *body* of a GET request, which many
    # servers/proxies drop — this only works because the matching view reads it.
    data = {}
    if id is not None:
        data = {'id':id}
    json_data = json.dumps(data)
    r = requests.get(url = URL, data = json_data)
    data = r.json()
    print(data)
# get_data()
# Post Data
def post_data():
    # Create one hard-coded student record via the API.
    data = {
        'name':'Shreyas',
        'roll':'2',
        'state':'Delhi',
        'city' : 'Noida'
    }
    json_data = json.dumps(data)
    r = requests.post(url=URL, data=json_data)
    data = r.json()
    print(data)
post_data()
# Update Data
def update_data():
    # Partial update (PUT without all fields); the id is commented out, so
    # the server decides which record is addressed — see the view's handling.
    data = {
        # 'id': '3',
        'name':'Sikha',
        'state':'Maharashtra',
        'city' : 'Mumbai'
    }
    json_data = json.dumps(data)
    r = requests.put(url=URL, data=json_data)
    data = r.json()
    print(data)
# update_data()
#Delele Data
def delete_data():
    # Delete the record whose id is given in the JSON body.
    data = {
        'id': '4'
    }
    json_data = json.dumps(data)
    r = requests.delete(url=URL, data=json_data)
    data = r.json()
    print(data)
# delete_data()
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,516
|
akashnandi0/crypto
|
refs/heads/master
|
/serializervalidation/gs1/models.py
|
from django.db import models
class Student(models.Model):
    """Student record; the roll number is unique across the table."""
    name = models.CharField(max_length=100)
    roll = models.IntegerField(unique=True)
    state = models.CharField(max_length=100)
    city = models.CharField(max_length=100)
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,517
|
akashnandi0/crypto
|
refs/heads/master
|
/book/api/views.py
|
from django.shortcuts import render
from .serializers import BookSerializer
from .models import Books
from rest_framework.authentication import BasicAuthentication
from rest_framework.permissions import IsAuthenticated, IsAdminUser, IsAuthenticatedOrReadOnly
from rest_framework import viewsets, filters
from rest_framework.generics import CreateAPIView, ListAPIView, RetrieveAPIView, DestroyAPIView, UpdateAPIView
class BookModelViewSet(viewsets.ModelViewSet):
    """Full CRUD for Books; anonymous users get read-only access."""
    search_fields = ['author','book_name']
    filter_backends = (filters.SearchFilter,)
    queryset = Books.objects.all()
    serializer_class = BookSerializer
    authentication_classes = [BasicAuthentication]
    permission_classes = [IsAuthenticatedOrReadOnly]
class BookListView(ListAPIView):
    """GET list of books with ?search= over author/book_name; login required."""
    search_fields = ['author','book_name']
    filter_backends = (filters.SearchFilter,)
    queryset = Books.objects.all()
    serializer_class = BookSerializer
    authentication_classes = [BasicAuthentication]
    permission_classes = [IsAuthenticated]
class BookCreateView(CreateAPIView):
    """POST-only endpoint for creating books; admin users only.

    Fix: serializer_class was assigned twice in the original; a single
    assignment is kept.
    """
    search_fields = ['author','book_name']
    filter_backends = (filters.SearchFilter,)
    serializer_class = BookSerializer
    authentication_classes = [BasicAuthentication]
    permission_classes = [IsAdminUser]
class BookRetrieveView(RetrieveAPIView):
    """GET a single book by pk; admin users only."""
    search_fields = ['author','book_name']
    filter_backends = (filters.SearchFilter,)
    queryset = Books.objects.all()
    serializer_class = BookSerializer
    authentication_classes = [BasicAuthentication]
    permission_classes = [IsAdminUser]
class BookDestroyView(DestroyAPIView):
    """DELETE a single book by pk; admin users only.

    Consistency fix: every sibling view declares serializer_class; without
    it, get_serializer_class() raises if a serializer is ever needed
    (e.g. for schema generation).
    """
    search_fields = ['author','book_name']
    filter_backends = (filters.SearchFilter,)
    queryset = Books.objects.all()
    serializer_class = BookSerializer
    authentication_classes = [BasicAuthentication]
    permission_classes = [IsAdminUser]
class BookUpdateView(UpdateAPIView):
search_fields = ['author','book_name']
filter_backends = (filters.SearchFilter,)
queryset = Books.objects.all()
serializer_class = BookSerializer
authentication_classes = [BasicAuthentication]
permission_classes = [IsAdminUser]
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,518
|
akashnandi0/crypto
|
refs/heads/master
|
/serializervalidation/myapp1.py
|
# List the data and details
import requests
URL = "http://127.0.0.1:8000/stuinfo/1"
r = requests.get(url = URL)
data = r.json()
print(data)
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,519
|
akashnandi0/crypto
|
refs/heads/master
|
/book/api/models.py
|
from django.db import models
class Books(models.Model):
book_name = models.CharField(max_length=500)
pub_date = models.DateTimeField('date published',null=True)
author = models.CharField(max_length=200,null=True)
def __str__(self):
return self.book_name
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,520
|
akashnandi0/crypto
|
refs/heads/master
|
/book/api/urls.py
|
from django.urls import path
from rest_framework.urlpatterns import format_suffix_patterns
from api import views
urlpatterns = [
path('booklist/', views.BookListView.as_view(),name='book_list'),
path('bookcreate/',views.BookCreateView.as_view(),name='book_create'),
path('book/<int:pk>/detail/', views.BookRetrieveView.as_view(), name='book_retrieve'),
path('book/<int:pk>/delete/',views.BookDestroyView.as_view(),name='book_destroy'),
path('book/<int:pk>/update/',views.BookUpdateView.as_view(),name='book_update')
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,270,521
|
akashnandi0/crypto
|
refs/heads/master
|
/serializervalidation/gs1/views.py
|
from django.shortcuts import render
from .models import Student
from .serializers import StudentSerializers
from django.http import HttpResponse, JsonResponse
from rest_framework.renderers import JSONRenderer
import io
from rest_framework.parsers import JSONParser
from django.views.decorators.csrf import csrf_exempt
# Model Object - Single Student Data
def student_detail(request,pk):
stu = Student.objects.get(id=pk)
serializer = StudentSerializers(stu)
json_data = JSONRenderer().render(serializer.data)
return HttpResponse(json_data,content_type = 'application/json')
# Query Set - All Student Data
def student_list(request):
stu = Student.objects.all()
serializer = StudentSerializers(stu, many=True)
# json_data = JSONRenderer().render(serializer.data)
# return HttpResponse(json_data,content_type = 'application/json')
return JsonResponse(serializer.data,safe=False)
# Create Student
@csrf_exempt
def student_create(request):
if request.method == 'POST':
json_data = request.body
stream = io.BytesIO(json_data)
python_data = JSONParser().parse(stream)
serializer = StudentSerializers(data=python_data)
if serializer.is_valid():
serializer.save()
res = {"msg":"data created !"}
json_data = JSONRenderer().render(res)
return HttpResponse(json_data,content_type='application/json')
json_data = JSONRenderer().render(serializer.errors)
return HttpResponse(json_data,content_type='application/json')
# Get, Post, Put, Delete Data
@csrf_exempt
def student_api(request):
if request.method == 'GET':
json_data = request.body
stream = io.BytesIO(json_data)
python_data = JSONParser().parse(stream)
id = python_data.get('id',None)
if id is not None:
stu = Student.objects.get(id=id)
serializer = StudentSerializers(stu)
json_data = JSONRenderer().render(serializer.data)
return HttpResponse(json_data,content_type='application/json')
stu = Student.objects.all()
serializer = StudentSerializers(stu,many=True)
json_data = JSONRenderer().render(serializer.data)
return HttpResponse(json_data,content_type='application/json')
if request.method == 'POST':
json_data = request.body
stream = io.BytesIO(json_data)
python_data = JSONParser().parse(stream)
serializer = StudentSerializers(data=python_data)
if serializer.is_valid():
serializer.save()
res = {'msg':'data created'}
json_data = JSONRenderer().render(res)
return HttpResponse(json_data, content_type='application/json')
json_data = JSONRenderer().render(serializer.errors)
return HttpResponse(json_data, content_type='application/json')
if request.method == 'PUT':
json_data = request.body
stream = io.BytesIO(json_data)
python_data = JSONParser().parse(stream)
id = python_data.get('id')
stu = Student.objects.get(id=id)
serializer = StudentSerializers(stu,data=python_data,partial=True)
if serializer.is_valid():
serializer.save()
res = {'msg':'data updated'}
json_data = JSONRenderer().render(res)
return HttpResponse(json_data,content_type='application/json')
json_data = JSONRenderer().render(res)
return HttpResponse(json_data,content_type='application/json')
if request.method == 'DELETE':
json_data = request.body
stream = io.BytesIO(json_data)
python_data = JSONParser().parse(stream)
id = python_data.get('id')
stu = Student.objects.get(id=id)
stu.delete()
res = {'msg':'Data Deleted'}
json_data = JSONRenderer().render(res)
return HttpResponse(json_data,content_type = 'application/json')
|
{"/serializervalidation/gs1/serializers.py": ["/serializervalidation/gs1/models.py"], "/book/api/views.py": ["/book/api/models.py"], "/serializervalidation/gs1/views.py": ["/serializervalidation/gs1/models.py", "/serializervalidation/gs1/serializers.py"]}
|
33,329,738
|
joseivo01/LearningUnitTestPythonOO
|
refs/heads/master
|
/src/test_cliente.py
|
import unittest
from cliente import Cliente
class TestClient(unittest.TestCase):
def setUp(self):
self.cliente = Cliente("João","0124578")
#testar se clinte não inicializa nullo
def test_notNullClientName(self):
self.assertIsNotNone(self.cliente.get_nome())
def test_notNullClientCpf(self):
self.assertIsNotNone(self.cliente.get_cpf())
if __name__ == '__main__':
unittest.main()
|
{"/test_conta.py": ["/conta.py"], "/test_correntista.py": ["/correntista.py", "/conta.py"], "/src/test_cliente.py": ["/cliente.py"]}
|
33,329,739
|
joseivo01/LearningUnitTestPythonOO
|
refs/heads/master
|
/src/cliente.py
|
class Cliente:
def __init__(self, nome, cpf):
self.nome = nome
self.cpf = cpf
#Os métodos que dão acesso são nomeados como properties
#É utilizado para dar acesso aos metodos sem a utilização de ()
def get_nome(self):
return self.nome
def set_nome(self, nome):
self.nome = nome
def get_cpf(self):
return self.nome
def set_cpf(self, cpf):
self.cpf = cpf
|
{"/test_conta.py": ["/conta.py"], "/test_correntista.py": ["/correntista.py", "/conta.py"], "/src/test_cliente.py": ["/cliente.py"]}
|
33,329,740
|
joseivo01/LearningUnitTestPythonOO
|
refs/heads/master
|
/cliente.py
|
class Cliente:
def __init__(self, nome):
self.nome = nome
#Os métodos que dão acesso são nomeados como properties
#É utilizado para dar acesso aos metodos sem a utilização de ()
@property
def nome(self):
return self.nome.title()
|
{"/test_conta.py": ["/conta.py"], "/test_correntista.py": ["/correntista.py", "/conta.py"], "/src/test_cliente.py": ["/cliente.py"]}
|
33,537,253
|
pawel1830/cassandra_app
|
refs/heads/main
|
/check_cassandra.py
|
import os
import sys
import time
from cassandra.cluster import Cluster
cassandra_hosts = os.environ.get('CASSANDRA_HOSTS', 'localhost')
for r in range(1, 5):
try:
print("Connecting %i" % r)
cluster = Cluster(cassandra_hosts.split(','))
session = cluster.connect()
print("Connected")
session.shutdown()
sys.exit(0)
except Exception as exc:
print(exc)
print("Wait 10 seconds")
time.sleep(10.0)
sys.exit(1)
|
{"/test_project/api/views.py": ["/test_project/api/models.py", "/test_project/api/serializer.py"], "/test_project/api/management/commands/delete_unnecessary_messages.py": ["/test_project/api/models.py"], "/test_project/api/serializer.py": ["/test_project/api/models.py"], "/test_project/api/urls.py": ["/test_project/api/views.py"], "/test_project/api/tests.py": ["/test_project/api/models.py"]}
|
33,537,254
|
pawel1830/cassandra_app
|
refs/heads/main
|
/test_project/settings.py
|
"""
Django settings for test_project project.
Generated by 'django-admin startproject' using Django 3.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#_7^2-hmz#5#*6qq!_tzu-nv$7v-f@c)jx&8(4=@$!g0h-_-te'
cassandra_hosts = os.environ.get('CASSANDRA_HOSTS', 'localhost')
debug = os.environ.get('DEBUG', 'true').lower() == 'true'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = bool(debug)
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django_cassandra_engine',
'django.contrib.auth',
'django.contrib.contenttypes',
'test_project.api'
]
MIDDLEWARE = [
]
ROOT_URLCONF = 'test_project.urls'
WSGI_APPLICATION = 'test_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django_cassandra_engine',
'NAME': 'cassandra_db',
'TEST_NAME': 'test_db',
'HOST': cassandra_hosts,
'OPTIONS': {
'replication': {
'strategy_class': 'SimpleStrategy',
'replication_factor': 1
}
}
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 100
}
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_USE_TLS = True
EMAIL_PORT = 587
EMAIL_HOST_USER = 'jtestowy21@gmail.com'
EMAIL_HOST_PASSWORD = 'JanTestowyKtoregoMuszeUzyc'
# EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# EMAIL_HOST = '1.2.3.4'
# EMAIL_TIMEOUT = 5
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
|
{"/test_project/api/views.py": ["/test_project/api/models.py", "/test_project/api/serializer.py"], "/test_project/api/management/commands/delete_unnecessary_messages.py": ["/test_project/api/models.py"], "/test_project/api/serializer.py": ["/test_project/api/models.py"], "/test_project/api/urls.py": ["/test_project/api/views.py"], "/test_project/api/tests.py": ["/test_project/api/models.py"]}
|
33,537,255
|
pawel1830/cassandra_app
|
refs/heads/main
|
/test_project/api/views.py
|
from django.core.exceptions import ValidationError
from rest_framework.generics import ListAPIView, CreateAPIView
from rest_framework.parsers import JSONParser
from rest_framework.response import Response
from rest_framework import status
from django.core.validators import validate_email
import logging
from django.core.mail import send_mail
from rest_framework.views import APIView
from django.conf import settings
from .models import Message
from .serializer import MessageSerializer
logger = logging.getLogger(__name__)
class MessageSendView(APIView):
def validate_magic_number(self, magic_number):
if not magic_number:
return 0, 'Magic number not exists'
try:
int(magic_number)
except ValueError:
return 0, 'Magic number must be int'
return 1, ''
def post(self, request, *args, **kwargs):
email_user = settings.EMAIL_HOST_USER
request_data = JSONParser().parse(request)
magic_number = request_data.get('magic_number')
ok, error = self.validate_magic_number(magic_number)
if not ok:
return Response(
data={"errors": error},
status=status.HTTP_400_BAD_REQUEST
)
messages = Message.objects.filter(magic_number=int(magic_number))
for message in messages:
try:
send_mail(message.title, message.content, email_user, [message.email],
fail_silently=False)
message.delete()
except Exception as e:
logger.error(e)
return Response({'errors': 'Internal Error'},
status=status.HTTP_500_INTERNAL_SERVER_ERROR)
return Response({"message": "Messages send"})
class MessageListView(ListAPIView):
serializer_class = MessageSerializer
def get_queryset(self):
email_value = self.kwargs['email_value']
return Message.objects.filter(email=email_value)
def get(self, request, *args, **kwargs):
try:
email_value = self.kwargs['email_value']
validate_email(email_value)
except ValidationError:
return Response({"errors": 'Email invalid'}, status=status.HTTP_400_BAD_REQUEST)
return super(MessageListView, self).get(request, *args, **kwargs)
class MessageCreateView(CreateAPIView):
serializer_class = MessageSerializer
|
{"/test_project/api/views.py": ["/test_project/api/models.py", "/test_project/api/serializer.py"], "/test_project/api/management/commands/delete_unnecessary_messages.py": ["/test_project/api/models.py"], "/test_project/api/serializer.py": ["/test_project/api/models.py"], "/test_project/api/urls.py": ["/test_project/api/views.py"], "/test_project/api/tests.py": ["/test_project/api/models.py"]}
|
33,537,256
|
pawel1830/cassandra_app
|
refs/heads/main
|
/test_project/api/serializer.py
|
from django.utils.timezone import now
from rest_framework import serializers
from .models import Message
class MessageSerializer(serializers.Serializer):
magic_number = serializers.IntegerField(required=True)
title = serializers.CharField(required=True)
email = serializers.EmailField(required=True)
content = serializers.CharField(required=True)
created_at = serializers.DateTimeField(default=now, read_only=True)
def create(self, validated_data):
message = Message.objects.create(**validated_data)
return message
|
{"/test_project/api/views.py": ["/test_project/api/models.py", "/test_project/api/serializer.py"], "/test_project/api/management/commands/delete_unnecessary_messages.py": ["/test_project/api/models.py"], "/test_project/api/serializer.py": ["/test_project/api/models.py"], "/test_project/api/urls.py": ["/test_project/api/views.py"], "/test_project/api/tests.py": ["/test_project/api/models.py"]}
|
33,537,257
|
pawel1830/cassandra_app
|
refs/heads/main
|
/test_project/api/models.py
|
import uuid
from django.db import connection
from django.utils.timezone import now
from cassandra.cqlengine import columns
from django_cassandra_engine.models import DjangoCassandraModel
class Message(DjangoCassandraModel):
uuid = columns.UUID(primary_key=True, default=uuid.uuid4)
email = columns.Text(index=True)
magic_number = columns.Integer(index=True)
title = columns.Text(required=True)
created_at = columns.DateTime(default=now)
content = columns.Text(required=True)
__options__ = {
"default_time_to_live": 600
}
__table_name__ = 'message'
def truncate(self):
with connection.cursor() as cursor:
cursor.execute('TRUNCATE TABLE {}'.format(self.__table_name__))
|
{"/test_project/api/views.py": ["/test_project/api/models.py", "/test_project/api/serializer.py"], "/test_project/api/management/commands/delete_unnecessary_messages.py": ["/test_project/api/models.py"], "/test_project/api/serializer.py": ["/test_project/api/models.py"], "/test_project/api/urls.py": ["/test_project/api/views.py"], "/test_project/api/tests.py": ["/test_project/api/models.py"]}
|
33,537,258
|
pawel1830/cassandra_app
|
refs/heads/main
|
/test_project/api/tests.py
|
import json
from django.urls import reverse
from test_project.api.models import Message
class TestMessage(object):
sample_data1 = {
"email": "anna.zajkowska@example.com",
"title": "Interview 3",
"content": "simple text3",
"magic_number": 101
}
sample_data2 = {
"email": "jan.kowalski@example.com",
"title": "Interview 2",
"content": "simple text 2",
"magic_number":22
}
bad_email_sample_data = {
"email": "jan.kowalski",
"title": "Interview 2",
"content": "simple text 2",
"magic_number": 22
}
sample_data3 = {
"email": "jan.kowalski@example.com",
"title": "Interview",
"content": "simpletext",
"magic_number": 101
}
def test_can_get_message_by_email(self, client):
Message.objects.create(email=self.sample_data1['email'],
title=self.sample_data1['title'],
content=self.sample_data1['content'],
magic_number=self.sample_data1['magic_number'])
url = reverse('get-messages', kwargs={'email_value': self.sample_data1['email']})
response = client.get(url)
response_data = dict(response.data)
results = response_data['results'][0]
assert response.status_code == 200
assert results['email'] == self.sample_data1['email']
assert results['title'] == self.sample_data1['title']
assert results['content'] == self.sample_data1['content']
assert results['magic_number'] == self.sample_data1['magic_number']
def test_can_get_message_wrong_email(self, client):
url = reverse('get-messages', kwargs={'email_value': 'hdjshdsakjd'})
response = client.get(url)
assert response.status_code == 400
def test_can_create_message_correct_data(self, client):
url = reverse('create-message')
Message.truncate()
response = client.post(
url,
data=json.dumps(self.sample_data2),
content_type="application/json"
)
message = Message.objects.get(email=self.sample_data2['email'])
assert response.status_code == 201
assert message.title == self.sample_data2['title']
assert message.content == self.sample_data2['content']
assert message.magic_number == self.sample_data2['magic_number']
def test_can_create_message_bad_data(self, client):
url = reverse('create-message')
response = client.post(
url,
data=json.dumps(self.bad_email_sample_data),
content_type="application/json"
)
assert response.status_code == 400
def test_can_send_messages(self, client):
url = reverse('send-message')
magic_number = self.sample_data3['magic_number']
data = {'magic_number': magic_number}
Message.objects.create(email=self.sample_data3['email'],
title=self.sample_data3['title'],
content=self.sample_data3['content'],
magic_number=magic_number)
response = client.post(url, data, content_type="application/json")
assert Message.objects.filter(magic_number=magic_number).count() == 0
assert response.status_code == 200
def test_send_messages_bad_magic_number(self, client):
url = reverse('send-message')
data = {'magic_number': 1000}
response = client.post(url, data, content_type="application/json")
assert Message.objects.filter(magic_number=1000).count() == 0
assert response.status_code == 200
|
{"/test_project/api/views.py": ["/test_project/api/models.py", "/test_project/api/serializer.py"], "/test_project/api/management/commands/delete_unnecessary_messages.py": ["/test_project/api/models.py"], "/test_project/api/serializer.py": ["/test_project/api/models.py"], "/test_project/api/urls.py": ["/test_project/api/views.py"], "/test_project/api/tests.py": ["/test_project/api/models.py"]}
|
33,537,259
|
pawel1830/cassandra_app
|
refs/heads/main
|
/test_project/api/urls.py
|
from django.urls import path
from .views import MessageListView, MessageCreateView, MessageSendView
urlpatterns = [
path('message', MessageCreateView.as_view(), name='create-message'),
path('send', MessageSendView.as_view(), name='send-message'),
path('messages/<email_value>', MessageListView.as_view(), name='get-messages')
]
|
{"/test_project/api/views.py": ["/test_project/api/models.py", "/test_project/api/serializer.py"], "/test_project/api/management/commands/delete_unnecessary_messages.py": ["/test_project/api/models.py"], "/test_project/api/serializer.py": ["/test_project/api/models.py"], "/test_project/api/urls.py": ["/test_project/api/views.py"], "/test_project/api/tests.py": ["/test_project/api/models.py"]}
|
33,552,786
|
nvvaulin/medical_imaging
|
refs/heads/master
|
/datasets/coronahack.py
|
from torchvision import datasets
import pandas as pd
import numpy as np
import os
from .basic_dataset import BasicDataset
class Coronahack(BasicDataset):
def __init__(self,root='/media/nvme/data/datasets/medical_datasets/coronahack',
mode='train',**kwargs):
data = pd.read_csv(os.path.join(root,'Chest_xray_Corona_Metadata.csv'),sep=',')
data = data[data['Dataset_type']==mode.upper()]
samples = data['X_ray_image_name'].values
named_labels = data['Label'].values
super().__init__(os.path.join(root,'Coronahack-Chest-XRay-Dataset/Coronahack-Chest-XRay-Dataset/'+mode.lower()),
samples,named_labels,**kwargs)
|
{"/models/__init__.py": ["/models/basic_classifier.py", "/models/svd_densenet.py"], "/utils/__init__.py": ["/utils/load_object.py"], "/train.py": ["/config.py", "/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/covid_aid.py": ["/datasets/basic_dataset.py"], "/datasets/__init__.py": ["/datasets/coronahack.py", "/datasets/chest_xray.py", "/datasets/join_datasets.py", "/datasets/covid_chest_xray.py", "/datasets/covid_aid.py"], "/shared_train.py": ["/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/covid_chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/coronahack.py": ["/datasets/basic_dataset.py"]}
|
33,552,787
|
nvvaulin/medical_imaging
|
refs/heads/master
|
/train.py
|
from config import ex
import pytorch_lightning as pl
from pytorch_lightning.callbacks import EarlyStopping,ModelCheckpoint
from torchvision import models
import datasets
from pytorch_lightning import loggers as pl_loggers
from torchvision import transforms
from torch.utils.data import DataLoader, random_split
from models import BasicClassifierModel
import os
import numpy as np
import re
import parse
import pandas as pd
# class JoinedLoader(DataLoader):
# def __init__(self,*loaders):
# self.loaders = loaders
#
# @property
# def batch_size(self):
# return sum([i.bach_size for i in self.loaders])
#
# def __len__(self):
# return min([len(i) for i in self.loaders])
#
# def __iter__(self):
# return zip(*self.loaders)
@ex.capture
def load_train_val_test(dataset, batch_size=64,input_size=(224,224),num_workers=8):
# imagenet maan and std
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]
input_size = tuple(input_size)
test_transform = transforms.Compose([
transforms.Resize(input_size),
transforms.ToTensor(),
transforms.Normalize(mean,std)
])
train_transform = transforms.Compose([
transforms.RandomSizedCrop(input_size,(0.8,1.2)),
transforms.RandomHorizontalFlip(),
transforms.Scale(input_size),
transforms.CenterCrop(input_size),
transforms.ToTensor(),
transforms.Normalize(mean,std)
])
train_val = datasets.__dict__[dataset['name']](mode='train',transform=train_transform,**dataset.get('params',{}))
test = datasets.__dict__[dataset['name']](mode='test',transform=test_transform,**dataset.get('params',{}))
assert (train_val.label_names!=test.label_names).sum()==0,'wrong!'
train, val = random_split(train_val,[int(0.9*len(train_val)),len(train_val)-int(0.9*len(train_val))])
train_loader = DataLoader(train, batch_size=batch_size,num_workers=num_workers,shuffle=True)
val_loader = DataLoader(val, batch_size=batch_size,num_workers=num_workers)
test_loader = DataLoader(test, batch_size=batch_size,num_workers=num_workers)
print('num_classes %d, dataset size: train %d; val %d; test %d'%(train_val.num_classes,len(train),len(val),len(test)))
return train_val.label_names, train_loader,val_loader,test_loader
#
# @ex.capture
# def list_checkpoints(exp_root,exp_name,version):
# checkpoints = glob.glob(ckpt
#
def load_backbone(name='resnet18',params={}):
return models.__dict__[name](**params)
#
#
# def train
#
# @ex.capture
# def load_checkpoint(exp_root,exp_name,version,load_epoch='best'):
# if load_epoch == 'best'
#
# @ex.command(model=None,load_epoch=1)
# def test
class ExistedModelCheckpoint(ModelCheckpoint):
def __init__(self,*args,**kwargs):
super().__init__(*args,**kwargs)
self._init_folder()
def _init_folder(self):
chpts = self.read_dir()
if chpts is None or len(chpts) == 0:
return
if self.save_top_k:
inx = np.argsort(chpts[self.monitor].values)
if self.mode == 'max':
inx = inx[::-1]
inx = inx[:self.save_top_k]
self.best_k_models = dict(zip(chpts['path'].values[inx],chpts[self.monitor].values[inx]))
self.kth_best_model_path = chpts['path'].values[inx[-1]]
if self.mode == 'min':
self.best_model_score = chpts[self.monitor].min()
self.best_model_path = chpts['path'].values[chpts[self.monitor].values.argmin()]
else:
self.best_model_score = chpts[self.monitor].max()
self.best_model_path = chpts['path'].values[chpts[self.monitor].values.argmax()]
self.last_model_path = chpts['path'].values[chpts['epoch'].values.argmax()]
self.current_score = chpts['epoch'].values.max()
def get_checkpoint_path(self,val='best',key='epoch'):
if val is None:
return None
elif val == 'best':
return self.best_model_path
elif val == 'last':
return self.last_model_path
else:
chpts = self.read_dir()
if chpts is None:
return None
for i in range(len(chpts)):
if chpts[key].values[i] == val:
return chpts['path'].values[i]
return None
def read_dir(self):
if not os.path.exists(self.dirpath) or len(os.listdir(self.dirpath)) == 0:
return None
pattern = self.filename
for i in re.findall('\{[A-Za-z_:\.0-9]+\}', pattern):
pattern = pattern.replace(i, '%s=%s' % (re.findall('[A-Za-z_0-9]+', i)[0], i))
chpts = [ dict(path=os.path.join(self.dirpath,i),**parse.search(pattern,i).named) for i in os.listdir(self.dirpath)]
chpts = pd.DataFrame(chpts)
chpts['epoch'] = pd.to_numeric(chpts['epoch'])
return chpts
@ex.command
def test(exp_root,exp_name,version,_config,load_epoch=None):
tb_logger = pl_loggers.TensorBoardLogger(exp_root, exp_name, version)
label_names, _, _, test_loader = load_train_val_test(_config['dataset'])
backbone = load_backbone(**_config.get('backbone', {}))
model = BasicClassifierModel(backbone, label_names, _config['optimizer'], _config['scheduler'])
checkpointer = ExistedModelCheckpoint(monitor='val_loss',
mode='min',
save_top_k=5,
dirpath=os.path.join(exp_root, exp_name, version, 'checkpoints'),
filename=_config['backbone']['name'] + '-{epoch}-{val_loss:.3f}-{train_loss:.3f}')
trainer = pl.Trainer(logger=tb_logger,callbacks=[checkpointer],**_config.get('trainer',{}))
trainer.test(model=model,test_dataloaders=test_loader,ckpt_path=checkpointer.get_checkpoint_path(load_epoch))
@ex.automain
def main(exp_root,exp_name,version,_config,load_epoch=None):
tb_logger = pl_loggers.TensorBoardLogger(exp_root,exp_name,version)
label_names,train_loader, val_loader, test_loader = load_train_val_test(_config['dataset'])
backbone = load_backbone(**_config.get('backbone',{}))
model = BasicClassifierModel(backbone,label_names,_config['optimizer'],_config['scheduler'])
checkpointer = ExistedModelCheckpoint(monitor='val_loss',
mode='min',
save_top_k=5,
dirpath = os.path.join(exp_root,exp_name,version,'checkpoints'),
filename=_config['backbone']['name']+'-{epoch}-{val_loss:.3f}-{train_loss:.3f}')
callbacks = [checkpointer,EarlyStopping(monitor='val_loss',patience=10)]
trainer = pl.Trainer(logger=tb_logger,
resume_from_checkpoint=checkpointer.get_checkpoint_path(load_epoch),
callbacks=callbacks,**_config.get('trainer',{}))
trainer.fit(model, train_loader, val_loader)
print('load best epoch ',checkpointer.best_model_path)
model.load_from_checkpoint(checkpointer.best_model_path)
result = trainer.test(test_dataloaders=test_loader)
print(result)
|
{"/models/__init__.py": ["/models/basic_classifier.py", "/models/svd_densenet.py"], "/utils/__init__.py": ["/utils/load_object.py"], "/train.py": ["/config.py", "/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/covid_aid.py": ["/datasets/basic_dataset.py"], "/datasets/__init__.py": ["/datasets/coronahack.py", "/datasets/chest_xray.py", "/datasets/join_datasets.py", "/datasets/covid_chest_xray.py", "/datasets/covid_aid.py"], "/shared_train.py": ["/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/covid_chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/coronahack.py": ["/datasets/basic_dataset.py"]}
|
33,552,788
|
nvvaulin/medical_imaging
|
refs/heads/master
|
/datasets/__init__.py
|
from .coronahack import Coronahack
from .chest_xray import ChestXRay
|
{"/models/__init__.py": ["/models/basic_classifier.py", "/models/svd_densenet.py"], "/utils/__init__.py": ["/utils/load_object.py"], "/train.py": ["/config.py", "/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/covid_aid.py": ["/datasets/basic_dataset.py"], "/datasets/__init__.py": ["/datasets/coronahack.py", "/datasets/chest_xray.py", "/datasets/join_datasets.py", "/datasets/covid_chest_xray.py", "/datasets/covid_aid.py"], "/shared_train.py": ["/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/covid_chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/coronahack.py": ["/datasets/basic_dataset.py"]}
|
33,552,789
|
nvvaulin/medical_imaging
|
refs/heads/master
|
/datasets/chest_xray.py
|
import pandas as pd
import numpy as np
import os
import glob
from .basic_dataset import BasicDataset
def search(a,key):
inx = np.argsort(a)
inx = inx[np.searchsorted(a[inx],key)]
assert (a[inx]!=key).sum()==0,'not all keys found'
return inx
class ChestXRay(BasicDataset):
def __init__(self,root='/media/nvme/data/datasets/medical_datasets/chestXray',mode='train',**kwargs):
if mode.lower() == 'train':
imlist = 'train_val_list.txt'
elif mode.lower() == 'test':
imlist = 'test_list.txt'
else:
assert False, 'unknown mode '+mode
imlist = open(os.path.join(root,imlist)).read().split('\n')
samples = np.array([i[len(root)+1:] for i in glob.glob(os.path.join(root,'images*/images/*'))])
samples = samples[search(np.array([i.split('/')[-1] for i in samples]),imlist)]
data = pd.read_csv(os.path.join(root,'Data_Entry_2017.csv'),sep=',')
inx = search(data['Image Index'].values,imlist)
named_labels = data['Finding Labels'].values[inx]
named_labels = [i.split('|') for i in named_labels]
super().__init__(root,samples,named_labels,multilabel=True,**kwargs)
|
{"/models/__init__.py": ["/models/basic_classifier.py", "/models/svd_densenet.py"], "/utils/__init__.py": ["/utils/load_object.py"], "/train.py": ["/config.py", "/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/covid_aid.py": ["/datasets/basic_dataset.py"], "/datasets/__init__.py": ["/datasets/coronahack.py", "/datasets/chest_xray.py", "/datasets/join_datasets.py", "/datasets/covid_chest_xray.py", "/datasets/covid_aid.py"], "/shared_train.py": ["/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/covid_chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/coronahack.py": ["/datasets/basic_dataset.py"]}
|
33,552,790
|
nvvaulin/medical_imaging
|
refs/heads/master
|
/models/basic_classifier.py
|
import torch
from torch import nn
import pytorch_lightning as pl
from pytorch_lightning.metrics.functional import auroc,accuracy
import numpy as np
from sklearn.metrics import roc_curve,roc_auc_score
import matplotlib.pyplot as plt
class BasicClassifierModel(pl.LightningModule):
    """LightningModule wrapping an image backbone for multilabel classification.

    Replaces the backbone's final layer (``classifier`` or ``fc``) with a
    ``Linear -> Sigmoid`` head sized to ``len(class_names)`` and trains with
    BCE loss.  Per-class ROC-AUC scalars and ROC-curve figures are logged at
    each train/val/test epoch end.
    """

    def __init__(self, backbone, class_names,
                 optimizer={'name':'Adam','params':{'lr':0.1}},
                 scheduler={'name':'MultiStepLR','params':{'gamma':0.1,'milestones':[100]}}):
        # NOTE(review): the dict defaults are shared across instances; they
        # are only read here, but callers should pass their own configs.
        super().__init__()
        self.scheduler = scheduler
        self.optimizer = optimizer
        self.backbone = backbone
        self.num_classes = len(class_names)
        # Swap the backbone's head for a sigmoid classifier over our classes
        # (DenseNet-style models expose `classifier`, ResNet-style `fc`).
        if hasattr(self.backbone, 'classifier'):
            self.backbone.classifier = nn.Sequential(nn.Linear(self.backbone.classifier.in_features, self.num_classes), nn.Sigmoid())
        else:
            self.backbone.fc = nn.Sequential(nn.Linear(self.backbone.fc.in_features, self.num_classes), nn.Sigmoid())
        self.loss = nn.BCELoss()
        self.class_names = class_names

    def forward(self, x):
        # BUG FIX: previously returned self.fc(self.backbone(x)), but self.fc
        # is never defined (the head was grafted onto the backbone above), so
        # forward() raised AttributeError.  The backbone already outputs
        # sigmoid scores, matching what _step uses.
        return self.backbone(x)

    def _step(self, type, batch, batch_idx):
        """Shared step: forward pass, BCE loss, loss logging; returns preds/targets."""
        x, y = batch
        y_hat = self.backbone(x)
        loss = self.loss(y_hat, y)
        self.log('%s_loss'%type, loss)
        return {'loss':loss,'target':y,'pred':y_hat.detach()}

    def _epoch_end(self, type, outputs):
        """Log per-class ROC-AUC and ROC curves over the whole epoch's outputs."""
        pred = np.concatenate([i['pred'].cpu().numpy() for i in outputs],0)
        target = np.concatenate([i['target'].cpu().numpy() for i in outputs],0)
        for i,n in enumerate(self.class_names):
            n = n.replace(' ','_')
            # ROC-AUC is undefined when only one class value is present.
            if len(np.unique(target[:,i])) == 1:
                continue
            fpr,tpr,_ = roc_curve(target[:,i],pred[:,i])
            fig = plt.figure(figsize=(10,10))
            roc_auc = roc_auc_score(target[:,i],pred[:,i])
            self.log('%s_%s_auc'%(type,n), roc_auc)
            plt.plot(fpr,tpr,label='ep-%d;roc_auc-%.2f'%(self.current_epoch,roc_auc))
            self.logger.experiment.add_figure('%s_%s_roc%d'%(type,n,self.current_epoch),fig)
            print('%s %s auc: %.3f'%(type,n,roc_auc))

    def training_step(self, batch, batch_idx):
        return self._step('train',batch,batch_idx)

    def validation_step(self, batch, batch_idx):
        return self._step('val',batch,batch_idx)

    def validation_epoch_end(self, outputs):
        self._epoch_end('val',outputs)

    def training_epoch_end(self, outputs):
        # BUG FIX: the hook Lightning invokes is `training_epoch_end`; the
        # old name `train_epoch_end` was never called, so train-epoch AUC
        # metrics were silently dropped.
        self._epoch_end('train',outputs)

    def test_epoch_end(self, outputs):
        self._epoch_end('test',outputs)

    def test_step(self, batch, batch_idx):
        return self._step('test',batch,batch_idx)

    def configure_optimizers(self):
        """Instantiate optimizer and LR scheduler by name from torch.optim."""
        optimizer = torch.optim.__dict__[self.optimizer['name']](self.parameters(), **self.optimizer['params'])
        lr_scheduler = torch.optim.lr_scheduler.__dict__[self.scheduler['name']](optimizer,**self.scheduler['params'])
        return {"optimizer": optimizer, "lr_scheduler": lr_scheduler, "monitor": "train_loss"}
|
{"/models/__init__.py": ["/models/basic_classifier.py", "/models/svd_densenet.py"], "/utils/__init__.py": ["/utils/load_object.py"], "/train.py": ["/config.py", "/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/covid_aid.py": ["/datasets/basic_dataset.py"], "/datasets/__init__.py": ["/datasets/coronahack.py", "/datasets/chest_xray.py", "/datasets/join_datasets.py", "/datasets/covid_chest_xray.py", "/datasets/covid_aid.py"], "/shared_train.py": ["/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/covid_chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/coronahack.py": ["/datasets/basic_dataset.py"]}
|
33,552,791
|
nvvaulin/medical_imaging
|
refs/heads/master
|
/config.py
|
from sacred import Experiment
from sacred.commands import save_config,_format_config
import os
import glob
import datetime
import shutil
import logging
# Module-level sacred Experiment; the pre_run_hook and config scope below
# register onto it (presumably imported by train.py as the entry point --
# verify against the trainer script).
ex = Experiment('default')
def mkdir(fpath):
    """Ensure the parent directory of file path ``fpath`` exists.

    Bug fixes: a path with no directory component previously called
    ``os.makedirs('')`` and raised FileNotFoundError; the exists/makedirs
    pair was also racy under concurrent callers. ``exist_ok=True`` plus a
    truthiness guard handles both.
    """
    dpath = os.path.dirname(fpath)
    if dpath:
        os.makedirs(dpath, exist_ok=True)
@ex.pre_run_hook
def set_up_loging(exp_path, _config, _run, loglevel='INFO'):
    """Sacred pre-run hook: snapshot sources, attach a file logger, dump config."""
    sources_dir = os.path.join(exp_path, 'scources')
    log_file = os.path.join(exp_path, 'log.txt')
    config_file = os.path.join(exp_path, 'config.json')
    # Copy every .py file (top level and one directory deep) into the
    # experiment folder so the run's code is preserved.
    for src in glob.glob('./*.py') + glob.glob('./*/*.py'):
        dst = os.path.join(sources_dir, src[2:])  # strip the leading './'
        mkdir(dst)
        shutil.copy(src, dst)
    # Mirror the run logger into <exp_path>/log.txt.
    mkdir(log_file)
    file_handler = logging.FileHandler(log_file)
    formatter = logging.Formatter(fmt='%(asctime)s %(levelname)s: %(message)s',
                                  datefmt='%m-%d %H:%M:%S')
    file_handler.setFormatter(formatter)
    run_logger = _run.run_logger
    run_logger.setLevel(loglevel)
    run_logger.addHandler(file_handler)
    # Persist the resolved config and echo it into the log.
    mkdir(config_file)
    save_config(_run.config, run_logger, config_file)
    run_logger.info(_format_config(_run.config, _run.config_modifications))
# Sacred config scope: every local assignment below becomes a tunable config
# entry, so the names and values here are the experiment's public interface.
@ex.config
def config():
    exp_root='exps'  # root folder for all experiment outputs
    exp_name='multilabel'
    version = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")  # timestamped run id
    exp_path = os.path.join(exp_root,exp_name,version)  # per-run output directory
    # optimizer/scheduler are looked up by `name` in torch.optim(.lr_scheduler)
    # and instantiated with `params` -- see BasicClassifierModel.configure_optimizers.
    optimizer = dict(name='SGD',params={'lr':0.01,'momentum':0.9,'weight_decay':1e-4})
    scheduler = dict(name='ReduceLROnPlateau',params={'factor':0.3,'patience':5,'min_lr':1e-5,'verbose':True})
    # scheduler = dict(name='MultiStepLR',params={'gamma':0.1,'milestones':[]})
    # Dataset class name plus its constructor kwargs; reduce_size triggers the
    # one-time image-resize cache in BasicDataset.
    dataset = dict(name='ChestXRay',params={'reduce_size':(256,256)})#'Coronahack')
    batch_size = 16
    input_size = (224,224)  # NOTE(review): consumer not visible here -- presumably the input crop size; confirm in train.py
    trainer = dict(  # NOTE(review): presumably forwarded to pytorch_lightning.Trainer -- confirm in train.py
        auto_select_gpus=True,
        gpus=1,
        max_epochs=100,
    )
    backbone = dict(name = 'densenet121',params={'pretrained':True})  # torchvision model name + kwargs
# def load_config(exp_name,config_path='config.yml',exp_root='exps',loglevel='INFO'):
# import yaml
# from pytorch_lightning import loggers as pl_loggers
# name = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
# exp_root = os.path.join(exp_root, exp_name,name)
# config = yaml.load(open(config_path))
#
# lpath = os.path.join(exp_root, 'log.txt')
# cpath = os.path.join(exp_root, 'config.yml')
#
# for src in (glob.glob('./*.py') + glob.glob('./*/*.py')):
# dst = os.path.join(exp_root,'scources', src[2:])
# mkdir(dst)
# shutil.copy(src, dst)
#
# mkdir(lpath)
# handler = logging.FileHandler(lpath)
# handler.setFormatter(logging.Formatter(fmt='%(asctime)s %(levelname)s: %(message)s',
# datefmt='%m-%d %H:%M:%S'))
# _run.run_logger.setLevel(loglevel)
# _run.run_logger.addHandler(handler)
#
# mkdir(cpath)
# yaml.dump(config,open(cpath,'w'))
# tb_logger = pl_loggers.TensorBoardLogger(exp_root)
# return tb_logger
#
# def build_object(_config,_module,**kwargs):
# if isinstance(config,str):
# return module.__dict__[config]
# elif isinstance(config,dict):
# assert len(config) == 1,'wrong'
# return module.__dict__[list(config.keys())[0]](**list(config.values())[0])
# else:
# assert False,'config must be str or dict, provided '+str(type(config))
|
{"/models/__init__.py": ["/models/basic_classifier.py", "/models/svd_densenet.py"], "/utils/__init__.py": ["/utils/load_object.py"], "/train.py": ["/config.py", "/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/covid_aid.py": ["/datasets/basic_dataset.py"], "/datasets/__init__.py": ["/datasets/coronahack.py", "/datasets/chest_xray.py", "/datasets/join_datasets.py", "/datasets/covid_chest_xray.py", "/datasets/covid_aid.py"], "/shared_train.py": ["/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/covid_chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/coronahack.py": ["/datasets/basic_dataset.py"]}
|
33,552,792
|
nvvaulin/medical_imaging
|
refs/heads/master
|
/datasets/basic_dataset.py
|
from torchvision import datasets
import numpy as np
import os
import cv2
import shutil
from tqdm import tqdm
class BasicDataset(datasets.VisionDataset):
    """Vision dataset built from explicit sample paths and string labels.

    Labels are either one name per sample (integer class indices) or, with
    ``multilabel=True``, one list of names per sample (float32 multi-hot
    rows).  When ``reduce_size`` is given, images are resized once into a
    'resized_W_H' cache directory under ``root`` and loaded from there.
    """

    def __init__(self, root, samples, named_labels, transforms=None, transform=None, target_transform=None, reduce_size=None, multilabel=False):
        self.samples = samples
        parse = self._parse_multilabel if multilabel else self._parse_one_hot_labels
        self.label_names, self.labels = parse(named_labels)
        self.loader = datasets.folder.default_loader
        if reduce_size is not None:
            reduce_size = tuple(reduce_size)
            cache_root = os.path.join(root, 'resized_%d_%d' % reduce_size)
            # Resize once; presence of the last sample marks a complete cache.
            if not os.path.exists(os.path.join(cache_root, self.samples[-1])):
                self._reduce_size(root, reduce_size)
            root = cache_root
        super().__init__(root, transforms, transform, target_transform)

    def _parse_one_hot_labels(self, named_labels):
        """Map label names to integer class indices; returns (names, indices)."""
        return np.unique(named_labels, return_inverse=True)

    def _parse_multilabel(self, named_labels):
        """Build float32 multi-hot rows over the sorted union of label names."""
        named_labels = [np.array(nl) for nl in named_labels]
        label_names = np.sort(np.unique(np.concatenate(named_labels)))
        rows = []
        for nl in named_labels:
            row = np.zeros(len(label_names), dtype=np.float32)
            row[np.searchsorted(label_names, nl)] = 1.
            rows.append(row)
        return label_names, np.array(rows)

    def __getitem__(self, item):
        path = os.path.join(self.root, self.samples[item])
        sample = self.loader(path)
        label = self.labels[item]
        if self.transform is not None:
            sample = self.transform(sample)
        if self.target_transform is not None:
            label = self.target_transform(label)
        return sample, label

    @property
    def num_classes(self):
        return len(self.label_names)

    def __len__(self):
        return len(self.labels)

    def _reduce_size(self, root, imsize):
        """Populate the 'resized_W_H' cache: shrink oversized images, copy the rest."""
        print('reduce size of dataset')
        out_prefix = os.path.join(root, 'resized_%d_%d' % imsize)
        for rel in tqdm(self.samples):
            src = os.path.join(root, rel)
            dst = os.path.join(out_prefix, rel)
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            im = cv2.imread(src)
            # Only shrink images larger than the target; copy the rest as-is.
            if im.shape[0] > imsize[1] or im.shape[1] > imsize[0]:
                cv2.imwrite(dst, cv2.resize(im, imsize))
            else:
                shutil.copy(src, dst)
|
{"/models/__init__.py": ["/models/basic_classifier.py", "/models/svd_densenet.py"], "/utils/__init__.py": ["/utils/load_object.py"], "/train.py": ["/config.py", "/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/covid_aid.py": ["/datasets/basic_dataset.py"], "/datasets/__init__.py": ["/datasets/coronahack.py", "/datasets/chest_xray.py", "/datasets/join_datasets.py", "/datasets/covid_chest_xray.py", "/datasets/covid_aid.py"], "/shared_train.py": ["/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/covid_chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/coronahack.py": ["/datasets/basic_dataset.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.