after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def __init__(self, ds, *args, **kwargs):
self._vector_fields = dict(self._vector_fields)
self._fields = ds._field_spec
self._ptypes = ds._ptype_spec
self.data_files = set([])
gformat, endianswap = ds._header.gadget_format
# gadget format 1 original, 2 with block name
self._format = gformat
self._endian = endianswap
super(IOHandlerGadgetBinary, self).__init__(ds, *args, **kwargs)
|
def __init__(self, ds, *args, **kwargs):
self._vector_fields = dict(self._vector_fields)
self._fields = ds._field_spec
self._ptypes = ds._ptype_spec
self.data_files = set([])
gformat = _get_gadget_format(ds.parameter_filename, ds._header_size)
# gadget format 1 original, 2 with block name
self._format = gformat[0]
self._endian = gformat[1]
super(IOHandlerGadgetBinary, self).__init__(ds, *args, **kwargs)
|
https://github.com/yt-project/yt/issues/1846
|
Traceback (most recent call last):
File "./plot_solution.py", line 519, in <module>
pf.add_field(("gas", "density_squared"), function=_density_squared, units="g**2/cm**6")
File "/usr/local/lib/python2.7/dist-packages/yt/data_objects/static_output.py", line 1191, in add_field
self.index
File "/usr/local/lib/python2.7/dist-packages/yt/data_objects/static_output.py", line 504, in index
self, dataset_type=self.dataset_type)
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/particle_geometry_handler.py", line 39, in __init__
super(ParticleIndex, self).__init__(ds, dataset_type)
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/geometry_handler.py", line 50, in __init__
self._setup_geometry()
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/particle_geometry_handler.py", line 50, in _setup_geometry
self._initialize_particle_handler()
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/particle_geometry_handler.py", line 104, in _initialize_particle_handler
self._initialize_indices()
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/particle_geometry_handler.py", line 134, in _initialize_indices
self.io._initialize_index(data_file, self.regions)
File "/usr/local/lib/python2.7/dist-packages/yt/frontends/gadget/io.py", line 351, in _initialize_index
data_file, count, 0, regions, DLE, DRE)
File "/usr/local/lib/python2.7/dist-packages/yt/frontends/gadget/io.py", line 334, in _get_morton_from_position
pp = np.fromfile(f, dtype=self._endian + self._float_type,
TypeError: cannot concatenate 'str' and 'int' objects
|
TypeError
|
def _initialize_index(self, data_file, regions):
DLE = data_file.ds.domain_left_edge
DRE = data_file.ds.domain_right_edge
self._float_type = data_file.ds._header.float_type
if self.index_ptype == "all":
count = sum(data_file.total_particles.values())
return self._get_morton_from_position(data_file, count, 0, regions, DLE, DRE)
else:
idpos = self._ptypes.index(self.index_ptype)
count = data_file.total_particles.get(self.index_ptype)
account = [0] + [data_file.total_particles.get(ptype) for ptype in self._ptypes]
account = np.cumsum(account)
return self._get_morton_from_position(
data_file, account, account[idpos], regions, DLE, DRE
)
|
def _initialize_index(self, data_file, regions):
DLE = data_file.ds.domain_left_edge
DRE = data_file.ds.domain_right_edge
self._float_type = data_file.ds._validate_header(
data_file.filename, data_file.ds._header_size
)[1]
if self.index_ptype == "all":
count = sum(data_file.total_particles.values())
return self._get_morton_from_position(data_file, count, 0, regions, DLE, DRE)
else:
idpos = self._ptypes.index(self.index_ptype)
count = data_file.total_particles.get(self.index_ptype)
account = [0] + [data_file.total_particles.get(ptype) for ptype in self._ptypes]
account = np.cumsum(account)
return self._get_morton_from_position(
data_file, account, account[idpos], regions, DLE, DRE
)
|
https://github.com/yt-project/yt/issues/1846
|
Traceback (most recent call last):
File "./plot_solution.py", line 519, in <module>
pf.add_field(("gas", "density_squared"), function=_density_squared, units="g**2/cm**6")
File "/usr/local/lib/python2.7/dist-packages/yt/data_objects/static_output.py", line 1191, in add_field
self.index
File "/usr/local/lib/python2.7/dist-packages/yt/data_objects/static_output.py", line 504, in index
self, dataset_type=self.dataset_type)
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/particle_geometry_handler.py", line 39, in __init__
super(ParticleIndex, self).__init__(ds, dataset_type)
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/geometry_handler.py", line 50, in __init__
self._setup_geometry()
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/particle_geometry_handler.py", line 50, in _setup_geometry
self._initialize_particle_handler()
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/particle_geometry_handler.py", line 104, in _initialize_particle_handler
self._initialize_indices()
File "/usr/local/lib/python2.7/dist-packages/yt/geometry/particle_geometry_handler.py", line 134, in _initialize_indices
self.io._initialize_index(data_file, self.regions)
File "/usr/local/lib/python2.7/dist-packages/yt/frontends/gadget/io.py", line 351, in _initialize_index
data_file, count, 0, regions, DLE, DRE)
File "/usr/local/lib/python2.7/dist-packages/yt/frontends/gadget/io.py", line 334, in _get_morton_from_position
pp = np.fromfile(f, dtype=self._endian + self._float_type,
TypeError: cannot concatenate 'str' and 'int' objects
|
TypeError
|
def __init__(self, outputs, indices, fields=None, suppress_logging=False):
indices.sort() # Just in case the caller wasn't careful
self.field_data = YTFieldData()
self.data_series = outputs
self.masks = []
self.sorts = []
self.array_indices = []
self.indices = indices
self.num_indices = len(indices)
self.num_steps = len(outputs)
self.times = []
self.suppress_logging = suppress_logging
if fields is None:
fields = []
fields = list(OrderedDict.fromkeys(fields))
if self.suppress_logging:
old_level = int(ytcfg.get("yt", "loglevel"))
mylog.setLevel(40)
fds = {}
ds_first = self.data_series[0]
dd_first = ds_first.all_data()
idx_field = dd_first._determine_fields("particle_index")[0]
for field in ("particle_position_%s" % ax for ax in "xyz"):
fds[field] = dd_first._determine_fields(field)[0]
my_storage = {}
pbar = get_pbar("Constructing trajectory information", len(self.data_series))
for i, (sto, ds) in enumerate(self.data_series.piter(storage=my_storage)):
dd = ds.all_data()
newtags = dd[idx_field].d.astype("int64")
mask = np.in1d(newtags, indices, assume_unique=True)
sort = np.argsort(newtags[mask])
array_indices = np.where(np.in1d(indices, newtags, assume_unique=True))[0]
self.array_indices.append(array_indices)
self.masks.append(mask)
self.sorts.append(sort)
pfields = {}
for field in ("particle_position_%s" % ax for ax in "xyz"):
pfields[field] = dd[fds[field]].ndarray_view()[mask][sort]
sto.result_id = ds.parameter_filename
sto.result = (ds.current_time, array_indices, pfields)
pbar.update(i)
pbar.finish()
if self.suppress_logging:
mylog.setLevel(old_level)
times = []
for fn, (time, indices, pfields) in sorted(my_storage.items()):
times.append(time)
self.times = self.data_series[0].arr([time for time in times], times[0].units)
self.particle_fields = []
output_field = np.empty((self.num_indices, self.num_steps))
output_field.fill(np.nan)
for field in ("particle_position_%s" % ax for ax in "xyz"):
for i, (fn, (time, indices, pfields)) in enumerate(sorted(my_storage.items())):
try:
# This will fail if particles ids are
# duplicate. This is due to the fact that the rhs
# would then have a different shape as the lhs
output_field[indices, i] = pfields[field]
except ValueError:
raise YTIllDefinedParticleData(
"This dataset contains duplicate particle indices!"
)
self.field_data[field] = array_like_field(
dd_first, output_field.copy(), fds[field]
)
self.particle_fields.append(field)
# Instantiate fields the caller requested
self._get_data(fields)
|
def __init__(self, outputs, indices, fields=None, suppress_logging=False):
indices.sort() # Just in case the caller wasn't careful
self.field_data = YTFieldData()
self.data_series = outputs
self.masks = []
self.sorts = []
self.array_indices = []
self.indices = indices
self.num_indices = len(indices)
self.num_steps = len(outputs)
self.times = []
self.suppress_logging = suppress_logging
if fields is None:
fields = []
fields = list(OrderedDict.fromkeys(fields))
if self.suppress_logging:
old_level = int(ytcfg.get("yt", "loglevel"))
mylog.setLevel(40)
fds = {}
ds_first = self.data_series[0]
dd_first = ds_first.all_data()
idx_field = dd_first._determine_fields("particle_index")[0]
for field in ("particle_position_%s" % ax for ax in "xyz"):
fds[field] = dd_first._determine_fields(field)[0]
my_storage = {}
pbar = get_pbar("Constructing trajectory information", len(self.data_series))
for i, (sto, ds) in enumerate(self.data_series.piter(storage=my_storage)):
dd = ds.all_data()
newtags = dd[idx_field].d.astype("int64")
mask = np.in1d(newtags, indices, assume_unique=True)
sort = np.argsort(newtags[mask])
array_indices = np.where(np.in1d(indices, newtags, assume_unique=True))[0]
self.array_indices.append(array_indices)
self.masks.append(mask)
self.sorts.append(sort)
pfields = {}
for field in ("particle_position_%s" % ax for ax in "xyz"):
pfields[field] = dd[fds[field]].ndarray_view()[mask][sort]
sto.result_id = ds.parameter_filename
sto.result = (ds.current_time, array_indices, pfields)
pbar.update(i)
pbar.finish()
if self.suppress_logging:
mylog.setLevel(old_level)
times = []
for fn, (time, indices, pfields) in sorted(my_storage.items()):
times.append(time)
self.times = self.data_series[0].arr([time for time in times], times[0].units)
self.particle_fields = []
output_field = np.empty((self.num_indices, self.num_steps))
output_field.fill(np.nan)
for field in ("particle_position_%s" % ax for ax in "xyz"):
for i, (fn, (time, indices, pfields)) in enumerate(sorted(my_storage.items())):
output_field[indices, i] = pfields[field]
self.field_data[field] = array_like_field(
dd_first, output_field.copy(), fds[field]
)
self.particle_fields.append(field)
# Instantiate fields the caller requested
self._get_data(fields)
|
https://github.com/yt-project/yt/issues/1541
|
ValueError Traceback (most recent call last)
~/Documents/These/ramses_tests/MC_tracers/test_feedback/plot.py in <module>()
40 dss = yt.DatasetSeries(outputs[:5], **load_kwa)
41
---> 42 trajs = dss.particle_trajectories(indices)
~/Documents/prog/yt/yt/data_objects/time_series.py in particle_trajectories(self, indices, fields, suppress_logging)
435 >>> print t["particle_velocity_x"].max(), t["particle_velocity_x"].min()
436 """
--> 437 return ParticleTrajectories(self, indices, fields=fields, suppress_logging=suppress_logging)
438
439 class TimeSeriesQuantitiesContainer(object):
~/Documents/prog/yt/yt/data_objects/particle_trajectories.py in __init__(self, outputs, indices, fields, suppress_logging)
121 for field in ("particle_position_%s" % ax for ax in "xyz"):
122 for i, (fn, (time, indices, pfields)) in enumerate(sorted(my_storage.items())):
--> 123 output_field[indices, i] = pfields[field]
124 self.field_data[field] = array_like_field(
125 dd_first, output_field.copy(), fds[field])
ValueError: shape mismatch: value array of shape (10096,) could not be broadcast to indexing result of shape (96,)
|
ValueError
|
def _get_particle_type_counts(self):
"""Reads the active number of particles for every species.
Returns
-------
dict
keys are ptypes
values are integer counts of the ptype
"""
result = {}
f = self.dataset._handle
bp = self.dataset.base_path
pp = self.dataset.particles_path
try:
for ptype in self.ds.particle_types_raw:
if str(ptype) == "io":
spec = list(f[bp + pp].keys())[0]
else:
spec = ptype
axis = list(f[bp + pp + "/" + spec + "/position"].keys())[0]
pos = f[bp + pp + "/" + spec + "/position/" + axis]
if is_const_component(pos):
result[ptype] = pos.attrs["shape"]
else:
result[ptype] = pos.len()
except KeyError:
result["io"] = 0
return result
|
def _get_particle_type_counts(self):
"""Reads the active number of particles for every species.
Returns
-------
dict
keys are ptypes
values are integer counts of the ptype
"""
result = {}
f = self.dataset._handle
bp = self.dataset.base_path
pp = self.dataset.particles_path
for ptype in self.ds.particle_types_raw:
if str(ptype) == "io":
spec = list(f[bp + pp].keys())[0]
else:
spec = ptype
axis = list(f[bp + pp + "/" + spec + "/position"].keys())[0]
pos = f[bp + pp + "/" + spec + "/position/" + axis]
if is_const_component(pos):
result[ptype] = pos.attrs["shape"]
else:
result[ptype] = pos.len()
return result
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def _detect_output_fields(self):
"""Populates ``self.field_list`` with native fields (mesh and particle) on disk.
Each entry is a tuple of two strings. The first element is the on-disk fluid type or particle type.
The second element is the name of the field in yt. This string is later used for accessing the data.
Convention suggests that the on-disk fluid type should be "openPMD",
the on-disk particle type (for a single species of particles) is "io"
or (for multiple species of particles) the particle name on-disk.
"""
f = self.dataset._handle
bp = self.dataset.base_path
mp = self.dataset.meshes_path
pp = self.dataset.particles_path
mesh_fields = []
try:
meshes = f[bp + mp]
for mname in meshes.keys():
try:
mesh = meshes[mname]
for axis in mesh.keys():
mesh_fields.append(mname.replace("_", "-") + "_" + axis)
except AttributeError:
# This is a h5.Dataset (i.e. no axes)
mesh_fields.append(mname.replace("_", "-"))
except KeyError:
pass
self.field_list = [("openPMD", str(field)) for field in mesh_fields]
particle_fields = []
try:
particles = f[bp + pp]
for pname in particles.keys():
species = particles[pname]
for recname in species.keys():
record = species[recname]
if is_const_component(record):
# Record itself (e.g. particle_mass) is constant
particle_fields.append(
pname.replace("_", "-") + "_" + recname.replace("_", "-")
)
elif "particlePatches" not in recname:
try:
# Create a field for every axis (x,y,z) of every property (position)
# of every species (electrons)
axes = list(record.keys())
if str(recname) == "position":
recname = "positionCoarse"
for axis in axes:
particle_fields.append(
pname.replace("_", "-")
+ "_"
+ recname.replace("_", "-")
+ "_"
+ axis
)
except AttributeError:
# Record is a dataset, does not have axes (e.g. weighting)
particle_fields.append(
pname.replace("_", "-") + "_" + recname.replace("_", "-")
)
pass
else:
pass
if len(list(particles.keys())) > 1:
# There is more than one particle species, use the specific names as field types
self.field_list.extend(
[
(
str(field).split("_")[0],
("particle_" + "_".join(str(field).split("_")[1:])),
)
for field in particle_fields
]
)
else:
# Only one particle species, fall back to "io"
self.field_list.extend(
[
("io", ("particle_" + "_".join(str(field).split("_")[1:])))
for field in particle_fields
]
)
except KeyError:
pass
|
def _detect_output_fields(self):
"""Populates ``self.field_list`` with native fields (mesh and particle) on disk.
Each entry is a tuple of two strings. The first element is the on-disk fluid type or particle type.
The second element is the name of the field in yt. This string is later used for accessing the data.
Convention suggests that the on-disk fluid type should be "openPMD",
the on-disk particle type (for a single species of particles) is "io"
or (for multiple species of particles) the particle name on-disk.
"""
f = self.dataset._handle
bp = self.dataset.base_path
mp = self.dataset.meshes_path
pp = self.dataset.particles_path
mesh_fields = []
try:
for field in f[bp + mp].keys():
try:
for axis in f[bp + mp + field].keys():
mesh_fields.append(field.replace("_", "-") + "_" + axis)
except AttributeError:
# This is a h5.Dataset (i.e. no axes)
mesh_fields.append(field.replace("_", "-"))
except KeyError:
# There are no mesh fields
pass
self.field_list = [("openPMD", str(field)) for field in mesh_fields]
particle_fields = []
try:
for species in f[bp + pp].keys():
for record in f[bp + pp + species].keys():
if is_const_component(f[bp + pp + species + "/" + record]):
# Record itself (e.g. particle_mass) is constant
particle_fields.append(
species.replace("_", "-") + "_" + record.replace("_", "-")
)
elif "particlePatches" not in record:
try:
# Create a field for every axis (x,y,z) of every property (position)
# of every species (electrons)
axes = list(f[bp + pp + species + "/" + record].keys())
if str(record) == "position":
record = "positionCoarse"
for axis in axes:
particle_fields.append(
species.replace("_", "-")
+ "_"
+ record.replace("_", "-")
+ "_"
+ axis
)
except AttributeError:
# Record is a dataset, does not have axes (e.g. weighting)
particle_fields.append(
species.replace("_", "-") + "_" + record.replace("_", "-")
)
pass
else:
pass
if len(list(f[bp + pp].keys())) > 1:
# There is more than one particle species, use the specific names as field types
self.field_list.extend(
[
(
str(field).split("_")[0],
("particle_" + "_".join(str(field).split("_")[1:])),
)
for field in particle_fields
]
)
else:
# Only one particle species, fall back to "io"
self.field_list.extend(
[
("io", ("particle_" + "_".join(str(field).split("_")[1:])))
for field in particle_fields
]
)
except KeyError:
# There are no particle fields
pass
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def _count_grids(self):
"""Sets ``self.num_grids`` to be the total number of grids in the simulation.
The number of grids is determined by their respective memory footprint.
"""
f = self.dataset._handle
bp = self.dataset.base_path
mp = self.dataset.meshes_path
pp = self.dataset.particles_path
self.meshshapes = {}
self.numparts = {}
self.num_grids = 0
try:
meshes = f[bp + mp]
for mname in meshes.keys():
mesh = meshes[mname]
if type(mesh) is h5.Group:
shape = mesh[list(mesh.keys())[0]].shape
else:
shape = mesh.shape
spacing = tuple(mesh.attrs["gridSpacing"])
offset = tuple(mesh.attrs["gridGlobalOffset"])
unit_si = mesh.attrs["gridUnitSI"]
self.meshshapes[mname] = (shape, spacing, offset, unit_si)
except KeyError:
pass
try:
particles = f[bp + pp]
for pname in particles.keys():
species = particles[pname]
if "particlePatches" in species.keys():
for patch, size in enumerate(species["/particlePatches/numParticles"]):
self.numparts[pname + "#" + str(patch)] = size
else:
axis = list(species["/position"].keys())[0]
if is_const_component(species["/position/" + axis]):
self.numparts[pname] = species["/position/" + axis].attrs["shape"]
else:
self.numparts[pname] = species["/position/" + axis].len()
except KeyError:
pass
# Limit values per grid by resulting memory footprint
self.vpg = int(self.dataset.gridsize / 4) # 4Byte per value (f32)
# Meshes of the same size do not need separate chunks
for shape, spacing, offset, unit_si in set(self.meshshapes.values()):
self.num_grids += min(shape[0], int(np.ceil(reduce(mul, shape) * self.vpg**-1)))
# Same goes for particle chunks if they are not inside particlePatches
patches = {}
no_patches = {}
for k, v in self.numparts.items():
if "#" in k:
patches[k] = v
else:
no_patches[k] = v
for size in set(no_patches.values()):
self.num_grids += int(np.ceil(size * self.vpg**-1))
for size in patches.values():
self.num_grids += int(np.ceil(size * self.vpg**-1))
|
def _count_grids(self):
"""Sets ``self.num_grids`` to be the total number of grids in the simulation.
The number of grids is determined by their respective memory footprint.
"""
f = self.dataset._handle
bp = self.dataset.base_path
mp = self.dataset.meshes_path
pp = self.dataset.particles_path
self.meshshapes = {}
self.numparts = {}
self.num_grids = 0
for mesh in f[bp + mp].keys():
if type(f[bp + mp + mesh]) is h5.Group:
shape = f[bp + mp + mesh + "/" + list(f[bp + mp + mesh].keys())[0]].shape
else:
shape = f[bp + mp + mesh].shape
spacing = tuple(f[bp + mp + mesh].attrs["gridSpacing"])
offset = tuple(f[bp + mp + mesh].attrs["gridGlobalOffset"])
unit_si = f[bp + mp + mesh].attrs["gridUnitSI"]
self.meshshapes[mesh] = (shape, spacing, offset, unit_si)
for species in f[bp + pp].keys():
if "particlePatches" in f[bp + pp + "/" + species].keys():
for patch, size in enumerate(
f[bp + pp + "/" + species + "/particlePatches/numParticles"]
):
self.numparts[species + "#" + str(patch)] = size
else:
axis = list(f[bp + pp + species + "/position"].keys())[0]
if is_const_component(f[bp + pp + species + "/position/" + axis]):
self.numparts[species] = f[
bp + pp + species + "/position/" + axis
].attrs["shape"]
else:
self.numparts[species] = f[
bp + pp + species + "/position/" + axis
].len()
# Limit values per grid by resulting memory footprint
self.vpg = int(self.dataset.gridsize / 4) # 4Byte per value (f32)
# Meshes of the same size do not need separate chunks
for shape, spacing, offset, unit_si in set(self.meshshapes.values()):
self.num_grids += min(shape[0], int(np.ceil(reduce(mul, shape) * self.vpg**-1)))
# Same goes for particle chunks if they are not inside particlePatches
patches = {}
no_patches = {}
for k, v in self.numparts.items():
if "#" in k:
patches[k] = v
else:
no_patches[k] = v
for size in set(no_patches.values()):
self.num_grids += int(np.ceil(size * self.vpg**-1))
for size in patches.values():
self.num_grids += int(np.ceil(size * self.vpg**-1))
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def _parse_index(self):
"""Fills each grid with appropriate properties (extent, dimensions, ...)
This calculates the properties of every OpenPMDGrid based on the total number of grids in the simulation.
The domain is divided into ``self.num_grids`` (roughly) equally sized chunks along the x-axis.
``grid_levels`` is always equal to 0 since we only have one level of refinement in openPMD.
Notes
-----
``self.grid_dimensions`` is rounded to the nearest integer. Grid edges are calculated from this dimension.
Grids with dimensions [0, 0, 0] are particle only. The others do not have any particles affiliated with them.
"""
f = self.dataset._handle
bp = self.dataset.base_path
pp = self.dataset.particles_path
self.grid_levels.flat[:] = 0
self.grids = np.empty(self.num_grids, dtype="object")
grid_index_total = 0
# Mesh grids
for mesh in set(self.meshshapes.values()):
(shape, spacing, offset, unit_si) = mesh
shape = np.asarray(shape)
spacing = np.asarray(spacing)
offset = np.asarray(offset)
# Total dimension of this grid
domain_dimension = np.asarray(shape, dtype=np.int32)
domain_dimension = np.append(
domain_dimension, np.ones(3 - len(domain_dimension))
)
# Number of grids of this shape
num_grids = min(shape[0], int(np.ceil(reduce(mul, shape) * self.vpg**-1)))
gle = offset * unit_si # self.dataset.domain_left_edge
gre = (
domain_dimension[: spacing.size] * unit_si * spacing + gle
) # self.dataset.domain_right_edge
gle = np.append(gle, np.zeros(3 - len(gle)))
gre = np.append(gre, np.ones(3 - len(gre)))
grid_dim_offset = np.linspace(
0, domain_dimension[0], num_grids + 1, dtype=np.int32
)
grid_edge_offset = (
grid_dim_offset * np.float(domain_dimension[0]) ** -1 * (gre[0] - gle[0])
+ gle[0]
)
mesh_names = []
for mname, mdata in self.meshshapes.items():
if mesh == mdata:
mesh_names.append(str(mname))
prev = 0
for grid in np.arange(num_grids):
self.grid_dimensions[grid_index_total] = domain_dimension
self.grid_dimensions[grid_index_total][0] = (
grid_dim_offset[grid + 1] - grid_dim_offset[grid]
)
self.grid_left_edge[grid_index_total] = gle
self.grid_left_edge[grid_index_total][0] = grid_edge_offset[grid]
self.grid_right_edge[grid_index_total] = gre
self.grid_right_edge[grid_index_total][0] = grid_edge_offset[grid + 1]
self.grid_particle_count[grid_index_total] = 0
self.grids[grid_index_total] = self.grid(
grid_index_total,
self,
0,
fi=prev,
fo=self.grid_dimensions[grid_index_total][0],
ft=mesh_names,
)
prev += self.grid_dimensions[grid_index_total][0]
grid_index_total += 1
handled_ptypes = []
# Particle grids
for species, count in self.numparts.items():
if "#" in species:
# This is a particlePatch
spec = species.split("#")
patch = f[bp + pp + "/" + spec[0] + "/particlePatches"]
domain_dimension = np.ones(3, dtype=np.int32)
for ind, axis in enumerate(list(patch["extent"].keys())):
domain_dimension[ind] = patch["extent/" + axis].value[int(spec[1])]
num_grids = int(np.ceil(count * self.vpg**-1))
gle = []
for axis in patch["offset"].keys():
gle.append(get_component(patch, "offset/" + axis, int(spec[1]), 1)[0])
gle = np.asarray(gle)
gle = np.append(gle, np.zeros(3 - len(gle)))
gre = []
for axis in patch["extent"].keys():
gre.append(get_component(patch, "extent/" + axis, int(spec[1]), 1)[0])
gre = np.asarray(gre)
gre = np.append(gre, np.ones(3 - len(gre)))
np.add(gle, gre, gre)
npo = patch["numParticlesOffset"].value.item(int(spec[1]))
particle_count = np.linspace(
npo, npo + count, num_grids + 1, dtype=np.int32
)
particle_names = [str(spec[0])]
elif str(species) not in handled_ptypes:
domain_dimension = self.dataset.domain_dimensions
num_grids = int(np.ceil(count * self.vpg**-1))
gle = self.dataset.domain_left_edge
gre = self.dataset.domain_right_edge
particle_count = np.linspace(0, count, num_grids + 1, dtype=np.int32)
particle_names = []
for pname, size in self.numparts.items():
if size == count:
# Since this is not part of a particlePatch, we can include multiple same-sized ptypes
particle_names.append(str(pname))
handled_ptypes.append(str(pname))
else:
# A grid with this exact particle count has already been created
continue
for grid in np.arange(num_grids):
self.grid_dimensions[grid_index_total] = domain_dimension
self.grid_left_edge[grid_index_total] = gle
self.grid_right_edge[grid_index_total] = gre
self.grid_particle_count[grid_index_total] = (
particle_count[grid + 1] - particle_count[grid]
) * len(particle_names)
self.grids[grid_index_total] = self.grid(
grid_index_total,
self,
0,
pi=particle_count[grid],
po=particle_count[grid + 1] - particle_count[grid],
pt=particle_names,
)
grid_index_total += 1
|
def _parse_index(self):
"""Fills each grid with appropriate properties (extent, dimensions, ...)
This calculates the properties of every OpenPMDGrid based on the total number of grids in the simulation.
The domain is divided into ``self.num_grids`` (roughly) equally sized chunks along the x-axis.
``grid_levels`` is always equal to 0 since we only have one level of refinement in openPMD.
Notes
-----
``self.grid_dimensions`` is rounded to the nearest integer. Grid edges are calculated from this dimension.
Grids with dimensions [0, 0, 0] are particle only. The others do not have any particles affiliated with them.
"""
f = self.dataset._handle
bp = self.dataset.base_path
pp = self.dataset.particles_path
self.grid_levels.flat[:] = 0
self.grids = np.empty(self.num_grids, dtype="object")
grid_index_total = 0
# Mesh grids
for mesh in set(self.meshshapes.values()):
(shape, spacing, offset, unit_si) = mesh
shape = np.asarray(shape)
spacing = np.asarray(spacing)
offset = np.asarray(offset)
# Total dimension of this grid
domain_dimension = np.asarray(shape, dtype=np.int32)
domain_dimension = np.append(
domain_dimension, np.ones(3 - len(domain_dimension))
)
# Number of grids of this shape
num_grids = min(shape[0], int(np.ceil(reduce(mul, shape) * self.vpg**-1)))
gle = offset * unit_si # self.dataset.domain_left_edge
gre = (
domain_dimension[: spacing.size] * unit_si * spacing + gle
) # self.dataset.domain_right_edge
gle = np.append(gle, np.zeros(3 - len(gle)))
gre = np.append(gre, np.ones(3 - len(gre)))
grid_dim_offset = np.linspace(
0, domain_dimension[0], num_grids + 1, dtype=np.int32
)
grid_edge_offset = (
grid_dim_offset * np.float(domain_dimension[0]) ** -1 * (gre[0] - gle[0])
+ gle[0]
)
mesh_names = []
for mname, mdata in self.meshshapes.items():
if mesh == mdata:
mesh_names.append(str(mname))
prev = 0
for grid in np.arange(num_grids):
self.grid_dimensions[grid_index_total] = domain_dimension
self.grid_dimensions[grid_index_total][0] = (
grid_dim_offset[grid + 1] - grid_dim_offset[grid]
)
self.grid_left_edge[grid_index_total] = gle
self.grid_left_edge[grid_index_total][0] = grid_edge_offset[grid]
self.grid_right_edge[grid_index_total] = gre
self.grid_right_edge[grid_index_total][0] = grid_edge_offset[grid + 1]
self.grid_particle_count[grid_index_total] = 0
self.grids[grid_index_total] = self.grid(
grid_index_total,
self,
0,
fi=prev,
fo=self.grid_dimensions[grid_index_total][0],
ft=mesh_names,
)
prev += self.grid_dimensions[grid_index_total][0]
grid_index_total += 1
handled_ptypes = []
# Particle grids
for species, count in self.numparts.items():
if "#" in species:
# This is a particlePatch
spec = species.split("#")
patch = f[bp + pp + "/" + spec[0] + "/particlePatches"]
num_grids = int(np.ceil(count * self.vpg**-1))
gle = []
for axis in patch["offset"].keys():
gle.append(get_component(patch, "offset/" + axis, int(spec[1]), 1)[0])
gle = np.asarray(gle)
gle = np.append(gle, np.zeros(3 - len(gle)))
gre = []
for axis in patch["extent"].keys():
gre.append(get_component(patch, "extent/" + axis, int(spec[1]), 1)[0])
gre = np.asarray(gre)
gre = np.append(gre, np.ones(3 - len(gre)))
np.add(gle, gre, gre)
npo = patch["numParticlesOffset"].value.item(int(spec[1]))
particle_count = np.linspace(
npo, npo + count, num_grids + 1, dtype=np.int32
)
particle_names = [str(spec[0])]
elif str(species) not in handled_ptypes:
num_grids = int(np.ceil(count * self.vpg**-1))
gle = self.dataset.domain_left_edge
gre = self.dataset.domain_right_edge
particle_count = np.linspace(0, count, num_grids + 1, dtype=np.int32)
particle_names = []
for pname, size in self.numparts.items():
if size == count:
# Since this is not part of a particlePatch, we can include multiple same-sized ptypes
particle_names.append(str(pname))
handled_ptypes.append(str(pname))
else:
# A grid with this exact particle count has already been created
continue
for grid in np.arange(num_grids):
self.grid_dimensions[grid_index_total] = [
0,
0,
0,
] # Counted as mesh-size, thus no dimensional extent
self.grid_left_edge[grid_index_total] = gle
self.grid_right_edge[grid_index_total] = gre
self.grid_particle_count[grid_index_total] = (
particle_count[grid + 1] - particle_count[grid]
) * len(particle_names)
self.grids[grid_index_total] = self.grid(
grid_index_total,
self,
0,
pi=particle_count[grid],
po=particle_count[grid + 1] - particle_count[grid],
pt=particle_names,
)
grid_index_total += 1
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def __init__(
self,
filename,
dataset_type="openPMD",
storage_filename=None,
units_override=None,
unit_system="mks",
**kwargs,
):
self._handle = HDF5FileHandler(filename)
self.gridsize = kwargs.pop("open_pmd_virtual_gridsize", 10**9)
self.standard_version = StrictVersion(self._handle.attrs["openPMD"].decode())
self.iteration = kwargs.pop("iteration", None)
self._set_paths(self._handle, path.dirname(filename), self.iteration)
Dataset.__init__(
self,
filename,
dataset_type,
units_override=units_override,
unit_system=unit_system,
)
self.storage_filename = storage_filename
self.fluid_types += ("openPMD",)
try:
particles = tuple(
str(c) for c in self._handle[self.base_path + self.particles_path].keys()
)
if len(particles) > 1:
# Only use on-disk particle names if there is more than one species
self.particle_types = particles
mylog.debug("self.particle_types: {}".format(self.particle_types))
self.particle_types_raw = self.particle_types
self.particle_types = tuple(self.particle_types)
except KeyError:
pass
|
def __init__(
self,
filename,
dataset_type="openPMD",
storage_filename=None,
units_override=None,
unit_system="mks",
**kwargs,
):
self._handle = HDF5FileHandler(filename)
self.gridsize = kwargs.pop("open_pmd_virtual_gridsize", 10**9)
self._set_paths(self._handle, path.dirname(filename))
Dataset.__init__(
self,
filename,
dataset_type,
units_override=units_override,
unit_system=unit_system,
)
self.storage_filename = storage_filename
self.fluid_types += ("openPMD",)
particles = tuple(
str(c) for c in self._handle[self.base_path + self.particles_path].keys()
)
if len(particles) > 1:
# Only use on-disk particle names if there is more than one species
self.particle_types = particles
mylog.debug("open_pmd - self.particle_types: {}".format(self.particle_types))
self.particle_types_raw = self.particle_types
self.particle_types = tuple(self.particle_types)
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def _set_paths(self, handle, path, iteration):
"""Parses relevant hdf5-paths out of ``handle``.
Parameters
----------
handle : h5py.File
path : str
(absolute) filepath for current hdf5 container
"""
iterations = []
if iteration is None:
iteration = list(handle["/data"].keys())[0]
encoding = handle.attrs["iterationEncoding"].decode()
if "groupBased" in encoding:
iterations = list(handle["/data"].keys())
mylog.info("Found {} iterations in file".format(len(iterations)))
elif "fileBased" in encoding:
itformat = handle.attrs["iterationFormat"].decode().split("/")[-1]
regex = "^" + itformat.replace("%T", "[0-9]+") + "$"
if path is "":
mylog.warning("For file based iterations, please use absolute file paths!")
pass
for filename in listdir(path):
if match(regex, filename):
iterations.append(filename)
mylog.info("Found {} iterations in directory".format(len(iterations)))
if len(iterations) == 0:
mylog.warning("No iterations found!")
if "groupBased" in encoding and len(iterations) > 1:
mylog.warning("Only chose to load one iteration ({})".format(iteration))
self.base_path = "/data/{}/".format(iteration)
self.meshes_path = self._handle["/"].attrs["meshesPath"].decode()
try:
handle[self.base_path + self.meshes_path]
except KeyError:
if self.standard_version <= StrictVersion("1.1.0"):
mylog.info(
"meshesPath not present in file."
" Assuming file contains no meshes and has a domain extent of 1m^3!"
)
else:
raise
self.particles_path = self._handle["/"].attrs["particlesPath"].decode()
try:
handle[self.base_path + self.particles_path]
except KeyError:
if self.standard_version <= StrictVersion("1.1.0"):
mylog.info(
"particlesPath not present in file."
" Assuming file contains no particles!"
)
else:
raise
|
def _set_paths(self, handle, path):
"""Parses relevant hdf5-paths out of ``handle``.
Parameters
----------
handle : h5py.File
path : str
(absolute) filepath for current hdf5 container
"""
iterations = []
encoding = handle.attrs["iterationEncoding"].decode()
if "groupBased" in encoding:
iterations = list(handle["/data"].keys())
mylog.info("open_pmd - found {} iterations in file".format(len(iterations)))
elif "fileBased" in encoding:
itformat = handle.attrs["iterationFormat"].decode().split("/")[-1]
regex = "^" + itformat.replace("%T", "[0-9]+") + "$"
if path is "":
mylog.warning(
"open_pmd - For file based iterations, please use absolute file paths!"
)
pass
for filename in listdir(path):
if match(regex, filename):
iterations.append(filename)
mylog.info(
"open_pmd - found {} iterations in directory".format(len(iterations))
)
if len(iterations) == 0:
mylog.warning("open_pmd - no iterations found!")
if "groupBased" in encoding and len(iterations) > 1:
mylog.warning(
"open_pmd - only choose to load one iteration ({})".format(
list(handle["/data"].keys())[0]
)
)
self.base_path = "/data/{}/".format(list(handle["/data"].keys())[0])
self.meshes_path = self._handle["/"].attrs["meshesPath"].decode()
self.particles_path = self._handle["/"].attrs["particlesPath"].decode()
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def _parse_parameter_file(self):
"""Read in metadata describing the overall data on-disk."""
f = self._handle
bp = self.base_path
mp = self.meshes_path
self.unique_identifier = 0
self.parameters = 0
self.periodicity = np.zeros(3, dtype=np.bool)
self.refine_by = 1
self.cosmological_simulation = 0
try:
shapes = {}
left_edges = {}
right_edges = {}
meshes = f[bp + mp]
for mname in meshes.keys():
mesh = meshes[mname]
if type(mesh) is h5.Group:
shape = np.asarray(mesh[list(mesh.keys())[0]].shape)
else:
shape = np.asarray(mesh.shape)
spacing = np.asarray(mesh.attrs["gridSpacing"])
offset = np.asarray(mesh.attrs["gridGlobalOffset"])
unit_si = np.asarray(mesh.attrs["gridUnitSI"])
le = offset * unit_si
re = le + shape * unit_si * spacing
shapes[mname] = shape
left_edges[mname] = le
right_edges[mname] = re
lowest_dim = np.min([len(i) for i in shapes.values()])
shapes = np.asarray([i[:lowest_dim] for i in shapes.values()])
left_edges = np.asarray([i[:lowest_dim] for i in left_edges.values()])
right_edges = np.asarray([i[:lowest_dim] for i in right_edges.values()])
fs = []
dle = []
dre = []
for i in np.arange(lowest_dim):
fs.append(np.max(shapes.transpose()[i]))
dle.append(np.min(left_edges.transpose()[i]))
dre.append(np.min(right_edges.transpose()[i]))
self.dimensionality = len(fs)
self.domain_dimensions = np.append(fs, np.ones(3 - self.dimensionality))
self.domain_left_edge = np.append(dle, np.zeros(3 - len(dle)))
self.domain_right_edge = np.append(dre, np.ones(3 - len(dre)))
except KeyError:
if self.standard_version <= StrictVersion("1.1.0"):
self.dimensionality = 3
self.domain_dimensions = np.ones(3, dtype=np.float64)
self.domain_left_edge = np.zeros(3, dtype=np.float64)
self.domain_right_edge = np.ones(3, dtype=np.float64)
else:
raise
self.current_time = f[bp].attrs["time"] * f[bp].attrs["timeUnitSI"]
|
def _parse_parameter_file(self):
"""Read in metadata describing the overall data on-disk."""
f = self._handle
bp = self.base_path
mp = self.meshes_path
self.unique_identifier = 0
self.parameters = 0
self.periodicity = np.zeros(3, dtype=np.bool)
self.refine_by = 1
self.cosmological_simulation = 0
try:
shapes = {}
left_edges = {}
right_edges = {}
for mesh in f[bp + mp].keys():
if type(f[bp + mp + mesh]) is h5.Group:
shape = np.asarray(
f[bp + mp + mesh + "/" + list(f[bp + mp + mesh].keys())[0]].shape
)
else:
shapes[mesh] = np.asarray(f[bp + mp + mesh].shape)
spacing = np.asarray(f[bp + mp + mesh].attrs["gridSpacing"])
offset = np.asarray(f[bp + mp + mesh].attrs["gridGlobalOffset"])
unit_si = np.asarray(f[bp + mp + mesh].attrs["gridUnitSI"])
le = offset * unit_si
re = le + shape * unit_si * spacing
shapes[mesh] = shape
left_edges[mesh] = le
right_edges[mesh] = re
lowest_dim = np.min([len(i) for i in shapes.values()])
shapes = np.asarray([i[:lowest_dim] for i in shapes.values()])
left_edges = np.asarray([i[:lowest_dim] for i in left_edges.values()])
right_edges = np.asarray([i[:lowest_dim] for i in right_edges.values()])
fs = []
dle = []
dre = []
for i in np.arange(lowest_dim):
fs.append(np.max(shapes.transpose()[i]))
dle.append(np.min(left_edges.transpose()[i]))
dre.append(np.min(right_edges.transpose()[i]))
self.dimensionality = len(fs)
self.domain_dimensions = np.append(fs, np.ones(3 - self.dimensionality))
self.domain_left_edge = np.append(dle, np.zeros(3 - len(dle)))
self.domain_right_edge = np.append(dre, np.ones(3 - len(dre)))
except ValueError:
mylog.warning(
"open_pmd - It seems your data does not contain meshes. Assuming domain extent of 1m^3!"
)
self.dimensionality = 3
self.domain_dimensions = np.ones(3, dtype=np.float64)
self.domain_left_edge = np.zeros(3, dtype=np.float64)
self.domain_right_edge = np.ones(3, dtype=np.float64)
self.current_time = f[bp].attrs["time"] * f[bp].attrs["timeUnitSI"]
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def _is_valid(self, *args, **kwargs):
"""Checks whether the supplied file can be read by this frontend."""
warn_h5py(args[0])
try:
with h5.File(args[0], "r") as f:
attrs = list(f["/"].attrs.keys())
for i in opmd_required_attributes:
if i not in attrs:
return False
if StrictVersion(f.attrs["openPMD"].decode()) not in ompd_known_versions:
return False
if f.attrs["iterationEncoding"].decode() == "fileBased":
return True
return False
except (IOError, OSError, ImportError):
return False
|
def _is_valid(self, *args, **kwargs):
"""Checks whether the supplied file can be read by this frontend."""
warn_h5py(args[0])
try:
f = h5.File(args[0], "r")
except (IOError, OSError, ImportError):
return False
requirements = ["openPMD", "basePath", "meshesPath", "particlesPath"]
attrs = list(f["/"].attrs.keys())
for i in requirements:
if i not in attrs:
f.close()
return False
known_versions = [StrictVersion("1.0.0"), StrictVersion("1.0.1")]
if StrictVersion(f.attrs["openPMD"].decode()) in known_versions:
f.close()
return True
else:
f.close()
return False
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def __init__(self, ds, field_list):
f = ds._handle
bp = ds.base_path
mp = ds.meshes_path
pp = ds.particles_path
try:
fields = f[bp + mp]
for fname in fields.keys():
field = fields[fname]
if type(field) is h5.Dataset or is_const_component(field):
# Don't consider axes. This appears to be a vector field of single dimensionality
ytname = str("_".join([fname.replace("_", "-")]))
parsed = parse_unit_dimension(
np.asarray(field.attrs["unitDimension"], dtype=np.int)
)
unit = str(YTQuantity(1, parsed).units)
aliases = []
# Save a list of magnetic fields for aliasing later on
# We can not reasonably infer field type/unit by name in openPMD
if unit == "T" or unit == "kg/(A*s**2)":
self._mag_fields.append(ytname)
self.known_other_fields += ((ytname, (unit, aliases, None)),)
else:
for axis in field.keys():
ytname = str("_".join([fname.replace("_", "-"), axis]))
parsed = parse_unit_dimension(
np.asarray(field.attrs["unitDimension"], dtype=np.int)
)
unit = str(YTQuantity(1, parsed).units)
aliases = []
# Save a list of magnetic fields for aliasing later on
# We can not reasonably infer field type by name in openPMD
if unit == "T" or unit == "kg/(A*s**2)":
self._mag_fields.append(ytname)
self.known_other_fields += ((ytname, (unit, aliases, None)),)
for i in self.known_other_fields:
mylog.debug("open_pmd - known_other_fields - {}".format(i))
except KeyError:
pass
try:
particles = f[bp + pp]
for pname in particles.keys():
species = particles[pname]
for recname in species.keys():
try:
record = species[recname]
parsed = parse_unit_dimension(record.attrs["unitDimension"])
unit = str(YTQuantity(1, parsed).units)
ytattrib = str(recname).replace("_", "-")
if ytattrib == "position":
# Symbolically rename position to preserve yt's interpretation of the pfield
# particle_position is later derived in setup_absolute_positions in the way yt expects it
ytattrib = "positionCoarse"
if type(record) is h5.Dataset or is_const_component(record):
name = ["particle", ytattrib]
self.known_particle_fields += (
(str("_".join(name)), (unit, [], None)),
)
else:
for axis in record.keys():
aliases = []
name = ["particle", ytattrib, axis]
ytname = str("_".join(name))
self.known_particle_fields += (
(ytname, (unit, aliases, None)),
)
except KeyError:
if recname != "particlePatches":
mylog.info(
"open_pmd - {}_{} does not seem to have unitDimension".format(
pname, recname
)
)
for i in self.known_particle_fields:
mylog.debug("open_pmd - known_particle_fields - {}".format(i))
except KeyError:
pass
super(OpenPMDFieldInfo, self).__init__(ds, field_list)
|
def __init__(self, ds, field_list):
f = ds._handle
bp = ds.base_path
mp = ds.meshes_path
pp = ds.particles_path
fields = f[bp + mp]
for fname in fields.keys():
field = fields[fname]
if type(field) is h5.Dataset or is_const_component(field):
# Don't consider axes. This appears to be a vector field of single dimensionality
ytname = str("_".join([fname.replace("_", "-")]))
parsed = parse_unit_dimension(
np.asarray(field.attrs["unitDimension"], dtype=np.int)
)
unit = str(YTQuantity(1, parsed).units)
aliases = []
# Save a list of magnetic fields for aliasing later on
# We can not reasonably infer field type/unit by name in openPMD
if unit == "T" or unit == "kg/(A*s**2)":
self._mag_fields.append(ytname)
self.known_other_fields += ((ytname, (unit, aliases, None)),)
else:
for axis in field.keys():
ytname = str("_".join([fname.replace("_", "-"), axis]))
parsed = parse_unit_dimension(
np.asarray(field.attrs["unitDimension"], dtype=np.int)
)
unit = str(YTQuantity(1, parsed).units)
aliases = []
# Save a list of magnetic fields for aliasing later on
# We can not reasonably infer field type by name in openPMD
if unit == "T" or unit == "kg/(A*s**2)":
self._mag_fields.append(ytname)
self.known_other_fields += ((ytname, (unit, aliases, None)),)
for i in self.known_other_fields:
mylog.debug("open_pmd - known_other_fields - {}".format(i))
particles = f[bp + pp]
for species in particles.keys():
for record in particles[species].keys():
try:
pds = particles[species + "/" + record]
parsed = parse_unit_dimension(pds.attrs["unitDimension"])
unit = str(YTQuantity(1, parsed).units)
ytattrib = str(record).replace("_", "-")
if ytattrib == "position":
# Symbolically rename position to preserve yt's interpretation of the pfield
# particle_position is later derived in setup_absolute_positions in the way yt expects it
ytattrib = "positionCoarse"
if type(pds) is h5.Dataset or is_const_component(pds):
name = ["particle", ytattrib]
self.known_particle_fields += (
(str("_".join(name)), (unit, [], None)),
)
else:
for axis in pds.keys():
aliases = []
name = ["particle", ytattrib, axis]
ytname = str("_".join(name))
self.known_particle_fields += ((ytname, (unit, aliases, None)),)
except KeyError:
mylog.info(
"open_pmd - {}_{} does not seem to have unitDimension".format(
species, record
)
)
for i in self.known_particle_fields:
mylog.debug("open_pmd - known_particle_fields - {}".format(i))
super(OpenPMDFieldInfo, self).__init__(ds, field_list)
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def _read_fluid_selection(self, chunks, selector, fields, size):
"""Reads given fields masked by a given selection.
Parameters
----------
chunks
A list of chunks
A chunk is a list of grids
selector
A region (inside your domain) specifying which parts of the field you want to read
See [1] and [2]
fields : array_like
Tuples (fname, ftype) representing a field
size : int
Size of the data to read
Returns
-------
dict
keys are tuples (ftype, fname) representing a field
values are flat (``size``,) ndarrays with data from that field
"""
f = self._handle
bp = self.base_path
mp = self.meshes_path
ds = f[bp + mp]
chunks = list(chunks)
rv = {}
ind = {}
if isinstance(selector, GridSelector):
if not (len(chunks) == len(chunks[0].objs) == 1):
raise RuntimeError
if size is None:
size = sum((g.count(selector) for chunk in chunks for g in chunk.objs))
for field in fields:
rv[field] = np.empty(size, dtype=np.float64)
ind[field] = 0
for ftype, fname in fields:
field = (ftype, fname)
for chunk in chunks:
for grid in chunk.objs:
mask = grid._get_selector_mask(selector)
if mask is None:
continue
component = fname.replace("_", "/").replace("-", "_")
if component.split("/")[0] not in grid.ftypes:
data = np.full(grid.ActiveDimensions, 0, dtype=np.float64)
else:
data = get_component(ds, component, grid.findex, grid.foffset)
# The following is a modified AMRGridPatch.select(...)
data.shape = (
mask.shape
) # Workaround - casts a 2D (x,y) array to 3D (x,y,1)
count = grid.count(selector)
rv[field][ind[field] : ind[field] + count] = data[mask]
ind[field] += count
for field in fields:
rv[field] = rv[field][: ind[field]]
rv[field].flatten()
return rv
|
def _read_fluid_selection(self, chunks, selector, fields, size):
"""Reads given fields masked by a given selection.
Parameters
----------
chunks
A list of chunks
A chunk is a list of grids
selector
A region (inside your domain) specifying which parts of the field you want to read
See [1] and [2]
fields : array_like
Tuples (fname, ftype) representing a field
size : int
Size of the data to read
Returns
-------
dict
keys are tuples (ftype, fname) representing a field
values are flat (``size``,) ndarrays with data from that field
"""
f = self._handle
bp = self.base_path
mp = self.meshes_path
ds = f[bp + mp]
chunks = list(chunks)
rv = {}
ind = {}
if isinstance(selector, GridSelector):
if not (len(chunks) == len(chunks[0].objs) == 1):
raise RuntimeError
if size is None:
size = sum((g.count(selector) for chunk in chunks for g in chunk.objs))
for field in fields:
rv[field] = np.empty(size, dtype=np.float64)
ind[field] = 0
for ftype, fname in fields:
field = (ftype, fname)
for chunk in chunks:
for grid in chunk.objs:
component = fname.replace("_", "/").replace("-", "_")
if component.split("/")[0] not in grid.ftypes:
continue
mask = grid._get_selector_mask(selector)
if mask is None:
continue
data = get_component(ds, component, grid.findex, grid.foffset)
# The following is a modified AMRGridPatch.select(...)
data.shape = (
mask.shape
) # Workaround - casts a 2D (x,y) array to 3D (x,y,1)
count = grid.count(selector)
rv[field][ind[field] : ind[field] + count] = data[mask]
ind[field] += count
for field in fields:
rv[field] = rv[field][: ind[field]]
rv[field].flatten()
return rv
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def parse_unit_dimension(unit_dimension):
"""Transforms an openPMD unitDimension into a string.
Parameters
----------
unit_dimension : array_like
integer array of length 7 with one entry for the dimensional component of every SI unit
[0] length L,
[1] mass M,
[2] time T,
[3] electric current I,
[4] thermodynamic temperature theta,
[5] amount of substance N,
[6] luminous intensity J
References
----------
.. https://github.com/openPMD/openPMD-standard/blob/latest/STANDARD.md#unit-systems-and-dimensionality
Returns
-------
str
Examples
--------
>>> velocity = [1., 0., -1., 0., 0., 0., 0.]
>>> print parse_unit_dimension(velocity)
'm**1*s**-1'
>>> magnetic_field = [0., 1., -2., -1., 0., 0., 0.]
>>> print parse_unit_dimension(magnetic_field)
'kg**1*s**-2*A**-1'
"""
if len(unit_dimension) is not 7:
mylog.error("SI must have 7 base dimensions!")
unit_dimension = np.asarray(unit_dimension, dtype=np.int)
dim = []
si = ["m", "kg", "s", "A", "C", "mol", "cd"]
for i in np.arange(7):
if unit_dimension[i] != 0:
dim.append("{}**{}".format(si[i], unit_dimension[i]))
return "*".join(dim)
|
def parse_unit_dimension(unit_dimension):
"""Transforms an openPMD unitDimension into a string.
Parameters
----------
unit_dimension : array_like
integer array of length 7 with one entry for the dimensional component of every SI unit
[0] length L,
[1] mass M,
[2] time T,
[3] electric current I,
[4] thermodynamic temperature theta,
[5] amount of substance N,
[6] luminous intensity J
References
----------
.. https://github.com/openPMD/openPMD-standard/blob/latest/STANDARD.md#unit-systems-and-dimensionality
Returns
-------
str
Examples
--------
>>> velocity = [1., 0., -1., 0., 0., 0., 0.]
>>> print parse_unit_dimension(velocity)
'm**1*s**-1'
>>> magnetic_field = [0., 1., -2., -1., 0., 0., 0.]
>>> print parse_unit_dimension(magnetic_field)
'kg**1*s**-2*A**-1'
"""
if len(unit_dimension) is not 7:
mylog.error("open_pmd - SI must have 7 base dimensions!")
unit_dimension = np.asarray(unit_dimension, dtype=np.int)
dim = []
si = ["m", "kg", "s", "A", "C", "mol", "cd"]
for i in np.arange(7):
if unit_dimension[i] != 0:
dim.append("{}**{}".format(si[i], unit_dimension[i]))
return "*".join(dim)
|
https://github.com/yt-project/yt/issues/1626
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-6-606762cc9ee1> in <module>()
----> 1 yt.load('./example-ptcl/hdf5/data00000400.h5')
~/miniconda3/lib/python3.6/site-packages/yt/convenience.py in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
~/miniconda3/lib/python3.6/site-packages/yt/frontends/open_pmd/data_structures.py in __init__(self, filename, dataset_type, storage_filename, units_override, unit_system, **kwargs)
385 self.storage_filename = storage_filename
386 self.fluid_types += ("openPMD",)
--> 387 particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
388 if len(particles) > 1:
389 # Only use on-disk particle names if there is more than one species
~/miniconda3/lib/python3.6/site-packages/yt/utilities/file_handler.py in __getitem__(self, key)
43
44 def __getitem__(self, key):
---> 45 return self.handle[key]
46
47 def __contains__(self, item):
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
~/miniconda3/lib/python3.6/site-packages/h5py/_hl/group.py in __getitem__(self, name)
167 raise ValueError("Invalid HDF5 object reference")
168 else:
--> 169 oid = h5o.open(self.id, self._e(name), lapl=self._lapl)
170
171 otype = h5i.get_type(oid)
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2846)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/_objects.c:2804)()
h5py/h5o.pyx in h5py.h5o.open (/Users/ilan/minonda/conda-bld/h5py_1496887972496/work/h5py/h5o.c:3740)()
KeyError: "Unable to open object (Object 'particles' doesn't exist)"
|
KeyError
|
def assign_particle_data(ds, pdata, bbox):
"""
Assign particle data to the grids using MatchPointsToGrids. This
will overwrite any existing particle data, so be careful!
"""
for ptype in ds.particle_types_raw:
check_fields = [(ptype, "particle_position_x"), (ptype, "particle_position")]
if all(f not in pdata for f in check_fields):
pdata_ftype = {}
for f in [k for k in sorted(pdata)]:
if not hasattr(pdata[f], "shape"):
continue
if f == "number_of_particles":
continue
mylog.debug("Reassigning '%s' to ('%s','%s')", f, ptype, f)
pdata_ftype[ptype, f] = pdata.pop(f)
pdata_ftype.update(pdata)
pdata = pdata_ftype
# Note: what we need to do here is a bit tricky. Because occasionally this
# gets called before we property handle the field detection, we cannot use
# any information about the index. Fortunately for us, we can generate
# most of the GridTree utilizing information we already have from the
# stream handler.
if len(ds.stream_handler.fields) > 1:
pdata.pop("number_of_particles", None)
num_grids = len(ds.stream_handler.fields)
parent_ids = ds.stream_handler.parent_ids
num_children = np.zeros(num_grids, dtype="int64")
# We're going to do this the slow way
mask = np.empty(num_grids, dtype="bool")
for i in range(num_grids):
np.equal(parent_ids, i, mask)
num_children[i] = mask.sum()
levels = ds.stream_handler.levels.astype("int64").ravel()
grid_tree = GridTree(
num_grids,
ds.stream_handler.left_edges,
ds.stream_handler.right_edges,
ds.stream_handler.dimensions,
ds.stream_handler.parent_ids,
levels,
num_children,
)
grid_pdata = []
for i in range(num_grids):
grid = {"number_of_particles": 0}
grid_pdata.append(grid)
for ptype in ds.particle_types_raw:
if (ptype, "particle_position_x") in pdata:
x, y, z = (pdata[ptype, "particle_position_%s" % ax] for ax in "xyz")
elif (ptype, "particle_position") in pdata:
x, y, z = pdata[ptype, "particle_position"].T
else:
raise KeyError(
"Cannot decompose particle data without position fields!"
)
pts = MatchPointsToGrids(grid_tree, len(x), x, y, z)
particle_grid_inds = pts.find_points_in_tree()
(assigned_particles,) = (particle_grid_inds >= 0).nonzero()
num_particles = particle_grid_inds.size
num_unassigned = num_particles - assigned_particles.size
if num_unassigned > 0:
m = (
"Discarding %s particles (out of %s) that are outside "
"bounding box. "
)
eps = np.finfo(x.dtype).eps
s = np.array(
[
[x.min() - eps, x.max() + eps],
[y.min() - eps, y.max() + eps],
[z.min() - eps, z.max() + eps],
]
)
sug_bbox = [
[min(bbox[0, 0], s[0, 0]), max(bbox[0, 1], s[0, 1])],
[min(bbox[1, 0], s[1, 0]), max(bbox[1, 1], s[1, 1])],
[min(bbox[2, 0], s[2, 0]), max(bbox[2, 1], s[2, 1])],
]
m += "Set bbox=%s to avoid this in the future."
mylog.warn(m % (num_unassigned, num_particles, sug_bbox))
particle_grid_inds = particle_grid_inds[assigned_particles]
x = x[assigned_particles]
y = y[assigned_particles]
z = z[assigned_particles]
idxs = np.argsort(particle_grid_inds)
particle_grid_count = np.bincount(
particle_grid_inds.astype("intp"), minlength=num_grids
)
particle_indices = np.zeros(num_grids + 1, dtype="int64")
if num_grids > 1:
np.add.accumulate(
particle_grid_count.squeeze(), out=particle_indices[1:]
)
else:
particle_indices[1] = particle_grid_count.squeeze()
for i, pcount in enumerate(particle_grid_count):
grid_pdata[i]["number_of_particles"] += pcount
start = particle_indices[i]
end = particle_indices[i + 1]
for key in pdata.keys():
if key[0] == ptype:
grid_pdata[i][key] = pdata[key][idxs][start:end]
else:
grid_pdata = [pdata]
for pd, gi in zip(grid_pdata, sorted(ds.stream_handler.fields)):
ds.stream_handler.fields[gi].update(pd)
ds.stream_handler.particle_types.update(set_particle_types(pd))
npart = ds.stream_handler.fields[gi].pop("number_of_particles", 0)
ds.stream_handler.particle_count[gi] = npart
|
def assign_particle_data(ds, pdata):
"""
Assign particle data to the grids using MatchPointsToGrids. This
will overwrite any existing particle data, so be careful!
"""
for ptype in ds.particle_types_raw:
check_fields = [(ptype, "particle_position_x"), (ptype, "particle_position")]
if all(f not in pdata for f in check_fields):
pdata_ftype = {}
for f in [k for k in sorted(pdata)]:
if not hasattr(pdata[f], "shape"):
continue
if f == "number_of_particles":
continue
mylog.debug("Reassigning '%s' to ('%s','%s')", f, ptype, f)
pdata_ftype[ptype, f] = pdata.pop(f)
pdata_ftype.update(pdata)
pdata = pdata_ftype
# Note: what we need to do here is a bit tricky. Because occasionally this
# gets called before we property handle the field detection, we cannot use
# any information about the index. Fortunately for us, we can generate
# most of the GridTree utilizing information we already have from the
# stream handler.
if len(ds.stream_handler.fields) > 1:
pdata.pop("number_of_particles", None)
num_grids = len(ds.stream_handler.fields)
parent_ids = ds.stream_handler.parent_ids
num_children = np.zeros(num_grids, dtype="int64")
# We're going to do this the slow way
mask = np.empty(num_grids, dtype="bool")
for i in range(num_grids):
np.equal(parent_ids, i, mask)
num_children[i] = mask.sum()
levels = ds.stream_handler.levels.astype("int64").ravel()
grid_tree = GridTree(
num_grids,
ds.stream_handler.left_edges,
ds.stream_handler.right_edges,
ds.stream_handler.dimensions,
ds.stream_handler.parent_ids,
levels,
num_children,
)
grid_pdata = []
for i in range(num_grids):
grid = {"number_of_particles": 0}
grid_pdata.append(grid)
for ptype in ds.particle_types_raw:
if (ptype, "particle_position_x") in pdata:
x, y, z = (pdata[ptype, "particle_position_%s" % ax] for ax in "xyz")
elif (ptype, "particle_position") in pdata:
x, y, z = pdata[ptype, "particle_position"].T
else:
raise KeyError(
"Cannot decompose particle data without position fields!"
)
pts = MatchPointsToGrids(grid_tree, len(x), x, y, z)
particle_grid_inds = pts.find_points_in_tree()
idxs = np.argsort(particle_grid_inds)
particle_grid_count = np.bincount(
particle_grid_inds.astype("intp"), minlength=num_grids
)
particle_indices = np.zeros(num_grids + 1, dtype="int64")
if num_grids > 1:
np.add.accumulate(
particle_grid_count.squeeze(), out=particle_indices[1:]
)
else:
particle_indices[1] = particle_grid_count.squeeze()
for i, pcount in enumerate(particle_grid_count):
grid_pdata[i]["number_of_particles"] += pcount
start = particle_indices[i]
end = particle_indices[i + 1]
for key in pdata.keys():
if key[0] == ptype:
grid_pdata[i][key] = pdata[key][idxs][start:end]
else:
grid_pdata = [pdata]
for pd, gi in zip(grid_pdata, sorted(ds.stream_handler.fields)):
ds.stream_handler.fields[gi].update(pd)
ds.stream_handler.particle_types.update(set_particle_types(pd))
npart = ds.stream_handler.fields[gi].pop("number_of_particles", 0)
ds.stream_handler.particle_count[gi] = npart
|
https://github.com/yt-project/yt/issues/1600
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-2-a9ff1fb7d916> in <module>()
8 bbox = np.array([[-1.5, 1.5], [-1.5, 1.5], [-1.5, 1.5]])
9 ds = yt.load_uniform_grid(data, data["density"][0].shape, length_unit=(1.0, "Mpc"), mass_unit=(1.0,"Msun"),
---> 10 bbox=bbox, nprocs=4)
11
~/anaconda3/envs/phcpy/lib/python3.6/site-packages/yt/frontends/stream/data_structures.py in load_uniform_grid(data, domain_dimensions, length_unit, bbox, nprocs, sim_time, mass_unit, time_unit, velocity_unit, magnetic_unit, periodicity, geometry, unit_system)
752 if number_of_particles > 0:
753 # This will update the stream handler too
--> 754 assign_particle_data(sds, pdata)
755
756 return sds
~/anaconda3/envs/phcpy/lib/python3.6/site-packages/yt/frontends/stream/data_structures.py in assign_particle_data(ds, pdata)
449 idxs = np.argsort(particle_grid_inds)
450 particle_grid_count = np.bincount(particle_grid_inds.astype("intp"),
--> 451 minlength=num_grids)
452 particle_indices = np.zeros(num_grids + 1, dtype='int64')
453 if num_grids > 1:
ValueError: The first argument of bincount must be non-negative
|
ValueError
|
def load_uniform_grid(
data,
domain_dimensions,
length_unit=None,
bbox=None,
nprocs=1,
sim_time=0.0,
mass_unit=None,
time_unit=None,
velocity_unit=None,
magnetic_unit=None,
periodicity=(True, True, True),
geometry="cartesian",
unit_system="cgs",
):
r"""Load a uniform grid of data into yt as a
:class:`~yt.frontends.stream.data_structures.StreamHandler`.
This should allow a uniform grid of data to be loaded directly into yt and
analyzed as would any others. This comes with several caveats:
* Units will be incorrect unless the unit system is explicitly
specified.
* Some functions may behave oddly, and parallelism will be
disappointing or non-existent in most cases.
* Particles may be difficult to integrate.
Particle fields are detected as one-dimensional fields.
Parameters
----------
data : dict
This is a dict of numpy arrays or (numpy array, unit spec) tuples.
The keys are the field names.
domain_dimensions : array_like
This is the domain dimensions of the grid
length_unit : string
Unit to use for lengths. Defaults to unitless.
bbox : array_like (xdim:zdim, LE:RE), optional
Size of computational domain in units specified by length_unit.
Defaults to a cubic unit-length domain.
nprocs: integer, optional
If greater than 1, will create this number of subarrays out of data
sim_time : float, optional
The simulation time in seconds
mass_unit : string
Unit to use for masses. Defaults to unitless.
time_unit : string
Unit to use for times. Defaults to unitless.
velocity_unit : string
Unit to use for velocities. Defaults to unitless.
magnetic_unit : string
Unit to use for magnetic fields. Defaults to unitless.
periodicity : tuple of booleans
Determines whether the data will be treated as periodic along
each axis
geometry : string or tuple
"cartesian", "cylindrical", "polar", "spherical", "geographic" or
"spectral_cube". Optionally, a tuple can be provided to specify the
axis ordering -- for instance, to specify that the axis ordering should
be z, x, y, this would be: ("cartesian", ("z", "x", "y")). The same
can be done for other coordinates, for instance:
("spherical", ("theta", "phi", "r")).
Examples
--------
>>> bbox = np.array([[0., 1.0], [-1.5, 1.5], [1.0, 2.5]])
>>> arr = np.random.random((128, 128, 128))
>>> data = dict(density=arr)
>>> ds = load_uniform_grid(data, arr.shape, length_unit='cm',
... bbox=bbox, nprocs=12)
>>> dd = ds.all_data()
>>> dd['density']
YTArray([ 0.87568064, 0.33686453, 0.70467189, ..., 0.70439916,
0.97506269, 0.03047113]) g/cm**3
>>> data = dict(density=(arr, 'kg/m**3'))
>>> ds = load_uniform_grid(data, arr.shape, length_unit=3.03e24,
... bbox=bbox, nprocs=12)
>>> dd = ds.all_data()
>>> dd['density']
YTArray([ 8.75680644e-04, 3.36864527e-04, 7.04671886e-04, ...,
7.04399160e-04, 9.75062693e-04, 3.04711295e-05]) g/cm**3
"""
domain_dimensions = np.array(domain_dimensions)
if bbox is None:
bbox = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]], "float64")
domain_left_edge = np.array(bbox[:, 0], "float64")
domain_right_edge = np.array(bbox[:, 1], "float64")
grid_levels = np.zeros(nprocs, dtype="int32").reshape((nprocs, 1))
# If someone included this throw it away--old API
if "number_of_particles" in data:
issue_deprecation_warning(
"It is no longer necessary to include "
"the number of particles in the data "
"dict. The number of particles is "
"determined from the sizes of the "
"particle fields."
)
data.pop("number_of_particles")
# First we fix our field names, apply units to data
# and check for consistency of field shapes
field_units, data, number_of_particles = process_data(
data, grid_dims=tuple(domain_dimensions)
)
sfh = StreamDictFieldHandler()
if number_of_particles > 0:
particle_types = set_particle_types(data)
# Used much further below.
pdata = {"number_of_particles": number_of_particles}
for key in list(data.keys()):
if len(data[key].shape) == 1 or key[0] == "io":
if not isinstance(key, tuple):
field = ("io", key)
mylog.debug("Reassigning '%s' to '%s'", key, field)
else:
field = key
sfh._additional_fields += (field,)
pdata[field] = data.pop(key)
else:
particle_types = {}
if nprocs > 1:
temp = {}
new_data = {}
for key in data.keys():
psize = get_psize(np.array(data[key].shape), nprocs)
grid_left_edges, grid_right_edges, shapes, slices = decompose_array(
data[key].shape, psize, bbox
)
grid_dimensions = np.array([shape for shape in shapes], dtype="int32")
temp[key] = [data[key][slice] for slice in slices]
for gid in range(nprocs):
new_data[gid] = {}
for key in temp.keys():
new_data[gid].update({key: temp[key][gid]})
sfh.update(new_data)
del new_data, temp
else:
sfh.update({0: data})
grid_left_edges = domain_left_edge
grid_right_edges = domain_right_edge
grid_dimensions = domain_dimensions.reshape(nprocs, 3).astype("int32")
if length_unit is None:
length_unit = "code_length"
if mass_unit is None:
mass_unit = "code_mass"
if time_unit is None:
time_unit = "code_time"
if velocity_unit is None:
velocity_unit = "code_velocity"
if magnetic_unit is None:
magnetic_unit = "code_magnetic"
handler = StreamHandler(
grid_left_edges,
grid_right_edges,
grid_dimensions,
grid_levels,
-np.ones(nprocs, dtype="int64"),
np.zeros(nprocs, dtype="int64").reshape(nprocs, 1), # particle count
np.zeros(nprocs).reshape((nprocs, 1)),
sfh,
field_units,
(length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
particle_types=particle_types,
periodicity=periodicity,
)
handler.name = "UniformGridData"
handler.domain_left_edge = domain_left_edge
handler.domain_right_edge = domain_right_edge
handler.refine_by = 2
if np.all(domain_dimensions[1:] == 1):
dimensionality = 1
elif domain_dimensions[2] == 1:
dimensionality = 2
else:
dimensionality = 3
handler.dimensionality = dimensionality
handler.domain_dimensions = domain_dimensions
handler.simulation_time = sim_time
handler.cosmology_simulation = 0
sds = StreamDataset(handler, geometry=geometry, unit_system=unit_system)
# Now figure out where the particles go
if number_of_particles > 0:
# This will update the stream handler too
assign_particle_data(sds, pdata, bbox)
return sds
|
def load_uniform_grid(
data,
domain_dimensions,
length_unit=None,
bbox=None,
nprocs=1,
sim_time=0.0,
mass_unit=None,
time_unit=None,
velocity_unit=None,
magnetic_unit=None,
periodicity=(True, True, True),
geometry="cartesian",
unit_system="cgs",
):
r"""Load a uniform grid of data into yt as a
:class:`~yt.frontends.stream.data_structures.StreamHandler`.
This should allow a uniform grid of data to be loaded directly into yt and
analyzed as would any others. This comes with several caveats:
* Units will be incorrect unless the unit system is explicitly
specified.
* Some functions may behave oddly, and parallelism will be
disappointing or non-existent in most cases.
* Particles may be difficult to integrate.
Particle fields are detected as one-dimensional fields.
Parameters
----------
data : dict
This is a dict of numpy arrays or (numpy array, unit spec) tuples.
The keys are the field names.
domain_dimensions : array_like
This is the domain dimensions of the grid
length_unit : string
Unit to use for lengths. Defaults to unitless.
bbox : array_like (xdim:zdim, LE:RE), optional
Size of computational domain in units specified by length_unit.
Defaults to a cubic unit-length domain.
nprocs: integer, optional
If greater than 1, will create this number of subarrays out of data
sim_time : float, optional
The simulation time in seconds
mass_unit : string
Unit to use for masses. Defaults to unitless.
time_unit : string
Unit to use for times. Defaults to unitless.
velocity_unit : string
Unit to use for velocities. Defaults to unitless.
magnetic_unit : string
Unit to use for magnetic fields. Defaults to unitless.
periodicity : tuple of booleans
Determines whether the data will be treated as periodic along
each axis
geometry : string or tuple
"cartesian", "cylindrical", "polar", "spherical", "geographic" or
"spectral_cube". Optionally, a tuple can be provided to specify the
axis ordering -- for instance, to specify that the axis ordering should
be z, x, y, this would be: ("cartesian", ("z", "x", "y")). The same
can be done for other coordinates, for instance:
("spherical", ("theta", "phi", "r")).
Examples
--------
>>> bbox = np.array([[0., 1.0], [-1.5, 1.5], [1.0, 2.5]])
>>> arr = np.random.random((128, 128, 128))
>>> data = dict(density=arr)
>>> ds = load_uniform_grid(data, arr.shape, length_unit='cm',
... bbox=bbox, nprocs=12)
>>> dd = ds.all_data()
>>> dd['density']
YTArray([ 0.87568064, 0.33686453, 0.70467189, ..., 0.70439916,
0.97506269, 0.03047113]) g/cm**3
>>> data = dict(density=(arr, 'kg/m**3'))
>>> ds = load_uniform_grid(data, arr.shape, length_unit=3.03e24,
... bbox=bbox, nprocs=12)
>>> dd = ds.all_data()
>>> dd['density']
YTArray([ 8.75680644e-04, 3.36864527e-04, 7.04671886e-04, ...,
7.04399160e-04, 9.75062693e-04, 3.04711295e-05]) g/cm**3
"""
domain_dimensions = np.array(domain_dimensions)
if bbox is None:
bbox = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]], "float64")
domain_left_edge = np.array(bbox[:, 0], "float64")
domain_right_edge = np.array(bbox[:, 1], "float64")
grid_levels = np.zeros(nprocs, dtype="int32").reshape((nprocs, 1))
# If someone included this throw it away--old API
if "number_of_particles" in data:
issue_deprecation_warning(
"It is no longer necessary to include "
"the number of particles in the data "
"dict. The number of particles is "
"determined from the sizes of the "
"particle fields."
)
data.pop("number_of_particles")
# First we fix our field names, apply units to data
# and check for consistency of field shapes
field_units, data, number_of_particles = process_data(
data, grid_dims=tuple(domain_dimensions)
)
sfh = StreamDictFieldHandler()
if number_of_particles > 0:
particle_types = set_particle_types(data)
# Used much further below.
pdata = {"number_of_particles": number_of_particles}
for key in list(data.keys()):
if len(data[key].shape) == 1 or key[0] == "io":
if not isinstance(key, tuple):
field = ("io", key)
mylog.debug("Reassigning '%s' to '%s'", key, field)
else:
field = key
sfh._additional_fields += (field,)
pdata[field] = data.pop(key)
else:
particle_types = {}
if nprocs > 1:
temp = {}
new_data = {}
for key in data.keys():
psize = get_psize(np.array(data[key].shape), nprocs)
grid_left_edges, grid_right_edges, shapes, slices = decompose_array(
data[key].shape, psize, bbox
)
grid_dimensions = np.array([shape for shape in shapes], dtype="int32")
temp[key] = [data[key][slice] for slice in slices]
for gid in range(nprocs):
new_data[gid] = {}
for key in temp.keys():
new_data[gid].update({key: temp[key][gid]})
sfh.update(new_data)
del new_data, temp
else:
sfh.update({0: data})
grid_left_edges = domain_left_edge
grid_right_edges = domain_right_edge
grid_dimensions = domain_dimensions.reshape(nprocs, 3).astype("int32")
if length_unit is None:
length_unit = "code_length"
if mass_unit is None:
mass_unit = "code_mass"
if time_unit is None:
time_unit = "code_time"
if velocity_unit is None:
velocity_unit = "code_velocity"
if magnetic_unit is None:
magnetic_unit = "code_magnetic"
handler = StreamHandler(
grid_left_edges,
grid_right_edges,
grid_dimensions,
grid_levels,
-np.ones(nprocs, dtype="int64"),
np.zeros(nprocs, dtype="int64").reshape(nprocs, 1), # particle count
np.zeros(nprocs).reshape((nprocs, 1)),
sfh,
field_units,
(length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
particle_types=particle_types,
periodicity=periodicity,
)
handler.name = "UniformGridData"
handler.domain_left_edge = domain_left_edge
handler.domain_right_edge = domain_right_edge
handler.refine_by = 2
if np.all(domain_dimensions[1:] == 1):
dimensionality = 1
elif domain_dimensions[2] == 1:
dimensionality = 2
else:
dimensionality = 3
handler.dimensionality = dimensionality
handler.domain_dimensions = domain_dimensions
handler.simulation_time = sim_time
handler.cosmology_simulation = 0
sds = StreamDataset(handler, geometry=geometry, unit_system=unit_system)
# Now figure out where the particles go
if number_of_particles > 0:
# This will update the stream handler too
assign_particle_data(sds, pdata)
return sds
|
https://github.com/yt-project/yt/issues/1600
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-2-a9ff1fb7d916> in <module>()
8 bbox = np.array([[-1.5, 1.5], [-1.5, 1.5], [-1.5, 1.5]])
9 ds = yt.load_uniform_grid(data, data["density"][0].shape, length_unit=(1.0, "Mpc"), mass_unit=(1.0,"Msun"),
---> 10 bbox=bbox, nprocs=4)
11
~/anaconda3/envs/phcpy/lib/python3.6/site-packages/yt/frontends/stream/data_structures.py in load_uniform_grid(data, domain_dimensions, length_unit, bbox, nprocs, sim_time, mass_unit, time_unit, velocity_unit, magnetic_unit, periodicity, geometry, unit_system)
752 if number_of_particles > 0:
753 # This will update the stream handler too
--> 754 assign_particle_data(sds, pdata)
755
756 return sds
~/anaconda3/envs/phcpy/lib/python3.6/site-packages/yt/frontends/stream/data_structures.py in assign_particle_data(ds, pdata)
449 idxs = np.argsort(particle_grid_inds)
450 particle_grid_count = np.bincount(particle_grid_inds.astype("intp"),
--> 451 minlength=num_grids)
452 particle_indices = np.zeros(num_grids + 1, dtype='int64')
453 if num_grids > 1:
ValueError: The first argument of bincount must be non-negative
|
ValueError
|
def refine_amr(base_ds, refinement_criteria, fluid_operators, max_level, callback=None):
r"""Given a base dataset, repeatedly apply refinement criteria and
fluid operators until a maximum level is reached.
Parameters
----------
base_ds : ~yt.data_objects.static_output.Dataset
This is any static output. It can also be a stream static output, for
instance as returned by load_uniform_data.
refinement_critera : list of :class:`~yt.utilities.flagging_methods.FlaggingMethod`
These criteria will be applied in sequence to identify cells that need
to be refined.
fluid_operators : list of :class:`~yt.utilities.initial_conditions.FluidOperator`
These fluid operators will be applied in sequence to all resulting
grids.
max_level : int
The maximum level to which the data will be refined
callback : function, optional
A function that will be called at the beginning of each refinement
cycle, with the current dataset.
Examples
--------
>>> domain_dims = (32, 32, 32)
>>> data = np.zeros(domain_dims) + 0.25
>>> fo = [ic.CoredSphere(0.05, 0.3, [0.7,0.4,0.75], {"Density": (0.25, 100.0)})]
>>> rc = [fm.flagging_method_registry["overdensity"](8.0)]
>>> ug = load_uniform_grid({'Density': data}, domain_dims, 1.0)
>>> ds = refine_amr(ug, rc, fo, 5)
"""
# If we have particle data, set it aside for now
number_of_particles = np.sum(
[grid.NumberOfParticles for grid in base_ds.index.grids]
)
if number_of_particles > 0:
pdata = {}
for field in base_ds.field_list:
if not isinstance(field, tuple):
field = ("unknown", field)
fi = base_ds._get_field_info(*field)
if fi.particle_type and field[0] in base_ds.particle_types_raw:
pdata[field] = uconcatenate(
[grid[field] for grid in base_ds.index.grids]
)
pdata["number_of_particles"] = number_of_particles
last_gc = base_ds.index.num_grids
cur_gc = -1
ds = base_ds
bbox = np.array(
[(ds.domain_left_edge[i], ds.domain_right_edge[i]) for i in range(3)]
)
while ds.index.max_level < max_level and last_gc != cur_gc:
mylog.info("Refining another level. Current max level: %s", ds.index.max_level)
last_gc = ds.index.grids.size
for m in fluid_operators:
m.apply(ds)
if callback is not None:
callback(ds)
grid_data = []
for g in ds.index.grids:
gd = dict(
left_edge=g.LeftEdge,
right_edge=g.RightEdge,
level=g.Level,
dimensions=g.ActiveDimensions,
)
for field in ds.field_list:
if not isinstance(field, tuple):
field = ("unknown", field)
fi = ds._get_field_info(*field)
if not fi.particle_type:
gd[field] = g[field]
grid_data.append(gd)
if g.Level < ds.index.max_level:
continue
fg = FlaggingGrid(g, refinement_criteria)
nsg = fg.find_subgrids()
for sg in nsg:
LE = sg.left_index * g.dds + ds.domain_left_edge
dims = sg.dimensions * ds.refine_by
grid = ds.smoothed_covering_grid(g.Level + 1, LE, dims)
gd = dict(
left_edge=LE,
right_edge=grid.right_edge,
level=g.Level + 1,
dimensions=dims,
)
for field in ds.field_list:
if not isinstance(field, tuple):
field = ("unknown", field)
fi = ds._get_field_info(*field)
if not fi.particle_type:
gd[field] = grid[field]
grid_data.append(gd)
ds = load_amr_grids(grid_data, ds.domain_dimensions, bbox=bbox)
ds.particle_types_raw = base_ds.particle_types_raw
ds.particle_types = ds.particle_types_raw
# Now figure out where the particles go
if number_of_particles > 0:
# This will update the stream handler too
assign_particle_data(ds, pdata, bbox)
cur_gc = ds.index.num_grids
return ds
|
def refine_amr(base_ds, refinement_criteria, fluid_operators, max_level, callback=None):
r"""Given a base dataset, repeatedly apply refinement criteria and
fluid operators until a maximum level is reached.
Parameters
----------
base_ds : ~yt.data_objects.static_output.Dataset
This is any static output. It can also be a stream static output, for
instance as returned by load_uniform_data.
refinement_critera : list of :class:`~yt.utilities.flagging_methods.FlaggingMethod`
These criteria will be applied in sequence to identify cells that need
to be refined.
fluid_operators : list of :class:`~yt.utilities.initial_conditions.FluidOperator`
These fluid operators will be applied in sequence to all resulting
grids.
max_level : int
The maximum level to which the data will be refined
callback : function, optional
A function that will be called at the beginning of each refinement
cycle, with the current dataset.
Examples
--------
>>> domain_dims = (32, 32, 32)
>>> data = np.zeros(domain_dims) + 0.25
>>> fo = [ic.CoredSphere(0.05, 0.3, [0.7,0.4,0.75], {"Density": (0.25, 100.0)})]
>>> rc = [fm.flagging_method_registry["overdensity"](8.0)]
>>> ug = load_uniform_grid({'Density': data}, domain_dims, 1.0)
>>> ds = refine_amr(ug, rc, fo, 5)
"""
# If we have particle data, set it aside for now
number_of_particles = np.sum(
[grid.NumberOfParticles for grid in base_ds.index.grids]
)
if number_of_particles > 0:
pdata = {}
for field in base_ds.field_list:
if not isinstance(field, tuple):
field = ("unknown", field)
fi = base_ds._get_field_info(*field)
if fi.particle_type and field[0] in base_ds.particle_types_raw:
pdata[field] = uconcatenate(
[grid[field] for grid in base_ds.index.grids]
)
pdata["number_of_particles"] = number_of_particles
last_gc = base_ds.index.num_grids
cur_gc = -1
ds = base_ds
bbox = np.array(
[(ds.domain_left_edge[i], ds.domain_right_edge[i]) for i in range(3)]
)
while ds.index.max_level < max_level and last_gc != cur_gc:
mylog.info("Refining another level. Current max level: %s", ds.index.max_level)
last_gc = ds.index.grids.size
for m in fluid_operators:
m.apply(ds)
if callback is not None:
callback(ds)
grid_data = []
for g in ds.index.grids:
gd = dict(
left_edge=g.LeftEdge,
right_edge=g.RightEdge,
level=g.Level,
dimensions=g.ActiveDimensions,
)
for field in ds.field_list:
if not isinstance(field, tuple):
field = ("unknown", field)
fi = ds._get_field_info(*field)
if not fi.particle_type:
gd[field] = g[field]
grid_data.append(gd)
if g.Level < ds.index.max_level:
continue
fg = FlaggingGrid(g, refinement_criteria)
nsg = fg.find_subgrids()
for sg in nsg:
LE = sg.left_index * g.dds + ds.domain_left_edge
dims = sg.dimensions * ds.refine_by
grid = ds.smoothed_covering_grid(g.Level + 1, LE, dims)
gd = dict(
left_edge=LE,
right_edge=grid.right_edge,
level=g.Level + 1,
dimensions=dims,
)
for field in ds.field_list:
if not isinstance(field, tuple):
field = ("unknown", field)
fi = ds._get_field_info(*field)
if not fi.particle_type:
gd[field] = grid[field]
grid_data.append(gd)
ds = load_amr_grids(grid_data, ds.domain_dimensions, bbox=bbox)
ds.particle_types_raw = base_ds.particle_types_raw
ds.particle_types = ds.particle_types_raw
# Now figure out where the particles go
if number_of_particles > 0:
# This will update the stream handler too
assign_particle_data(ds, pdata)
cur_gc = ds.index.num_grids
return ds
|
https://github.com/yt-project/yt/issues/1600
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-2-a9ff1fb7d916> in <module>()
8 bbox = np.array([[-1.5, 1.5], [-1.5, 1.5], [-1.5, 1.5]])
9 ds = yt.load_uniform_grid(data, data["density"][0].shape, length_unit=(1.0, "Mpc"), mass_unit=(1.0,"Msun"),
---> 10 bbox=bbox, nprocs=4)
11
~/anaconda3/envs/phcpy/lib/python3.6/site-packages/yt/frontends/stream/data_structures.py in load_uniform_grid(data, domain_dimensions, length_unit, bbox, nprocs, sim_time, mass_unit, time_unit, velocity_unit, magnetic_unit, periodicity, geometry, unit_system)
752 if number_of_particles > 0:
753 # This will update the stream handler too
--> 754 assign_particle_data(sds, pdata)
755
756 return sds
~/anaconda3/envs/phcpy/lib/python3.6/site-packages/yt/frontends/stream/data_structures.py in assign_particle_data(ds, pdata)
449 idxs = np.argsort(particle_grid_inds)
450 particle_grid_count = np.bincount(particle_grid_inds.astype("intp"),
--> 451 minlength=num_grids)
452 particle_indices = np.zeros(num_grids + 1, dtype='int64')
453 if num_grids > 1:
ValueError: The first argument of bincount must be non-negative
|
ValueError
|
def _switch_ds(self, new_ds, data_source=None):
old_object = self.data_source
name = old_object._type_name
kwargs = dict((n, getattr(old_object, n)) for n in old_object._con_args)
kwargs["center"] = getattr(old_object, "center", None)
if data_source is not None:
if name != "proj":
raise RuntimeError(
"The data_source keyword argument is only defined for projections."
)
kwargs["data_source"] = data_source
self.ds = new_ds
# A _hack_ for ParticleProjectionPlots
if name == "Particle":
from yt.visualization.particle_plots import ParticleAxisAlignedDummyDataSource
new_object = ParticleAxisAlignedDummyDataSource(ds=self.ds, **kwargs)
else:
new_object = getattr(new_ds, name)(**kwargs)
self.data_source = new_object
self._data_valid = self._plot_valid = False
for d in "xyz":
lim_name = d + "lim"
if hasattr(self, lim_name):
lim = getattr(self, lim_name)
lim = tuple(new_ds.quan(l.value, str(l.units)) for l in lim)
setattr(self, lim_name, lim)
self.plots.data_source = new_object
self._colorbar_label.data_source = new_object
self._setup_plots()
|
def _switch_ds(self, new_ds, data_source=None):
old_object = self.data_source
name = old_object._type_name
kwargs = dict((n, getattr(old_object, n)) for n in old_object._con_args)
kwargs["center"] = getattr(old_object, "center", None)
if data_source is not None:
if name != "proj":
raise RuntimeError(
"The data_source keyword argument is only defined for projections."
)
kwargs["data_source"] = data_source
new_object = getattr(new_ds, name)(**kwargs)
self.ds = new_ds
self.data_source = new_object
self._data_valid = self._plot_valid = False
for d in "xyz":
lim_name = d + "lim"
if hasattr(self, lim_name):
lim = getattr(self, lim_name)
lim = tuple(new_ds.quan(l.value, str(l.units)) for l in lim)
setattr(self, lim_name, lim)
self.plots.data_source = new_object
self._colorbar_label.data_source = new_object
self._setup_plots()
|
https://github.com/yt-project/yt/issues/1582
|
Traceback (most recent call last):
File "test.py", line 13, in <module>
plot._switch_ds(ds2)
File "/Users/goldbaum/Documents/yt-git-fixes/yt/visualization/plot_container.py", line 325, in _switch_ds
new_object = getattr(new_ds, name)(**kwargs)
AttributeError: 'TipsyDataset' object has no attribute 'Particle'
|
AttributeError
|
def _parse_enzo2_parameter_file(self, f):
for line in (l.strip() for l in f):
if (len(line) < 2) or (line[0] == "#"):
continue
param, vals = (i.strip() for i in line.split("=", 1))
# First we try to decipher what type of value it is.
vals = vals.split()
# Special case approaching.
if "(do" in vals:
vals = vals[:1]
if len(vals) == 0:
pcast = str # Assume NULL output
else:
v = vals[0]
# Figure out if it's castable to floating point:
try:
float(v)
except ValueError:
pcast = str
else:
if any("." in v or "e+" in v or "e-" in v for v in vals):
pcast = float
elif v == "inf":
pcast = str
else:
pcast = int
# Now we figure out what to do with it.
if len(vals) == 0:
vals = ""
elif len(vals) == 1:
vals = pcast(vals[0])
else:
vals = np.array([pcast(i) for i in vals if i != "-99999"])
if param.startswith("Append"):
if param not in self.parameters:
self.parameters[param] = []
self.parameters[param].append(vals)
else:
self.parameters[param] = vals
self.refine_by = self.parameters["RefineBy"]
self.periodicity = ensure_tuple(self.parameters["LeftFaceBoundaryCondition"] == 3)
self.dimensionality = self.parameters["TopGridRank"]
if "MetaDataDatasetUUID" in self.parameters:
self.unique_identifier = self.parameters["MetaDataDatasetUUID"]
elif "CurrentTimeIdentifier" in self.parameters:
self.unique_identifier = self.parameters["CurrentTimeIdentifier"]
else:
self.unique_identifier = str(
int(os.stat(self.parameter_filename)[stat.ST_CTIME])
)
if self.dimensionality > 1:
self.domain_dimensions = self.parameters["TopGridDimensions"]
if len(self.domain_dimensions) < 3:
tmp = self.domain_dimensions.tolist()
tmp.append(1)
self.domain_dimensions = np.array(tmp)
self.periodicity += (False,)
self.domain_left_edge = np.array(
self.parameters["DomainLeftEdge"], "float64"
).copy()
self.domain_right_edge = np.array(
self.parameters["DomainRightEdge"], "float64"
).copy()
else:
self.domain_left_edge = np.array(self.parameters["DomainLeftEdge"], "float64")
self.domain_right_edge = np.array(self.parameters["DomainRightEdge"], "float64")
self.domain_dimensions = np.array([self.parameters["TopGridDimensions"], 1, 1])
self.periodicity += (False, False)
self.gamma = self.parameters["Gamma"]
if self.parameters["ComovingCoordinates"]:
self.cosmological_simulation = 1
self.current_redshift = self.parameters["CosmologyCurrentRedshift"]
self.omega_lambda = self.parameters["CosmologyOmegaLambdaNow"]
self.omega_matter = self.parameters["CosmologyOmegaMatterNow"]
self.hubble_constant = self.parameters["CosmologyHubbleConstantNow"]
else:
self.current_redshift = self.omega_lambda = self.omega_matter = (
self.hubble_constant
) = self.cosmological_simulation = 0.0
self.particle_types = []
self.current_time = self.parameters["InitialTime"]
if (
self.parameters["NumberOfParticles"] > 0
and "AppendActiveParticleType" in self.parameters.keys()
):
# If this is the case, then we know we should have a DarkMatter
# particle type, and we don't need the "io" type.
self.parameters["AppendActiveParticleType"].append("DarkMatter")
else:
# We do not have an "io" type for Enzo particles if the
# ActiveParticle machinery is on, as we simply will ignore any of
# the non-DarkMatter particles in that case. However, for older
# datasets, we call this particle type "io".
self.particle_types = ["io"]
for ptype in self.parameters.get("AppendActiveParticleType", []):
self.particle_types.append(ptype)
self.particle_types = tuple(self.particle_types)
self.particle_types_raw = self.particle_types
if self.dimensionality == 1:
self._setup_1d()
elif self.dimensionality == 2:
self._setup_2d()
|
def _parse_enzo2_parameter_file(self, f):
for line in (l.strip() for l in f):
if len(line) < 2:
continue
param, vals = (i.strip() for i in line.split("=", 1))
# First we try to decipher what type of value it is.
vals = vals.split()
# Special case approaching.
if "(do" in vals:
vals = vals[:1]
if len(vals) == 0:
pcast = str # Assume NULL output
else:
v = vals[0]
# Figure out if it's castable to floating point:
try:
float(v)
except ValueError:
pcast = str
else:
if any("." in v or "e+" in v or "e-" in v for v in vals):
pcast = float
elif v == "inf":
pcast = str
else:
pcast = int
# Now we figure out what to do with it.
if len(vals) == 0:
vals = ""
elif len(vals) == 1:
vals = pcast(vals[0])
else:
vals = np.array([pcast(i) for i in vals if i != "-99999"])
if param.startswith("Append"):
if param not in self.parameters:
self.parameters[param] = []
self.parameters[param].append(vals)
else:
self.parameters[param] = vals
self.refine_by = self.parameters["RefineBy"]
self.periodicity = ensure_tuple(self.parameters["LeftFaceBoundaryCondition"] == 3)
self.dimensionality = self.parameters["TopGridRank"]
if "MetaDataDatasetUUID" in self.parameters:
self.unique_identifier = self.parameters["MetaDataDatasetUUID"]
elif "CurrentTimeIdentifier" in self.parameters:
self.unique_identifier = self.parameters["CurrentTimeIdentifier"]
else:
self.unique_identifier = str(
int(os.stat(self.parameter_filename)[stat.ST_CTIME])
)
if self.dimensionality > 1:
self.domain_dimensions = self.parameters["TopGridDimensions"]
if len(self.domain_dimensions) < 3:
tmp = self.domain_dimensions.tolist()
tmp.append(1)
self.domain_dimensions = np.array(tmp)
self.periodicity += (False,)
self.domain_left_edge = np.array(
self.parameters["DomainLeftEdge"], "float64"
).copy()
self.domain_right_edge = np.array(
self.parameters["DomainRightEdge"], "float64"
).copy()
else:
self.domain_left_edge = np.array(self.parameters["DomainLeftEdge"], "float64")
self.domain_right_edge = np.array(self.parameters["DomainRightEdge"], "float64")
self.domain_dimensions = np.array([self.parameters["TopGridDimensions"], 1, 1])
self.periodicity += (False, False)
self.gamma = self.parameters["Gamma"]
if self.parameters["ComovingCoordinates"]:
self.cosmological_simulation = 1
self.current_redshift = self.parameters["CosmologyCurrentRedshift"]
self.omega_lambda = self.parameters["CosmologyOmegaLambdaNow"]
self.omega_matter = self.parameters["CosmologyOmegaMatterNow"]
self.hubble_constant = self.parameters["CosmologyHubbleConstantNow"]
else:
self.current_redshift = self.omega_lambda = self.omega_matter = (
self.hubble_constant
) = self.cosmological_simulation = 0.0
self.particle_types = []
self.current_time = self.parameters["InitialTime"]
if (
self.parameters["NumberOfParticles"] > 0
and "AppendActiveParticleType" in self.parameters.keys()
):
# If this is the case, then we know we should have a DarkMatter
# particle type, and we don't need the "io" type.
self.parameters["AppendActiveParticleType"].append("DarkMatter")
else:
# We do not have an "io" type for Enzo particles if the
# ActiveParticle machinery is on, as we simply will ignore any of
# the non-DarkMatter particles in that case. However, for older
# datasets, we call this particle type "io".
self.particle_types = ["io"]
for ptype in self.parameters.get("AppendActiveParticleType", []):
self.particle_types.append(ptype)
self.particle_types = tuple(self.particle_types)
self.particle_types_raw = self.particle_types
if self.dimensionality == 1:
self._setup_1d()
elif self.dimensionality == 2:
self._setup_2d()
|
https://github.com/yt-project/yt/issues/1584
|
ValueError Traceback (most recent call last)
<ipython-input-2-d4d26e2f34da> in <module>()
----> 1 ds = yt.load("RD0027/RD0027")
/Users/molly/anaconda2/envs/astroconda/lib/python2.7/site-packages/yt/convenience.pyc in load(*args, **kwargs)
84 candidates = find_lowest_subclasses(candidates)
85 if len(candidates) == 1:
---> 86 return candidates[0](*args, **kwargs)
87 if len(candidates) == 0:
88 if ytcfg.get("yt", "enzo_db") != '' \
/Users/molly/anaconda2/envs/astroconda/lib/python2.7/site-packages/yt/frontends/enzo/data_structures.pyc in __init__(self, filename, dataset_type, file_style, parameter_override, conversion_override, storage_filename, units_override, unit_system)
680 self.storage_filename = storage_filename
681 Dataset.__init__(self, filename, dataset_type, file_style=file_style,
--> 682 units_override=units_override, unit_system=unit_system)
683
684 def _setup_1d(self):
/Users/molly/anaconda2/envs/astroconda/lib/python2.7/site-packages/yt/data_objects/static_output.pyc in __init__(self, filename, dataset_type, file_style, units_override, unit_system)
304 self._create_unit_registry()
305
--> 306 self._parse_parameter_file()
307 self.set_units()
308 self._assign_unit_system(unit_system)
/Users/molly/anaconda2/envs/astroconda/lib/python2.7/site-packages/yt/frontends/enzo/data_structures.pyc in _parse_parameter_file(self)
741 self._parse_enzo3_parameter_file(f)
742 else:
--> 743 self._parse_enzo2_parameter_file(f)
744
745 def _parse_enzo3_parameter_file(self, f):
/Users/molly/anaconda2/envs/astroconda/lib/python2.7/site-packages/yt/frontends/enzo/data_structures.pyc in _parse_enzo2_parameter_file(self, f)
783 for line in (l.strip() for l in f):
784 if len(line) < 2: continue
--> 785 param, vals = (i.strip() for i in line.split("=",1))
786 # First we try to decipher what type of value it is.
787 vals = vals.split()
ValueError: need more than 1 value to unpack
|
ValueError
|
def __init__(self, id, filename=None, index=None):
super(AMRGridPatch, self).__init__(index.dataset, None)
self.id = id
self._child_mask = self._child_indices = self._child_index_mask = None
self.ds = index.dataset
self._index = weakref.proxy(index)
self.start_index = None
self.filename = filename
self._last_mask = None
self._last_count = -1
self._last_selector_id = None
|
def __init__(self, id, filename=None, index=None):
self.field_data = YTFieldData()
self.field_parameters = {}
self.id = id
self._child_mask = self._child_indices = self._child_index_mask = None
self.ds = index.dataset
self._index = weakref.proxy(index)
self.start_index = None
self.filename = filename
self._last_mask = None
self._last_count = -1
self._last_selector_id = None
self._current_particle_type = "all"
self._current_fluid_type = self.ds.default_fluid_type
|
https://github.com/yt-project/yt/issues/1571
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-16-ea26250246dd> in <module>()
----> 1 grid['particle_velocity_magnitude']
/home/ychen/src/yt-git/yt/data_objects/grid_patch.py in __getitem__(self, key)
93
94 def __getitem__(self, key):
---> 95 tr = super(AMRGridPatch, self).__getitem__(key)
96 try:
97 fields = self._determine_fields(key)
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in __getitem__(self, key)
279 return self.field_data[f]
280 else:
--> 281 self.get_data(f)
282 # fi.units is the unit expression string. We depend on the registry
283 # hanging off the dataset to define this unit object.
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in get_data(self, fields)
1334
1335 fields_to_generate += gen_fluids + gen_particles
-> 1336 self._generate_fields(fields_to_generate)
1337 for field in list(self.field_data.keys()):
1338 if field not in ofields:
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in _generate_fields(self, fields_to_generate)
1354 fi = self.ds._get_field_info(*field)
1355 try:
-> 1356 fd = self._generate_field(field)
1357 if fd is None:
1358 raise RuntimeError
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in _generate_field(self, field)
314 tr = self._generate_container_field(field)
315 if finfo.particle_type: # This is a property now
--> 316 tr = self._generate_particle_field(field)
317 else:
318 tr = self._generate_fluid_field(field)
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in _generate_particle_field(self, field)
399 else:
400 with self._field_type_state(ftype, finfo, gen_obj):
--> 401 rv = self.ds._get_field_info(*field)(gen_obj)
402 return rv
403
/home/ychen/src/yt-git/yt/fields/derived_field.py in __call__(self, data)
235 "for %s" % (self.name,))
236 with self.unit_registry(data):
--> 237 dd = self._function(self, data)
238 for field_name in data.keys():
239 if field_name not in original_fields:
/home/ychen/src/yt-git/yt/fields/particle_fields.py in _particle_velocity_magnitude(field, data)
282 return np.sqrt((data[ptype, svel % 'x'] - bulk_velocity[0])**2
283 + (data[ptype, svel % 'y'] - bulk_velocity[1])**2
--> 284 + (data[ptype, svel % 'z'] - bulk_velocity[2])**2 )
285
286 registry.add_field((ptype, "particle_velocity_magnitude"),
TypeError: 'NoneType' object is not subscriptable
|
TypeError
|
def __init__(self, base_region, domain, ds, over_refine_factor=1):
super(OctreeSubset, self).__init__(ds, None)
self._num_zones = 1 << (over_refine_factor)
self._oref = over_refine_factor
self.domain = domain
self.domain_id = domain.domain_id
self.ds = domain.ds
self._index = self.ds.index
self.oct_handler = domain.oct_handler
self._last_mask = None
self._last_selector_id = None
self.base_region = base_region
self.base_selector = base_region.selector
|
def __init__(self, base_region, domain, ds, over_refine_factor=1):
self._num_zones = 1 << (over_refine_factor)
self._oref = over_refine_factor
self.field_data = YTFieldData()
self.field_parameters = {}
self.domain = domain
self.domain_id = domain.domain_id
self.ds = domain.ds
self._index = self.ds.index
self.oct_handler = domain.oct_handler
self._last_mask = None
self._last_selector_id = None
self._current_particle_type = "all"
self._current_fluid_type = self.ds.default_fluid_type
self.base_region = base_region
self.base_selector = base_region.selector
|
https://github.com/yt-project/yt/issues/1571
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-16-ea26250246dd> in <module>()
----> 1 grid['particle_velocity_magnitude']
/home/ychen/src/yt-git/yt/data_objects/grid_patch.py in __getitem__(self, key)
93
94 def __getitem__(self, key):
---> 95 tr = super(AMRGridPatch, self).__getitem__(key)
96 try:
97 fields = self._determine_fields(key)
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in __getitem__(self, key)
279 return self.field_data[f]
280 else:
--> 281 self.get_data(f)
282 # fi.units is the unit expression string. We depend on the registry
283 # hanging off the dataset to define this unit object.
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in get_data(self, fields)
1334
1335 fields_to_generate += gen_fluids + gen_particles
-> 1336 self._generate_fields(fields_to_generate)
1337 for field in list(self.field_data.keys()):
1338 if field not in ofields:
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in _generate_fields(self, fields_to_generate)
1354 fi = self.ds._get_field_info(*field)
1355 try:
-> 1356 fd = self._generate_field(field)
1357 if fd is None:
1358 raise RuntimeError
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in _generate_field(self, field)
314 tr = self._generate_container_field(field)
315 if finfo.particle_type: # This is a property now
--> 316 tr = self._generate_particle_field(field)
317 else:
318 tr = self._generate_fluid_field(field)
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in _generate_particle_field(self, field)
399 else:
400 with self._field_type_state(ftype, finfo, gen_obj):
--> 401 rv = self.ds._get_field_info(*field)(gen_obj)
402 return rv
403
/home/ychen/src/yt-git/yt/fields/derived_field.py in __call__(self, data)
235 "for %s" % (self.name,))
236 with self.unit_registry(data):
--> 237 dd = self._function(self, data)
238 for field_name in data.keys():
239 if field_name not in original_fields:
/home/ychen/src/yt-git/yt/fields/particle_fields.py in _particle_velocity_magnitude(field, data)
282 return np.sqrt((data[ptype, svel % 'x'] - bulk_velocity[0])**2
283 + (data[ptype, svel % 'y'] - bulk_velocity[1])**2
--> 284 + (data[ptype, svel % 'z'] - bulk_velocity[2])**2 )
285
286 registry.add_field((ptype, "particle_velocity_magnitude"),
TypeError: 'NoneType' object is not subscriptable
|
TypeError
|
def __init__(self, mesh_id, filename, connectivity_indices, connectivity_coords, index):
super(UnstructuredMesh, self).__init__(index.dataset, None)
self.filename = filename
self.mesh_id = mesh_id
# This is where we set up the connectivity information
self.connectivity_indices = connectivity_indices
if connectivity_indices.shape[1] != self._connectivity_length:
if self._connectivity_length == -1:
self._connectivity_length = connectivity_indices.shape[1]
else:
raise RuntimeError
self.connectivity_coords = connectivity_coords
self.ds = index.dataset
self._index = index
self._last_mask = None
self._last_count = -1
self._last_selector_id = None
|
def __init__(self, mesh_id, filename, connectivity_indices, connectivity_coords, index):
self.field_data = YTFieldData()
self.filename = filename
self.field_parameters = {}
self.mesh_id = mesh_id
# This is where we set up the connectivity information
self.connectivity_indices = connectivity_indices
if connectivity_indices.shape[1] != self._connectivity_length:
if self._connectivity_length == -1:
self._connectivity_length = connectivity_indices.shape[1]
else:
raise RuntimeError
self.connectivity_coords = connectivity_coords
self.ds = index.dataset
self._index = index
self._last_mask = None
self._last_count = -1
self._last_selector_id = None
self._current_particle_type = "all"
self._current_fluid_type = self.ds.default_fluid_type
|
https://github.com/yt-project/yt/issues/1571
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-16-ea26250246dd> in <module>()
----> 1 grid['particle_velocity_magnitude']
/home/ychen/src/yt-git/yt/data_objects/grid_patch.py in __getitem__(self, key)
93
94 def __getitem__(self, key):
---> 95 tr = super(AMRGridPatch, self).__getitem__(key)
96 try:
97 fields = self._determine_fields(key)
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in __getitem__(self, key)
279 return self.field_data[f]
280 else:
--> 281 self.get_data(f)
282 # fi.units is the unit expression string. We depend on the registry
283 # hanging off the dataset to define this unit object.
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in get_data(self, fields)
1334
1335 fields_to_generate += gen_fluids + gen_particles
-> 1336 self._generate_fields(fields_to_generate)
1337 for field in list(self.field_data.keys()):
1338 if field not in ofields:
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in _generate_fields(self, fields_to_generate)
1354 fi = self.ds._get_field_info(*field)
1355 try:
-> 1356 fd = self._generate_field(field)
1357 if fd is None:
1358 raise RuntimeError
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in _generate_field(self, field)
314 tr = self._generate_container_field(field)
315 if finfo.particle_type: # This is a property now
--> 316 tr = self._generate_particle_field(field)
317 else:
318 tr = self._generate_fluid_field(field)
/home/ychen/src/yt-git/yt/data_objects/data_containers.py in _generate_particle_field(self, field)
399 else:
400 with self._field_type_state(ftype, finfo, gen_obj):
--> 401 rv = self.ds._get_field_info(*field)(gen_obj)
402 return rv
403
/home/ychen/src/yt-git/yt/fields/derived_field.py in __call__(self, data)
235 "for %s" % (self.name,))
236 with self.unit_registry(data):
--> 237 dd = self._function(self, data)
238 for field_name in data.keys():
239 if field_name not in original_fields:
/home/ychen/src/yt-git/yt/fields/particle_fields.py in _particle_velocity_magnitude(field, data)
282 return np.sqrt((data[ptype, svel % 'x'] - bulk_velocity[0])**2
283 + (data[ptype, svel % 'y'] - bulk_velocity[1])**2
--> 284 + (data[ptype, svel % 'z'] - bulk_velocity[2])**2 )
285
286 registry.add_field((ptype, "particle_velocity_magnitude"),
TypeError: 'NoneType' object is not subscriptable
|
TypeError
|
def _fill_fields(self, fields):
fields = [f for f in fields if f not in self.field_data]
if len(fields) == 0:
return
# It may be faster to adapt fill_region_float to fill multiple fields
# instead of looping here
for field in fields:
dest = np.zeros(self.ActiveDimensions, dtype="float64")
for chunk in self._data_source.chunks(fields, "io"):
fill_region_float(
chunk.fcoords,
chunk.fwidth,
chunk[field],
self.left_edge,
self.right_edge,
dest,
1,
self.ds.domain_width,
int(any(self.ds.periodicity)),
)
fi = self.ds._get_field_info(field)
self[field] = self.ds.arr(dest, fi.units)
|
def _fill_fields(self, fields):
fields = [f for f in fields if f not in self.field_data]
if len(fields) == 0:
return
assert len(fields) == 1
field = fields[0]
dest = np.zeros(self.ActiveDimensions, dtype="float64")
for chunk in self._data_source.chunks(fields, "io"):
fill_region_float(
chunk.fcoords,
chunk.fwidth,
chunk[field],
self.left_edge,
self.right_edge,
dest,
1,
self.ds.domain_width,
int(any(self.ds.periodicity)),
)
fi = self.ds._get_field_info(field)
self[field] = self.ds.arr(dest, fi.units)
|
https://github.com/yt-project/yt/issues/1527
|
Traceback (most recent call last):
File "yt_arbgridtest.py", line 16, in <module>
tracerp = galgas['tracerf']
File "/Users/goldbaum/Documents/yt-git-fixes/yt/data_objects/data_containers.py", line 281, in __getitem__
self.get_data(f)
File "/Users/goldbaum/Documents/yt-git-fixes/yt/data_objects/construction_data_containers.py", line 641, in get_data
if len(fill) > 0: self._fill_fields(fill)
File "/Users/goldbaum/Documents/yt-git-fixes/yt/data_objects/construction_data_containers.py", line 851, in _fill_fields
assert(len(fields) == 1)
AssertionError
|
AssertionError
|
def _setup_filtered_type(self, filter):
if not filter.available(self.derived_field_list):
raise YTIllDefinedParticleFilter(
filter, filter.missing(self.derived_field_list)
)
fi = self.field_info
fd = self.field_dependencies
available = False
for fn in self.derived_field_list:
if fn[0] == filter.filtered_type:
# Now we can add this
available = True
self.derived_field_list.append((filter.name, fn[1]))
fi[filter.name, fn[1]] = filter.wrap_func(fn, fi[fn])
# Now we append the dependencies
fd[filter.name, fn[1]] = fd[fn]
if available:
self.particle_types += (filter.name,)
self.filtered_particle_types.append(filter.name)
new_fields = self._setup_particle_types([filter.name])
deps, _ = self.field_info.check_derived_fields(new_fields)
self.field_dependencies.update(deps)
return available
|
def _setup_filtered_type(self, filter):
if not filter.available(self.derived_field_list):
return False
fi = self.field_info
fd = self.field_dependencies
available = False
for fn in self.derived_field_list:
if fn[0] == filter.filtered_type:
# Now we can add this
available = True
self.derived_field_list.append((filter.name, fn[1]))
fi[filter.name, fn[1]] = filter.wrap_func(fn, fi[fn])
# Now we append the dependencies
fd[filter.name, fn[1]] = fd[fn]
if available:
self.particle_types += (filter.name,)
self.filtered_particle_types.append(filter.name)
new_fields = self._setup_particle_types([filter.name])
deps, _ = self.field_info.check_derived_fields(new_fields)
self.field_dependencies.update(deps)
return available
|
https://github.com/yt-project/yt/issues/1278
|
Traceback (most recent call last):
File "test.py", line 22, in <module>
width=(2000, 'kpc'))
File "/Users/goldbaum/Documents/yt-hg/yt/visualization/particle_plots.py", line 399, in ParticlePlot
x_field = ad._determine_fields(x_field)[0]
File "/Users/goldbaum/Documents/yt-hg/yt/data_objects/data_containers.py", line 1088, in _determine_fields
finfo = self.ds._get_field_info(ftype, fname)
File "/Users/goldbaum/Documents/yt-hg/yt/data_objects/static_output.py", line 666, in _get_field_info
raise YTFieldNotFound((ftype, fname), self)
yt.utilities.exceptions.YTFieldNotFound: Could not find field '('small_x', 'particle_position_x')' in snapshot_200
|
yt.utilities.exceptions.YTFieldNotFound
|
def _oblique_pixelize(self, data_source, field, bounds, size, antialias):
indices = np.argsort(data_source["pdx"])[::-1].astype(np.int_)
buff = np.zeros((size[1], size[0]), dtype="f8")
pixelize_off_axis_cartesian(
buff,
data_source["x"],
data_source["y"],
data_source["z"],
data_source["px"],
data_source["py"],
data_source["pdx"],
data_source["pdy"],
data_source["pdz"],
data_source.center,
data_source._inv_mat,
indices,
data_source[field],
bounds,
)
return buff
|
def _oblique_pixelize(self, data_source, field, bounds, size, antialias):
indices = np.argsort(data_source["pdx"])[::-1]
buff = np.zeros((size[1], size[0]), dtype="f8")
pixelize_off_axis_cartesian(
buff,
data_source["x"],
data_source["y"],
data_source["z"],
data_source["px"],
data_source["py"],
data_source["pdx"],
data_source["pdy"],
data_source["pdz"],
data_source.center,
data_source._inv_mat,
indices,
data_source[field],
bounds,
)
return buff
|
https://github.com/yt-project/yt/issues/1254
|
Traceback (most recent call last):
File "C:\Miniconda-x64\envs\test\lib\site-packages\nose\loader.py", line 251, in generate
for test in g():
File "c:\projects\yt\yt\utilities\lib\tests\test_alt_ray_tracers.py", line 65, in test_cylindrical_ray_trace
t, s, rztheta, inds = cylindrical_ray_trace(p1, p2, left_grid, right_grid)
File "yt\utilities\lib\alt_ray_tracers.pyx", line 203, in yt.utilities.lib.alt_ray_tracers.cylindrical_ray_trace (yt/utilities/lib/alt_ray_tracers.c:6000)
tsect, tinds = np.unique(tsect[tmask], return_index=True)
ValueError: Item size of buffer (1 byte) does not match size of 'int64_t' (8 bytes)
|
ValueError
|
def __call__(self, plot):
x0, x1 = plot.xlim
y0, y1 = plot.ylim
xx0, xx1 = plot._axes.get_xlim()
yy0, yy1 = plot._axes.get_ylim()
nx = plot.image._A.shape[1] // self.factor
ny = plot.image._A.shape[0] // self.factor
indices = np.argsort(plot.data["dx"])[::-1].astype(np.int_)
pixX = np.zeros((ny, nx), dtype="f8")
pixY = np.zeros((ny, nx), dtype="f8")
pixelize_off_axis_cartesian(
pixX,
plot.data["x"],
plot.data["y"],
plot.data["z"],
plot.data["px"],
plot.data["py"],
plot.data["pdx"],
plot.data["pdy"],
plot.data["pdz"],
plot.data.center,
plot.data._inv_mat,
indices,
plot.data[self.field_x],
(x0, x1, y0, y1),
)
pixelize_off_axis_cartesian(
pixY,
plot.data["x"],
plot.data["y"],
plot.data["z"],
plot.data["px"],
plot.data["py"],
plot.data["pdx"],
plot.data["pdy"],
plot.data["pdz"],
plot.data.center,
plot.data._inv_mat,
indices,
plot.data[self.field_y],
(x0, x1, y0, y1),
)
X, Y = np.meshgrid(
np.linspace(xx0, xx1, nx, endpoint=True),
np.linspace(yy0, yy1, ny, endpoint=True),
)
if self.normalize:
nn = np.sqrt(pixX**2 + pixY**2)
pixX /= nn
pixY /= nn
plot._axes.quiver(X, Y, pixX, pixY, scale=self.scale, scale_units=self.scale_units)
plot._axes.set_xlim(xx0, xx1)
plot._axes.set_ylim(yy0, yy1)
|
def __call__(self, plot):
x0, x1 = plot.xlim
y0, y1 = plot.ylim
xx0, xx1 = plot._axes.get_xlim()
yy0, yy1 = plot._axes.get_ylim()
nx = plot.image._A.shape[1] // self.factor
ny = plot.image._A.shape[0] // self.factor
indices = np.argsort(plot.data["dx"])[::-1]
pixX = np.zeros((ny, nx), dtype="f8")
pixY = np.zeros((ny, nx), dtype="f8")
pixelize_off_axis_cartesian(
pixX,
plot.data["x"],
plot.data["y"],
plot.data["z"],
plot.data["px"],
plot.data["py"],
plot.data["pdx"],
plot.data["pdy"],
plot.data["pdz"],
plot.data.center,
plot.data._inv_mat,
indices,
plot.data[self.field_x],
(x0, x1, y0, y1),
)
pixelize_off_axis_cartesian(
pixY,
plot.data["x"],
plot.data["y"],
plot.data["z"],
plot.data["px"],
plot.data["py"],
plot.data["pdx"],
plot.data["pdy"],
plot.data["pdz"],
plot.data.center,
plot.data._inv_mat,
indices,
plot.data[self.field_y],
(x0, x1, y0, y1),
)
X, Y = np.meshgrid(
np.linspace(xx0, xx1, nx, endpoint=True),
np.linspace(yy0, yy1, ny, endpoint=True),
)
if self.normalize:
nn = np.sqrt(pixX**2 + pixY**2)
pixX /= nn
pixY /= nn
plot._axes.quiver(X, Y, pixX, pixY, scale=self.scale, scale_units=self.scale_units)
plot._axes.set_xlim(xx0, xx1)
plot._axes.set_ylim(yy0, yy1)
|
https://github.com/yt-project/yt/issues/1254
|
Traceback (most recent call last):
File "C:\Miniconda-x64\envs\test\lib\site-packages\nose\loader.py", line 251, in generate
for test in g():
File "c:\projects\yt\yt\utilities\lib\tests\test_alt_ray_tracers.py", line 65, in test_cylindrical_ray_trace
t, s, rztheta, inds = cylindrical_ray_trace(p1, p2, left_grid, right_grid)
File "yt\utilities\lib\alt_ray_tracers.pyx", line 203, in yt.utilities.lib.alt_ray_tracers.cylindrical_ray_trace (yt/utilities/lib/alt_ray_tracers.c:6000)
tsect, tinds = np.unique(tsect[tmask], return_index=True)
ValueError: Item size of buffer (1 byte) does not match size of 'int64_t' (8 bytes)
|
ValueError
|
def deposit(self, positions, fields=None, method=None, kernel_name="cubic"):
cls = getattr(particle_deposit, "deposit_%s" % method, None)
if cls is None:
raise YTParticleDepositionNotImplemented(method)
# We allocate number of zones, not number of octs. Everything inside
# this is fortran ordered because of the ordering in the octree deposit
# routines, so we reverse it here to match the convention there
op = cls(tuple(self.ActiveDimensions)[::-1], kernel_name)
op.initialize()
op.process_grid(self, positions, fields)
vals = op.finalize()
# Fortran-ordered, so transpose.
return vals.transpose()
|
def deposit(self, positions, fields=None, method=None, kernel_name="cubic"):
cls = getattr(particle_deposit, "deposit_%s" % method, None)
if cls is None:
raise YTParticleDepositionNotImplemented(method)
# We allocate number of zones, not number of octs
op = cls(self.ActiveDimensions, kernel_name)
op.initialize()
op.process_grid(self, positions, fields)
vals = op.finalize()
return vals.copy(order="C")
|
https://github.com/yt-project/yt/issues/1379
|
yt : [INFO ] 2017-05-08 14:18:25,422 integer runtime parameter checkpointfilenumber overwrites a simulation scalar of the same name
yt : [INFO ] 2017-05-08 14:18:25,422 integer runtime parameter particlefilenumber overwrites a simulation scalar of the same name
yt : [INFO ] 2017-05-08 14:18:25,422 integer runtime parameter plotfilenumber overwrites a simulation scalar of the same name
yt : [INFO ] 2017-05-08 14:18:25,437 Parameters: current_time = 50.0034519855
yt : [INFO ] 2017-05-08 14:18:25,437 Parameters: domain_dimensions = [2 2 2]
yt : [INFO ] 2017-05-08 14:18:25,438 Parameters: domain_left_edge = [ 0. 0. 0.]
yt : [INFO ] 2017-05-08 14:18:25,438 Parameters: domain_right_edge = [ 20. 0.5 0.5]
yt : [INFO ] 2017-05-08 14:18:25,438 Parameters: cosmological_simulation = 0.0
yt : [INFO ] 2017-05-08 14:18:25,441 Allocating for 9.480e+04 particles (index particle type 'all')
yt : [INFO ] 2017-05-08 14:18:25,468 Identified 3.969e+03 octs
Traceback (most recent call last):
File "to_grid.py", line 8, in <module>
print(vol["deposit", "all_density"].max())
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/data_containers.py", line 281, in __getitem__
self.get_data(f)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/construction_data_containers.py", line 640, in get_data
if len(gen) > 0: self._generate_fields(gen)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/data_containers.py", line 1353, in _generate_fields
fd = self._generate_field(field)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/data_containers.py", line 318, in _generate_field
tr = self._generate_fluid_field(field)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/data_containers.py", line 338, in _generate_fluid_field
rv = finfo(gen_obj)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/fields/derived_field.py", line 234, in __call__
dd = self._function(self, data)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/fields/particle_fields.py", line 129, in particle_density
d = data.deposit(pos, [mass], method = "sum")
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/construction_data_containers.py", line 733, in deposit
op.process_grid(self, positions, fields)
File "yt/geometry/particle_deposit.pyx", line 144, in yt.geometry.particle_deposit.ParticleDepositOperation.process_grid (yt/geometry/particle_deposit.c:5633)
File "yt/geometry/particle_deposit.pyx", line 271, in yt.geometry.particle_deposit.SumParticleField.process (yt/geometry/particle_deposit.c:7204)
IndexError: Out of bounds on buffer access (axis 2)
|
IndexError
|
def deposit(self, positions, fields=None, method=None, kernel_name="cubic"):
# Here we perform our particle deposition.
cls = getattr(particle_deposit, "deposit_%s" % method, None)
if cls is None:
raise YTParticleDepositionNotImplemented(method)
# We allocate number of zones, not number of octs. Everything inside
# this is Fortran ordered because of the ordering in the octree deposit
# routines, so we reverse it here to match the convention there
op = cls(tuple(self.ActiveDimensions[::-1]), kernel_name)
op.initialize()
op.process_grid(self, positions, fields)
vals = op.finalize()
if vals is None:
return
# Fortran-ordered, so transpose.
return vals.transpose()
|
def deposit(self, positions, fields=None, method=None, kernel_name="cubic"):
# Here we perform our particle deposition.
cls = getattr(particle_deposit, "deposit_%s" % method, None)
if cls is None:
raise YTParticleDepositionNotImplemented(method)
# We allocate number of zones, not number of octs
# Everything inside this is fortran ordered, so we reverse it here.
op = cls(tuple(self.ActiveDimensions)[::-1], kernel_name)
op.initialize()
op.process_grid(self, positions, fields)
vals = op.finalize()
if vals is None:
return
return vals.transpose() # Fortran-ordered, so transpose.
|
https://github.com/yt-project/yt/issues/1379
|
yt : [INFO ] 2017-05-08 14:18:25,422 integer runtime parameter checkpointfilenumber overwrites a simulation scalar of the same name
yt : [INFO ] 2017-05-08 14:18:25,422 integer runtime parameter particlefilenumber overwrites a simulation scalar of the same name
yt : [INFO ] 2017-05-08 14:18:25,422 integer runtime parameter plotfilenumber overwrites a simulation scalar of the same name
yt : [INFO ] 2017-05-08 14:18:25,437 Parameters: current_time = 50.0034519855
yt : [INFO ] 2017-05-08 14:18:25,437 Parameters: domain_dimensions = [2 2 2]
yt : [INFO ] 2017-05-08 14:18:25,438 Parameters: domain_left_edge = [ 0. 0. 0.]
yt : [INFO ] 2017-05-08 14:18:25,438 Parameters: domain_right_edge = [ 20. 0.5 0.5]
yt : [INFO ] 2017-05-08 14:18:25,438 Parameters: cosmological_simulation = 0.0
yt : [INFO ] 2017-05-08 14:18:25,441 Allocating for 9.480e+04 particles (index particle type 'all')
yt : [INFO ] 2017-05-08 14:18:25,468 Identified 3.969e+03 octs
Traceback (most recent call last):
File "to_grid.py", line 8, in <module>
print(vol["deposit", "all_density"].max())
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/data_containers.py", line 281, in __getitem__
self.get_data(f)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/construction_data_containers.py", line 640, in get_data
if len(gen) > 0: self._generate_fields(gen)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/data_containers.py", line 1353, in _generate_fields
fd = self._generate_field(field)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/data_containers.py", line 318, in _generate_field
tr = self._generate_fluid_field(field)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/data_containers.py", line 338, in _generate_fluid_field
rv = finfo(gen_obj)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/fields/derived_field.py", line 234, in __call__
dd = self._function(self, data)
File "/Applications/anaconda/lib/python2.7/site-packages/yt/fields/particle_fields.py", line 129, in particle_density
d = data.deposit(pos, [mass], method = "sum")
File "/Applications/anaconda/lib/python2.7/site-packages/yt/data_objects/construction_data_containers.py", line 733, in deposit
op.process_grid(self, positions, fields)
File "yt/geometry/particle_deposit.pyx", line 144, in yt.geometry.particle_deposit.ParticleDepositOperation.process_grid (yt/geometry/particle_deposit.c:5633)
File "yt/geometry/particle_deposit.pyx", line 271, in yt.geometry.particle_deposit.SumParticleField.process (yt/geometry/particle_deposit.c:7204)
IndexError: Out of bounds on buffer access (axis 2)
|
IndexError
|
def get_vertex_centered_data(self, fields, smoothed=True, no_ghost=False):
_old_api = isinstance(fields, (string_types, tuple))
if _old_api:
message = (
"get_vertex_centered_data() requires list of fields, rather than "
"a single field as an argument."
)
warnings.warn(message, DeprecationWarning, stacklevel=2)
fields = [fields]
# Make sure the field list has only unique entries
fields = list(set(fields))
new_fields = {}
for field in fields:
finfo = self.ds._get_field_info(field)
new_fields[field] = self.ds.arr(
np.zeros(self.ActiveDimensions + 1), finfo.output_units
)
if no_ghost:
for field in fields:
# Ensure we have the native endianness in this array. Avoid making
# a copy if possible.
old_field = np.asarray(self[field], dtype="=f8")
# We'll use the ghost zone routine, which will naturally
# extrapolate here.
input_left = np.array([0.5, 0.5, 0.5], dtype="float64")
output_left = np.array([0.0, 0.0, 0.0], dtype="float64")
# rf = 1 here
ghost_zone_interpolate(
1, old_field, input_left, new_fields[field], output_left
)
else:
cg = self.retrieve_ghost_zones(1, fields, smoothed=smoothed)
for field in fields:
np.add(new_fields[field], cg[field][1:, 1:, 1:], new_fields[field])
np.add(new_fields[field], cg[field][:-1, 1:, 1:], new_fields[field])
np.add(new_fields[field], cg[field][1:, :-1, 1:], new_fields[field])
np.add(new_fields[field], cg[field][1:, 1:, :-1], new_fields[field])
np.add(new_fields[field], cg[field][:-1, 1:, :-1], new_fields[field])
np.add(new_fields[field], cg[field][1:, :-1, :-1], new_fields[field])
np.add(new_fields[field], cg[field][:-1, :-1, 1:], new_fields[field])
np.add(new_fields[field], cg[field][:-1, :-1, :-1], new_fields[field])
np.multiply(new_fields[field], 0.125, new_fields[field])
if _old_api:
return new_fields[fields[0]]
return new_fields
|
def get_vertex_centered_data(self, fields, smoothed=True, no_ghost=False):
_old_api = isinstance(fields, (string_types, tuple))
if _old_api:
message = (
"get_vertex_centered_data() requires list of fields, rather than "
"a single field as an argument."
)
warnings.warn(message, DeprecationWarning, stacklevel=2)
fields = [fields]
# Make sure the field list has only unique entries
fields = list(set(fields))
new_fields = {}
for field in fields:
finfo = self.ds._get_field_info(field)
new_fields[field] = self.ds.arr(
np.zeros(self.ActiveDimensions + 1), finfo.units
)
if no_ghost:
for field in fields:
# Ensure we have the native endianness in this array. Avoid making
# a copy if possible.
old_field = np.asarray(self[field], dtype="=f8")
# We'll use the ghost zone routine, which will naturally
# extrapolate here.
input_left = np.array([0.5, 0.5, 0.5], dtype="float64")
output_left = np.array([0.0, 0.0, 0.0], dtype="float64")
# rf = 1 here
ghost_zone_interpolate(
1, old_field, input_left, new_fields[field], output_left
)
else:
cg = self.retrieve_ghost_zones(1, fields, smoothed=smoothed)
for field in fields:
np.add(new_fields[field], cg[field][1:, 1:, 1:], new_fields[field])
np.add(new_fields[field], cg[field][:-1, 1:, 1:], new_fields[field])
np.add(new_fields[field], cg[field][1:, :-1, 1:], new_fields[field])
np.add(new_fields[field], cg[field][1:, 1:, :-1], new_fields[field])
np.add(new_fields[field], cg[field][:-1, 1:, :-1], new_fields[field])
np.add(new_fields[field], cg[field][1:, :-1, :-1], new_fields[field])
np.add(new_fields[field], cg[field][:-1, :-1, 1:], new_fields[field])
np.add(new_fields[field], cg[field][:-1, :-1, :-1], new_fields[field])
np.multiply(new_fields[field], 0.125, new_fields[field])
if _old_api:
return new_fields[fields[0]]
return new_fields
|
https://github.com/yt-project/yt/issues/1368
|
Traceback (most recent call last):
File "test.py", line 9, in <module>
print(sur['x'][0])
File "/Users/goldbaum/Documents/yt-git/yt/data_objects/data_containers.py", line 281, in __getitem__
self.get_data(f)
File "/Users/goldbaum/Documents/yt-git/yt/data_objects/construction_data_containers.py", line 1144, in get_data
mask, fields, sample_type, no_ghost=no_ghost)
File "/Users/goldbaum/Documents/yt-git/yt/data_objects/construction_data_containers.py", line 1171, in _extract_isocontours_from_grid
svals = grid.get_vertex_centered_data([sample_values])[sample_values]
File "/Users/goldbaum/Documents/yt-git/yt/data_objects/grid_patch.py", line 293, in get_vertex_centered_data
np.add(new_fields[field], cg[field][1: ,1: ,1: ], new_fields[field])
File "/Users/goldbaum/Documents/yt-git/yt/units/yt_array.py", line 1225, in __array_wrap__
raise YTUfuncUnitError(context[0], unit1, unit2)
yt.utilities.exceptions.YTUfuncUnitError: The NumPy <ufunc 'add'> operation is only allowed on objects with identical units. Convert one of the arrays to the other's units first. Received units (code_length) and (cm).
|
yt.utilities.exceptions.YTUfuncUnitError
|
def _localize_check(self, fn):
if fn is None:
return None
# If the file exists, use it. If not, set it to None.
root_dir = os.path.dirname(self.output_dir)
full_fn = os.path.join(root_dir, fn)
if os.path.exists(full_fn):
return full_fn
return None
|
def _localize_check(self, fn):
# If the file exists, use it. If not, set it to None.
root_dir = os.path.dirname(self.output_dir)
full_fn = os.path.join(root_dir, fn)
if os.path.exists(full_fn):
return full_fn
return None
|
https://github.com/yt-project/yt/issues/1338
|
Traceback (most recent call last):
File "get_plot.py", line 20, in <module>
ds=yt.load(file_name)
File "xxx/python2.7/site-packages/yt/convenience.py", line 86, in load
return candidates[0](*args, **kwargs)
File "xxx/python2.7/site-packages/yt/frontends/boxlib/data_structures.py", line 392, in __init__
self.cparam_filename = self._localize_check(cparam_filename)
File "xxx/python2.7/site-packages/yt/frontends/boxlib/data_structures.py", line 415, in _localize_check
if os.path.exists(full_fn):
File "xxx/python2.7/genericpath.py", line 26, in exists
os.stat(path)
TypeError: coercing to Unicode: need string or buffer, NoneType found
|
TypeError
|
def __call__(self, plot):
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar
# Callback only works for plots with axis ratios of 1
xsize = plot.xlim[1] - plot.xlim[0]
# Setting pos overrides corner argument
if self.pos is None:
if self.corner == "upper_left":
self.pos = (0.11, 0.952)
elif self.corner == "upper_right":
self.pos = (0.89, 0.952)
elif self.corner == "lower_left":
self.pos = (0.11, 0.052)
elif self.corner == "lower_right":
self.pos = (0.89, 0.052)
elif self.corner is None:
self.pos = (0.5, 0.5)
else:
raise SyntaxError(
"Argument 'corner' must be set to "
"'upper_left', 'upper_right', 'lower_left', "
"'lower_right', or None"
)
# When identifying a best fit distance unit, do not allow scale marker
# to be greater than max_frac fraction of xaxis or under min_frac
# fraction of xaxis
max_scale = self.max_frac * xsize
min_scale = self.min_frac * xsize
if self.coeff is None:
self.coeff = 1.0
# If no units are set, then identify a best fit distance unit
if self.unit is None:
min_scale = plot.ds.get_smallest_appropriate_unit(
min_scale, return_quantity=True
)
max_scale = plot.ds.get_smallest_appropriate_unit(
max_scale, return_quantity=True
)
self.coeff = max_scale.v
self.unit = max_scale.units
self.scale = YTQuantity(self.coeff, self.unit)
text = "{scale} {units}".format(scale=int(self.coeff), units=self.unit)
image_scale = (
plot.frb.convert_distance_x(self.scale) / plot.frb.convert_distance_x(xsize)
).v
size_vertical = self.size_bar_args.pop("size_vertical", 0.005 * plot.aspect)
fontproperties = self.size_bar_args.pop(
"fontproperties", plot.font_properties.copy()
)
frameon = self.size_bar_args.pop("frameon", self.draw_inset_box)
# FontProperties instances use set_<property>() setter functions
for key, val in self.text_args.items():
setter_func = "set_" + key
try:
getattr(fontproperties, setter_func)(val)
except AttributeError:
raise AttributeError(
"Cannot set text_args keyword "
"to include '%s' because MPL's fontproperties object does "
"not contain function '%s'." % (key, setter_func)
)
# this "anchors" the size bar to a box centered on self.pos in axis
# coordinates
self.size_bar_args["bbox_to_anchor"] = self.pos
self.size_bar_args["bbox_transform"] = plot._axes.transAxes
bar = AnchoredSizeBar(
plot._axes.transAxes,
image_scale,
text,
10,
size_vertical=size_vertical,
fontproperties=fontproperties,
frameon=frameon,
**self.size_bar_args,
)
bar.patch.set(**self.inset_box_args)
plot._axes.add_artist(bar)
return plot
|
def __call__(self, plot):
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar
# Callback only works for plots with axis ratios of 1
xsize = plot.xlim[1] - plot.xlim[0]
if plot.aspect != 1.0:
raise NotImplementedError(
"Scale callback has only been implemented for plots with no "
"aspect ratio scaling. (aspect = {%s})".format(plot._aspect)
)
# Setting pos overrides corner argument
if self.pos is None:
if self.corner == "upper_left":
self.pos = (0.11, 0.952)
elif self.corner == "upper_right":
self.pos = (0.89, 0.952)
elif self.corner == "lower_left":
self.pos = (0.11, 0.052)
elif self.corner == "lower_right":
self.pos = (0.89, 0.052)
elif self.corner is None:
self.pos = (0.5, 0.5)
else:
raise SyntaxError(
"Argument 'corner' must be set to "
"'upper_left', 'upper_right', 'lower_left', "
"'lower_right', or None"
)
# When identifying a best fit distance unit, do not allow scale marker
# to be greater than max_frac fraction of xaxis or under min_frac
# fraction of xaxis
max_scale = self.max_frac * xsize
min_scale = self.min_frac * xsize
if self.coeff is None:
self.coeff = 1.0
# If no units are set, then identify a best fit distance unit
if self.unit is None:
min_scale = plot.ds.get_smallest_appropriate_unit(
min_scale, return_quantity=True
)
max_scale = plot.ds.get_smallest_appropriate_unit(
max_scale, return_quantity=True
)
self.coeff = max_scale.v
self.unit = max_scale.units
self.scale = YTQuantity(self.coeff, self.unit)
text = "{scale} {units}".format(scale=int(self.coeff), units=self.unit)
image_scale = (
plot.frb.convert_distance_x(self.scale) / plot.frb.convert_distance_x(xsize)
).v
size_vertical = self.size_bar_args.pop("size_vertical", 0.005)
fontproperties = self.size_bar_args.pop(
"fontproperties", plot.font_properties.copy()
)
frameon = self.size_bar_args.pop("frameon", self.draw_inset_box)
# FontProperties instances use set_<property>() setter functions
for key, val in self.text_args.items():
setter_func = "set_" + key
try:
getattr(fontproperties, setter_func)(val)
except AttributeError:
raise AttributeError(
"Cannot set text_args keyword "
"to include '%s' because MPL's fontproperties object does "
"not contain function '%s'." % (key, setter_func)
)
# this "anchors" the size bar to a box centered on self.pos in axis
# coordinates
self.size_bar_args["bbox_to_anchor"] = self.pos
self.size_bar_args["bbox_transform"] = plot._axes.transAxes
bar = AnchoredSizeBar(
plot._axes.transAxes,
image_scale,
text,
10,
size_vertical=size_vertical,
fontproperties=fontproperties,
frameon=frameon,
**self.size_bar_args,
)
bar.patch.set(**self.inset_box_args)
plot._axes.add_artist(bar)
return plot
|
https://github.com/yt-project/yt/issues/1337
|
Traceback (most recent call last):
File "/usr/local/lib/python3.5/site-packages/yt/visualization/plot_window.py", line 1007, in run_callbacks
callback(cbw)
File "/usr/local/lib/python3.5/site-packages/yt/visualization/plot_modifications.py", line 54, in _check_geometry
return func(self, plot)
File "/usr/local/lib/python3.5/site-packages/yt/visualization/plot_modifications.py", line 2084, in __call__
"aspect ratio scaling. (aspect = {%s})".format(plot._aspect))
AttributeError: 'CallbackWrapper' object has no attribute '_aspect'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "test.py", line 7, in <module>
slc.save()
File "/usr/local/lib/python3.5/site-packages/yt/visualization/plot_container.py", line 78, in newfunc
args[0]._setup_plots()
File "/usr/local/lib/python3.5/site-packages/yt/visualization/plot_window.py", line 956, in _setup_plots
self.run_callbacks()
File "/usr/local/lib/python3.5/site-packages/yt/visualization/plot_window.py", line 1013, in run_callbacks
sys.exc_info()[2])
File "/usr/local/lib/python3.5/site-packages/six.py", line 685, in reraise
raise value.with_traceback(tb)
File "/usr/local/lib/python3.5/site-packages/yt/visualization/plot_window.py", line 1007, in run_callbacks
callback(cbw)
File "/usr/local/lib/python3.5/site-packages/yt/visualization/plot_modifications.py", line 54, in _check_geometry
return func(self, plot)
File "/usr/local/lib/python3.5/site-packages/yt/visualization/plot_modifications.py", line 2084, in __call__
"aspect ratio scaling. (aspect = {%s})".format(plot._aspect))
yt.utilities.exceptions.YTPlotCallbackError: annotate_scale callback failed with the following error: 'CallbackWrapper' object has no attribute '_aspect'
|
AttributeError
|
def data(self):
"""
Return a data container configured like the original used to
create this dataset.
"""
if self._data_obj is None:
# Some data containers can't be recontructed in the same way
# since this is now particle-like data.
data_type = self.parameters.get("data_type")
container_type = self.parameters.get("container_type")
ex_container_type = ["cutting", "proj", "ray", "slice", "cut_region"]
if data_type == "yt_light_ray" or container_type in ex_container_type:
mylog.info("Returning an all_data data container.")
return self.all_data()
my_obj = getattr(self, self.parameters["container_type"])
my_args = [self.parameters[con_arg] for con_arg in self.parameters["con_args"]]
self._data_obj = my_obj(*my_args)
return self._data_obj
|
def data(self):
"""
Return a data container configured like the original used to
create this dataset.
"""
if self._data_obj is None:
# Some data containers can't be recontructed in the same way
# since this is now particle-like data.
data_type = self.parameters.get("data_type")
container_type = self.parameters.get("container_type")
ex_container_type = ["cutting", "proj", "ray", "slice"]
if data_type == "yt_light_ray" or container_type in ex_container_type:
mylog.info("Returning an all_data data container.")
return self.all_data()
my_obj = getattr(self, self.parameters["container_type"])
my_args = [self.parameters[con_arg] for con_arg in self.parameters["con_args"]]
self._data_obj = my_obj(*my_args)
return self._data_obj
|
https://github.com/yt-project/yt/issues/1330
|
#!python
-----------------------------------------------------------------
yt : [INFO ] 2017-03-03 15:06:16,097 Calculating time from 1.000e+00 to be 4.376e+17 seconds
yt : [INFO ] 2017-03-03 15:06:16,098 Assuming length units are in kpc/h (comoving)
yt : [INFO ] 2017-03-03 15:06:16,127 Parameters: current_time = 4.37587753506e+17 s
yt : [INFO ] 2017-03-03 15:06:16,127 Parameters: domain_dimensions = [2 2 2]
yt : [INFO ] 2017-03-03 15:06:16,128 Parameters: domain_left_edge = [ 0. 0. 0.]
yt : [INFO ] 2017-03-03 15:06:16,128 Parameters: domain_right_edge = [ 5000. 5000. 5000.]
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: cosmological_simulation = 1
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: current_redshift = -2.22044604925e-16
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: omega_lambda = 0.73
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: omega_matter = 0.27
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: hubble_constant = 0.7
yt : [INFO ] 2017-03-03 15:06:16,134 Allocating for 1.370e+06 particles (index particle type 'all')
yt : [INFO ] 2017-03-03 15:06:16,388 Identified 9.178e+04 octs
yt : [INFO ] 2017-03-03 15:06:44,399 Max Value is 1.00458e-25 at 2255.8593750000000000 2568.3593750000000000 2802.7343750000000000
yt : [INFO ] 2017-03-03 15:06:45,874 Saving field data to yt dataset: Gadget3-snap-format2_sphere.h5.
yt : [INFO ] 2017-03-03 15:06:47,395 Saving field data to yt dataset: Gadget3-snap-format2_cut_region.h5.
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-1-5b545a06497d> in <module>()
4 sp.save_as_dataset(fields=["temperature"])
5 kt = ds.cut_region(sp,['(obj["kT"] < 75) & (obj["kT"] > 2)'])
----> 6 fn = kt.save_as_dataset(fields=["density"])
/usr/local/lib/python2.7/dist-packages/yt/data_objects/data_containers.pyc in save_as_dataset(self, filename, fields)
591 extra_attrs["dimensionality"] = self._dimensionality
592 save_as_dataset(self.ds, filename, data, field_types=ftypes,
--> 593 extra_attrs=extra_attrs)
594
595 return filename
/usr/local/lib/python2.7/dist-packages/yt/frontends/ytdata/utilities.pyc in save_as_dataset(ds, filename, data, field_types, extra_attrs)
116 for attr in extra_attrs:
117 my_val = extra_attrs[attr]
--> 118 _yt_array_hdf5_attr(fh, attr, my_val)
119 if "data_type" not in extra_attrs:
120 fh.attrs["data_type"] = "yt_array_data"
/usr/local/lib/python2.7/dist-packages/yt/frontends/ytdata/utilities.pyc in _yt_array_hdf5_attr(fh, attr, val)
231 if val.dtype.kind == 'U':
232 val = val.astype('|S')
--> 233 fh.attrs[str(attr)] = val
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/tmp/pip-4rPeHA-build/h5py/_objects.c:2684)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/tmp/pip-4rPeHA-build/h5py/_objects.c:2642)()
/usr/local/lib/python2.7/dist-packages/h5py/_hl/attrs.pyc in __setitem__(self, name, value)
91 use the methods create() and modify().
92 """
---> 93 self.create(name, data=value, dtype=base.guess_dtype(value))
94
95 @with_phil
/usr/local/lib/python2.7/dist-packages/h5py/_hl/attrs.pyc in create(self, name, data, shape, dtype)
167 # Make HDF5 datatype and dataspace for the H5A calls
168 if use_htype is None:
--> 169 htype = h5t.py_create(original_dtype, logical=True)
170 htype2 = h5t.py_create(original_dtype) # Must be bit-for-bit representation rather than logical
171 else:
h5py/h5t.pyx in h5py.h5t.py_create (/tmp/pip-4rPeHA-build/h5py/h5t.c:16078)()
h5py/h5t.pyx in h5py.h5t.py_create (/tmp/pip-4rPeHA-build/h5py/h5t.c:15912)()
h5py/h5t.pyx in h5py.h5t.py_create (/tmp/pip-4rPeHA-build/h5py/h5t.c:15813)()
TypeError: Object dtype dtype('O') has no native HDF5 equivalent
|
TypeError
|
def _yt_array_hdf5_attr(fh, attr, val):
r"""Save a YTArray or YTQuantity as an hdf5 attribute.
Save an hdf5 attribute. If it has units, save an
additional attribute with the units.
Parameters
----------
fh : an open hdf5 file, group, or dataset
The hdf5 file, group, or dataset to which the
attribute will be written.
attr : str
The name of the attribute to be saved.
val : anything
The value to be saved.
"""
if val is None:
val = "None"
if hasattr(val, "units"):
fh.attrs["%s_units" % attr] = str(val.units)
# The following is a crappy workaround for getting
# Unicode strings into HDF5 attributes in Python 3
if iterable(val):
val = np.array(val)
if val.dtype.kind == "U":
val = val.astype("|S")
try:
fh.attrs[str(attr)] = val
# This is raised if no HDF5 equivalent exists.
# In that case, save its string representation.
except TypeError:
fh.attrs[str(attr)] = str(val)
|
def _yt_array_hdf5_attr(fh, attr, val):
r"""Save a YTArray or YTQuantity as an hdf5 attribute.
Save an hdf5 attribute. If it has units, save an
additional attribute with the units.
Parameters
----------
fh : an open hdf5 file, group, or dataset
The hdf5 file, group, or dataset to which the
attribute will be written.
attr : str
The name of the attribute to be saved.
val : anything
The value to be saved.
"""
if val is None:
val = "None"
if hasattr(val, "units"):
fh.attrs["%s_units" % attr] = str(val.units)
# The following is a crappy workaround for getting
# Unicode strings into HDF5 attributes in Python 3
if iterable(val):
val = np.array(val)
if val.dtype.kind == "U":
val = val.astype("|S")
fh.attrs[str(attr)] = val
|
https://github.com/yt-project/yt/issues/1330
|
#!python
-----------------------------------------------------------------
yt : [INFO ] 2017-03-03 15:06:16,097 Calculating time from 1.000e+00 to be 4.376e+17 seconds
yt : [INFO ] 2017-03-03 15:06:16,098 Assuming length units are in kpc/h (comoving)
yt : [INFO ] 2017-03-03 15:06:16,127 Parameters: current_time = 4.37587753506e+17 s
yt : [INFO ] 2017-03-03 15:06:16,127 Parameters: domain_dimensions = [2 2 2]
yt : [INFO ] 2017-03-03 15:06:16,128 Parameters: domain_left_edge = [ 0. 0. 0.]
yt : [INFO ] 2017-03-03 15:06:16,128 Parameters: domain_right_edge = [ 5000. 5000. 5000.]
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: cosmological_simulation = 1
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: current_redshift = -2.22044604925e-16
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: omega_lambda = 0.73
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: omega_matter = 0.27
yt : [INFO ] 2017-03-03 15:06:16,129 Parameters: hubble_constant = 0.7
yt : [INFO ] 2017-03-03 15:06:16,134 Allocating for 1.370e+06 particles (index particle type 'all')
yt : [INFO ] 2017-03-03 15:06:16,388 Identified 9.178e+04 octs
yt : [INFO ] 2017-03-03 15:06:44,399 Max Value is 1.00458e-25 at 2255.8593750000000000 2568.3593750000000000 2802.7343750000000000
yt : [INFO ] 2017-03-03 15:06:45,874 Saving field data to yt dataset: Gadget3-snap-format2_sphere.h5.
yt : [INFO ] 2017-03-03 15:06:47,395 Saving field data to yt dataset: Gadget3-snap-format2_cut_region.h5.
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-1-5b545a06497d> in <module>()
4 sp.save_as_dataset(fields=["temperature"])
5 kt = ds.cut_region(sp,['(obj["kT"] < 75) & (obj["kT"] > 2)'])
----> 6 fn = kt.save_as_dataset(fields=["density"])
/usr/local/lib/python2.7/dist-packages/yt/data_objects/data_containers.pyc in save_as_dataset(self, filename, fields)
591 extra_attrs["dimensionality"] = self._dimensionality
592 save_as_dataset(self.ds, filename, data, field_types=ftypes,
--> 593 extra_attrs=extra_attrs)
594
595 return filename
/usr/local/lib/python2.7/dist-packages/yt/frontends/ytdata/utilities.pyc in save_as_dataset(ds, filename, data, field_types, extra_attrs)
116 for attr in extra_attrs:
117 my_val = extra_attrs[attr]
--> 118 _yt_array_hdf5_attr(fh, attr, my_val)
119 if "data_type" not in extra_attrs:
120 fh.attrs["data_type"] = "yt_array_data"
/usr/local/lib/python2.7/dist-packages/yt/frontends/ytdata/utilities.pyc in _yt_array_hdf5_attr(fh, attr, val)
231 if val.dtype.kind == 'U':
232 val = val.astype('|S')
--> 233 fh.attrs[str(attr)] = val
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/tmp/pip-4rPeHA-build/h5py/_objects.c:2684)()
h5py/_objects.pyx in h5py._objects.with_phil.wrapper (/tmp/pip-4rPeHA-build/h5py/_objects.c:2642)()
/usr/local/lib/python2.7/dist-packages/h5py/_hl/attrs.pyc in __setitem__(self, name, value)
91 use the methods create() and modify().
92 """
---> 93 self.create(name, data=value, dtype=base.guess_dtype(value))
94
95 @with_phil
/usr/local/lib/python2.7/dist-packages/h5py/_hl/attrs.pyc in create(self, name, data, shape, dtype)
167 # Make HDF5 datatype and dataspace for the H5A calls
168 if use_htype is None:
--> 169 htype = h5t.py_create(original_dtype, logical=True)
170 htype2 = h5t.py_create(original_dtype) # Must be bit-for-bit representation rather than logical
171 else:
h5py/h5t.pyx in h5py.h5t.py_create (/tmp/pip-4rPeHA-build/h5py/h5t.c:16078)()
h5py/h5t.pyx in h5py.h5t.py_create (/tmp/pip-4rPeHA-build/h5py/h5t.c:15912)()
h5py/h5t.pyx in h5py.h5t.py_create (/tmp/pip-4rPeHA-build/h5py/h5t.c:15813)()
TypeError: Object dtype dtype('O') has no native HDF5 equivalent
|
TypeError
|
def add_deposited_particle_field(
self, deposit_field, method, kernel_name="cubic", weight_field="particle_mass"
):
"""Add a new deposited particle field
Creates a new deposited field based on the particle *deposit_field*.
Parameters
----------
deposit_field : tuple
The field name tuple of the particle field the deposited field will
be created from. This must be a field name tuple so yt can
appropriately infer the correct particle type.
method : string
This is the "method name" which will be looked up in the
`particle_deposit` namespace as `methodname_deposit`. Current
methods include `simple_smooth`, `sum`, `std`, `cic`, `weighted_mean`,
`mesh_id`, and `nearest`.
kernel_name : string, default 'cubic'
This is the name of the smoothing kernel to use. It is only used for
the `simple_smooth` method and is otherwise ignored. Current
supported kernel names include `cubic`, `quartic`, `quintic`,
`wendland2`, `wendland4`, and `wendland6`.
weight_field : string, default 'particle_mass'
Weighting field name for deposition method `weighted_mean`.
Returns
-------
The field name tuple for the newly created field.
"""
self.index
if isinstance(deposit_field, tuple):
ptype, deposit_field = deposit_field[0], deposit_field[1]
else:
raise RuntimeError
units = self.field_info[ptype, deposit_field].units
take_log = self.field_info[ptype, deposit_field].take_log
name_map = {
"sum": "sum",
"std": "std",
"cic": "cic",
"weighted_mean": "avg",
"nearest": "nn",
"simple_smooth": "ss",
"count": "count",
}
field_name = "%s_" + name_map[method] + "_%s"
field_name = field_name % (ptype, deposit_field.replace("particle_", ""))
if method == "count":
field_name = "%s_count" % ptype
if ("deposit", field_name) in self.field_info:
mylog.warning("The deposited field %s already exists" % field_name)
return ("deposit", field_name)
else:
units = "dimensionless"
take_log = False
def _deposit_field(field, data):
"""
Create a grid field for particle quantities using given method.
"""
pos = data[ptype, "particle_position"]
fields = [data[ptype, deposit_field]]
if method == "weighted_mean":
fields.append(data[ptype, weight_field])
fields = [np.ascontiguousarray(f) for f in fields]
d = data.deposit(pos, fields, method=method, kernel_name=kernel_name)
d = data.ds.arr(d, input_units=units)
if method == "weighted_mean":
d[np.isnan(d)] = 0.0
return d
self.add_field(
("deposit", field_name),
function=_deposit_field,
sampling_type="cell",
units=units,
take_log=take_log,
validators=[ValidateSpatial()],
)
return ("deposit", field_name)
|
def add_deposited_particle_field(
self, deposit_field, method, kernel_name="cubic", weight_field="particle_mass"
):
"""Add a new deposited particle field
Creates a new deposited field based on the particle *deposit_field*.
Parameters
----------
deposit_field : tuple
The field name tuple of the particle field the deposited field will
be created from. This must be a field name tuple so yt can
appropriately infer the correct particle type.
method : string
This is the "method name" which will be looked up in the
`particle_deposit` namespace as `methodname_deposit`. Current
methods include `simple_smooth`, `sum`, `std`, `cic`, `weighted_mean`,
`mesh_id`, and `nearest`.
kernel_name : string, default 'cubic'
This is the name of the smoothing kernel to use. It is only used for
the `simple_smooth` method and is otherwise ignored. Current
supported kernel names include `cubic`, `quartic`, `quintic`,
`wendland2`, `wendland4`, and `wendland6`.
weight_field : string, default 'particle_mass'
Weighting field name for deposition method `weighted_mean`.
Returns
-------
The field name tuple for the newly created field.
"""
self.index
if isinstance(deposit_field, tuple):
ptype, deposit_field = deposit_field[0], deposit_field[1]
else:
raise RuntimeError
units = self.field_info[ptype, deposit_field].units
take_log = self.field_info[ptype, deposit_field].take_log
name_map = {
"sum": "sum",
"std": "std",
"cic": "cic",
"weighted_mean": "avg",
"nearest": "nn",
"simple_smooth": "ss",
"count": "count",
}
field_name = "%s_" + name_map[method] + "_%s"
field_name = field_name % (ptype, deposit_field.replace("particle_", ""))
if method == "count":
field_name = "%s_count" % ptype
if ("deposit", field_name) in self.field_info:
mylog.warning("The deposited field %s already exists" % field_name)
return ("deposit", field_name)
else:
units = "dimensionless"
take_log = False
def _deposit_field(field, data):
"""
Create a grid field for particle quantities using given method.
"""
pos = data[ptype, "particle_position"]
if method == "weighted_mean":
d = data.ds.arr(
data.deposit(
pos,
[data[ptype, deposit_field], data[ptype, weight_field]],
method=method,
kernel_name=kernel_name,
),
input_units=units,
)
d[np.isnan(d)] = 0.0
else:
d = data.ds.arr(
data.deposit(
pos,
[data[ptype, deposit_field]],
method=method,
kernel_name=kernel_name,
),
input_units=units,
)
return d
self.add_field(
("deposit", field_name),
function=_deposit_field,
sampling_type="cell",
units=units,
take_log=take_log,
validators=[ValidateSpatial()],
)
return ("deposit", field_name)
|
https://github.com/yt-project/yt/issues/1319
|
jam:scratch desika$ python vel_proj_idealized.py
yt : [INFO ] 2017-02-07 11:14:47,621 Omega Lambda is 0.0, so we are turning off Cosmology.
yt : [INFO ] 2017-02-07 11:14:47,630 Parameters: current_time = 0.25
yt : [INFO ] 2017-02-07 11:14:47,630 Parameters: domain_dimensions = [2 2 2]
yt : [INFO ] 2017-02-07 11:14:47,631 Parameters: domain_left_edge = [-1000. -1000. -1000.]
yt : [INFO ] 2017-02-07 11:14:47,631 Parameters: domain_right_edge = [ 1000. 1000. 1000.]
yt : [INFO ] 2017-02-07 11:14:47,632 Parameters: cosmological_simulation = 0
yt : [INFO ] 2017-02-07 11:14:47,634 Allocating for 2.515e+07 particles (index particle type 'all')
yt : [INFO ] 2017-02-07 11:14:51,665 Identified 1.540e+06 octs
[ 0. 0. 0. ..., 0. 0. 0.] code_velocity
Traceback (most recent call last):
File "vel_proj_idealized.py", line 23, in <module>
vy = arb[ ('deposit', 'PartType0_sum_velocity_y')]
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 281, in __getitem__
self.get_data(f)
File "/Users/desika/yt/yt/data_objects/construction_data_containers.py", line 635, in get_data
if len(gen) > 0: self._generate_fields(gen)
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 1353, in _generate_fields
fd = self._generate_field(field)
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 318, in _generate_field
tr = self._generate_fluid_field(field)
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 338, in _generate_fluid_field
rv = finfo(gen_obj)
File "/Users/desika/yt/yt/fields/derived_field.py", line 220, in __call__
dd = self._function(self, data)
File "/Users/desika/yt/yt/data_objects/static_output.py", line 1260, in _deposit_field
method=method, kernel_name=kernel_name),
File "/Users/desika/yt/yt/data_objects/construction_data_containers.py", line 728, in deposit
op.process_grid(self, positions, fields)
File "yt/geometry/particle_deposit.pyx", line 120, in yt.geometry.particle_deposit.ParticleDepositOperation.process_grid (yt/geometry/particle_deposit.c:5181)
ValueError: Buffer and memoryview are not contiguous in the same dimension.
|
ValueError
|
def _deposit_field(field, data):
"""
Create a grid field for particle quantities using given method.
"""
pos = data[ptype, "particle_position"]
fields = [data[ptype, deposit_field]]
if method == "weighted_mean":
fields.append(data[ptype, weight_field])
fields = [np.ascontiguousarray(f) for f in fields]
d = data.deposit(pos, fields, method=method, kernel_name=kernel_name)
d = data.ds.arr(d, input_units=units)
if method == "weighted_mean":
d[np.isnan(d)] = 0.0
return d
|
def _deposit_field(field, data):
"""
Create a grid field for particle quantities using given method.
"""
pos = data[ptype, "particle_position"]
if method == "weighted_mean":
d = data.ds.arr(
data.deposit(
pos,
[data[ptype, deposit_field], data[ptype, weight_field]],
method=method,
kernel_name=kernel_name,
),
input_units=units,
)
d[np.isnan(d)] = 0.0
else:
d = data.ds.arr(
data.deposit(
pos,
[data[ptype, deposit_field]],
method=method,
kernel_name=kernel_name,
),
input_units=units,
)
return d
|
https://github.com/yt-project/yt/issues/1319
|
jam:scratch desika$ python vel_proj_idealized.py
yt : [INFO ] 2017-02-07 11:14:47,621 Omega Lambda is 0.0, so we are turning off Cosmology.
yt : [INFO ] 2017-02-07 11:14:47,630 Parameters: current_time = 0.25
yt : [INFO ] 2017-02-07 11:14:47,630 Parameters: domain_dimensions = [2 2 2]
yt : [INFO ] 2017-02-07 11:14:47,631 Parameters: domain_left_edge = [-1000. -1000. -1000.]
yt : [INFO ] 2017-02-07 11:14:47,631 Parameters: domain_right_edge = [ 1000. 1000. 1000.]
yt : [INFO ] 2017-02-07 11:14:47,632 Parameters: cosmological_simulation = 0
yt : [INFO ] 2017-02-07 11:14:47,634 Allocating for 2.515e+07 particles (index particle type 'all')
yt : [INFO ] 2017-02-07 11:14:51,665 Identified 1.540e+06 octs
[ 0. 0. 0. ..., 0. 0. 0.] code_velocity
Traceback (most recent call last):
File "vel_proj_idealized.py", line 23, in <module>
vy = arb[ ('deposit', 'PartType0_sum_velocity_y')]
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 281, in __getitem__
self.get_data(f)
File "/Users/desika/yt/yt/data_objects/construction_data_containers.py", line 635, in get_data
if len(gen) > 0: self._generate_fields(gen)
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 1353, in _generate_fields
fd = self._generate_field(field)
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 318, in _generate_field
tr = self._generate_fluid_field(field)
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 338, in _generate_fluid_field
rv = finfo(gen_obj)
File "/Users/desika/yt/yt/fields/derived_field.py", line 220, in __call__
dd = self._function(self, data)
File "/Users/desika/yt/yt/data_objects/static_output.py", line 1260, in _deposit_field
method=method, kernel_name=kernel_name),
File "/Users/desika/yt/yt/data_objects/construction_data_containers.py", line 728, in deposit
op.process_grid(self, positions, fields)
File "yt/geometry/particle_deposit.pyx", line 120, in yt.geometry.particle_deposit.ParticleDepositOperation.process_grid (yt/geometry/particle_deposit.c:5181)
ValueError: Buffer and memoryview are not contiguous in the same dimension.
|
ValueError
|
def add_deposited_particle_field(
self, deposit_field, method, kernel_name="cubic", weight_field="particle_mass"
):
"""Add a new deposited particle field
Creates a new deposited field based on the particle *deposit_field*.
Parameters
----------
deposit_field : tuple
The field name tuple of the particle field the deposited field will
be created from. This must be a field name tuple so yt can
appropriately infer the correct particle type.
method : string
This is the "method name" which will be looked up in the
`particle_deposit` namespace as `methodname_deposit`. Current
methods include `simple_smooth`, `sum`, `std`, `cic`, `weighted_mean`,
`mesh_id`, and `nearest`.
kernel_name : string, default 'cubic'
This is the name of the smoothing kernel to use. It is only used for
the `simple_smooth` method and is otherwise ignored. Current
supported kernel names include `cubic`, `quartic`, `quintic`,
`wendland2`, `wendland4`, and `wendland6`.
weight_field : string, default 'particle_mass'
Weighting field name for deposition method `weighted_mean`.
Returns
-------
The field name tuple for the newly created field.
"""
self.index
if isinstance(deposit_field, tuple):
ptype, deposit_field = deposit_field[0], deposit_field[1]
else:
raise RuntimeError
units = self.field_info[ptype, deposit_field].units
take_log = self.field_info[ptype, deposit_field].take_log
name_map = {
"sum": "sum",
"std": "std",
"cic": "cic",
"weighted_mean": "avg",
"nearest": "nn",
"simple_smooth": "ss",
"count": "count",
}
field_name = "%s_" + name_map[method] + "_%s"
field_name = field_name % (ptype, deposit_field.replace("particle_", ""))
if method == "count":
field_name = "%s_count" % ptype
if ("deposit", field_name) in self.field_info:
mylog.warning("The deposited field %s already exists" % field_name)
return ("deposit", field_name)
else:
units = "dimensionless"
take_log = False
def _deposit_field(field, data):
"""
Create a grid field for particle quantities using given method.
"""
pos = data[ptype, "particle_position"]
fields = [data[ptype, deposit_field]]
if method == "weighted_mean":
fields.append(data[ptype, weight_field])
fields = [np.ascontiguousarray(f) for f in fields]
d = data.deposit(pos, fields, method=method, kernel_name=kernel_name)
d = data.ds.arr(d, input_units=units)
if method == "weighted_mean":
d[np.isnan(d)] = 0.0
return d
self.add_field(
("deposit", field_name),
function=_deposit_field,
units=units,
take_log=take_log,
validators=[ValidateSpatial()],
)
return ("deposit", field_name)
|
def add_deposited_particle_field(
self, deposit_field, method, kernel_name="cubic", weight_field="particle_mass"
):
"""Add a new deposited particle field
Creates a new deposited field based on the particle *deposit_field*.
Parameters
----------
deposit_field : tuple
The field name tuple of the particle field the deposited field will
be created from. This must be a field name tuple so yt can
appropriately infer the correct particle type.
method : string
This is the "method name" which will be looked up in the
`particle_deposit` namespace as `methodname_deposit`. Current
methods include `simple_smooth`, `sum`, `std`, `cic`, `weighted_mean`,
`mesh_id`, and `nearest`.
kernel_name : string, default 'cubic'
This is the name of the smoothing kernel to use. It is only used for
the `simple_smooth` method and is otherwise ignored. Current
supported kernel names include `cubic`, `quartic`, `quintic`,
`wendland2`, `wendland4`, and `wendland6`.
weight_field : string, default 'particle_mass'
Weighting field name for deposition method `weighted_mean`.
Returns
-------
The field name tuple for the newly created field.
"""
self.index
if isinstance(deposit_field, tuple):
ptype, deposit_field = deposit_field[0], deposit_field[1]
else:
raise RuntimeError
units = self.field_info[ptype, deposit_field].units
take_log = self.field_info[ptype, deposit_field].take_log
name_map = {
"sum": "sum",
"std": "std",
"cic": "cic",
"weighted_mean": "avg",
"nearest": "nn",
"simple_smooth": "ss",
"count": "count",
}
field_name = "%s_" + name_map[method] + "_%s"
field_name = field_name % (ptype, deposit_field.replace("particle_", ""))
if method == "count":
field_name = "%s_count" % ptype
if ("deposit", field_name) in self.field_info:
mylog.warning("The deposited field %s already exists" % field_name)
return ("deposit", field_name)
else:
units = "dimensionless"
take_log = False
def _deposit_field(field, data):
"""
Create a grid field for particle quantities using given method.
"""
pos = data[ptype, "particle_position"]
if method == "weighted_mean":
d = data.ds.arr(
data.deposit(
pos,
[data[ptype, deposit_field], data[ptype, weight_field]],
method=method,
kernel_name=kernel_name,
),
input_units=units,
)
d[np.isnan(d)] = 0.0
else:
d = data.ds.arr(
data.deposit(
pos,
[data[ptype, deposit_field]],
method=method,
kernel_name=kernel_name,
),
input_units=units,
)
return d
self.add_field(
("deposit", field_name),
function=_deposit_field,
units=units,
take_log=take_log,
validators=[ValidateSpatial()],
)
return ("deposit", field_name)
|
https://github.com/yt-project/yt/issues/1319
|
jam:scratch desika$ python vel_proj_idealized.py
yt : [INFO ] 2017-02-07 11:14:47,621 Omega Lambda is 0.0, so we are turning off Cosmology.
yt : [INFO ] 2017-02-07 11:14:47,630 Parameters: current_time = 0.25
yt : [INFO ] 2017-02-07 11:14:47,630 Parameters: domain_dimensions = [2 2 2]
yt : [INFO ] 2017-02-07 11:14:47,631 Parameters: domain_left_edge = [-1000. -1000. -1000.]
yt : [INFO ] 2017-02-07 11:14:47,631 Parameters: domain_right_edge = [ 1000. 1000. 1000.]
yt : [INFO ] 2017-02-07 11:14:47,632 Parameters: cosmological_simulation = 0
yt : [INFO ] 2017-02-07 11:14:47,634 Allocating for 2.515e+07 particles (index particle type 'all')
yt : [INFO ] 2017-02-07 11:14:51,665 Identified 1.540e+06 octs
[ 0. 0. 0. ..., 0. 0. 0.] code_velocity
Traceback (most recent call last):
File "vel_proj_idealized.py", line 23, in <module>
vy = arb[ ('deposit', 'PartType0_sum_velocity_y')]
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 281, in __getitem__
self.get_data(f)
File "/Users/desika/yt/yt/data_objects/construction_data_containers.py", line 635, in get_data
if len(gen) > 0: self._generate_fields(gen)
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 1353, in _generate_fields
fd = self._generate_field(field)
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 318, in _generate_field
tr = self._generate_fluid_field(field)
File "/Users/desika/yt/yt/data_objects/data_containers.py", line 338, in _generate_fluid_field
rv = finfo(gen_obj)
File "/Users/desika/yt/yt/fields/derived_field.py", line 220, in __call__
dd = self._function(self, data)
File "/Users/desika/yt/yt/data_objects/static_output.py", line 1260, in _deposit_field
method=method, kernel_name=kernel_name),
File "/Users/desika/yt/yt/data_objects/construction_data_containers.py", line 728, in deposit
op.process_grid(self, positions, fields)
File "yt/geometry/particle_deposit.pyx", line 120, in yt.geometry.particle_deposit.ParticleDepositOperation.process_grid (yt/geometry/particle_deposit.c:5181)
ValueError: Buffer and memoryview are not contiguous in the same dimension.
|
ValueError
|
def deposit(self, positions, fields=None, method=None, kernel_name="cubic"):
r"""Operate on the mesh, in a particle-against-mesh fashion, with
exclusively local input.
This uses the octree indexing system to call a "deposition" operation
(defined in yt/geometry/particle_deposit.pyx) that can take input from
several particles (local to the mesh) and construct some value on the
mesh. The canonical example is to sum the total mass in a mesh cell
and then divide by its volume.
Parameters
----------
positions : array_like (Nx3)
The positions of all of the particles to be examined. A new
indexed octree will be constructed on these particles.
fields : list of arrays
All the necessary fields for computing the particle operation. For
instance, this might include mass, velocity, etc.
method : string
This is the "method name" which will be looked up in the
`particle_deposit` namespace as `methodname_deposit`. Current
methods include `count`, `simple_smooth`, `sum`, `std`, `cic`,
`weighted_mean`, `mesh_id`, and `nearest`.
kernel_name : string, default 'cubic'
This is the name of the smoothing kernel to use. Current supported
kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
`wendland4`, and `wendland6`.
Returns
-------
List of fortran-ordered, mesh-like arrays.
"""
# Here we perform our particle deposition.
if fields is None:
fields = []
cls = getattr(particle_deposit, "deposit_%s" % method, None)
if cls is None:
raise YTParticleDepositionNotImplemented(method)
nz = self.nz
nvals = (nz, nz, nz, (self.domain_ind >= 0).sum())
# We allocate number of zones, not number of octs
op = cls(nvals, kernel_name)
op.initialize()
mylog.debug(
"Depositing %s (%s^3) particles into %s Octs",
positions.shape[0],
positions.shape[0] ** 0.3333333,
nvals[-1],
)
pos = np.asarray(positions.convert_to_units("code_length"), dtype="float64")
# We should not need the following if we know in advance all our fields
# need no casting.
fields = [np.ascontiguousarray(f, dtype="float64") for f in fields]
op.process_octree(
self.oct_handler,
self.domain_ind,
pos,
fields,
self.domain_id,
self._domain_offset,
)
vals = op.finalize()
if vals is None:
return
return np.asfortranarray(vals)
|
def deposit(self, positions, fields=None, method=None, kernel_name="cubic"):
r"""Operate on the mesh, in a particle-against-mesh fashion, with
exclusively local input.
This uses the octree indexing system to call a "deposition" operation
(defined in yt/geometry/particle_deposit.pyx) that can take input from
several particles (local to the mesh) and construct some value on the
mesh. The canonical example is to sum the total mass in a mesh cell
and then divide by its volume.
Parameters
----------
positions : array_like (Nx3)
The positions of all of the particles to be examined. A new
indexed octree will be constructed on these particles.
fields : list of arrays
All the necessary fields for computing the particle operation. For
instance, this might include mass, velocity, etc.
method : string
This is the "method name" which will be looked up in the
`particle_deposit` namespace as `methodname_deposit`. Current
methods include `count`, `simple_smooth`, `sum`, `std`, `cic`,
`weighted_mean`, `mesh_id`, and `nearest`.
kernel_name : string, default 'cubic'
This is the name of the smoothing kernel to use. Current supported
kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
`wendland4`, and `wendland6`.
Returns
-------
List of fortran-ordered, mesh-like arrays.
"""
# Here we perform our particle deposition.
if fields is None:
fields = []
cls = getattr(particle_deposit, "deposit_%s" % method, None)
if cls is None:
raise YTParticleDepositionNotImplemented(method)
nz = self.nz
nvals = (nz, nz, nz, (self.domain_ind >= 0).sum())
# We allocate number of zones, not number of octs
op = cls(nvals, kernel_name)
op.initialize()
mylog.debug(
"Depositing %s (%s^3) particles into %s Octs",
positions.shape[0],
positions.shape[0] ** 0.3333333,
nvals[-1],
)
pos = np.asarray(positions.convert_to_units("code_length"), dtype="float64")
# We should not need the following if we know in advance all our fields
# need no casting.
fields = [np.asarray(f, dtype="float64") for f in fields]
op.process_octree(
self.oct_handler,
self.domain_ind,
pos,
fields,
self.domain_id,
self._domain_offset,
)
vals = op.finalize()
if vals is None:
return
return np.asfortranarray(vals)
|
https://github.com/yt-project/yt/issues/1312
|
In [1]: run vel_proj
yt : [INFO ] 2017-01-17 15:27:33,808 Calculating time from 3.448e-01 to be 1.108e+17 seconds
yt : [INFO ] 2017-01-17 15:27:33,808 Assuming length units are in kpc/h (comoving)
yt : [INFO ] 2017-01-17 15:27:33,825 Parameters: current_time = 1.10758107325e+17 s
yt : [INFO ] 2017-01-17 15:27:33,825 Parameters: domain_dimensions = [2 2 2]
yt : [INFO ] 2017-01-17 15:27:33,825 Parameters: domain_left_edge = [ 0. 0. 0.]
yt : [INFO ] 2017-01-17 15:27:33,826 Parameters: domain_right_edge = [ 64000. 64000. 64000.]
yt : [INFO ] 2017-01-17 15:27:33,826 Parameters: cosmological_simulation = 1
yt : [INFO ] 2017-01-17 15:27:33,826 Parameters: current_redshift = 1.89999652869
yt : [INFO ] 2017-01-17 15:27:33,826 Parameters: omega_lambda = 0.721
yt : [INFO ] 2017-01-17 15:27:33,827 Parameters: omega_matter = 0.279
yt : [INFO ] 2017-01-17 15:27:33,827 Parameters: hubble_constant = 0.7
yt : [INFO ] 2017-01-17 15:27:33,830 Allocating for 1.191e+07 particles (index particle type 'all')
yt : [INFO ] 2017-01-17 15:27:35,746 Identified 8.088e+05 octs
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
/Users/desika/Dropbox/scratch/vel_proj.py in <module>()
6
7 ds.add_deposited_particle_field( ('PartType0', 'particle_velocity_y'),"sum")
----> 8 print ad[('deposit', 'PartType0_sum_velocity_y')]
/Users/desika/yt/yt/data_objects/data_containers.pyc in __getitem__(self, key)
279 return self.field_data[f]
280 else:
--> 281 self.get_data(f)
282 # fi.units is the unit expression string. We depend on the registry
283 # hanging off the dataset to define this unit object.
/Users/desika/yt/yt/data_objects/data_containers.pyc in get_data(self, fields)
1331
1332 fields_to_generate += gen_fluids + gen_particles
-> 1333 self._generate_fields(fields_to_generate)
1334 for field in list(self.field_data.keys()):
1335 if field not in ofields:
/Users/desika/yt/yt/data_objects/data_containers.pyc in _generate_fields(self, fields_to_generate)
1351 fi = self.ds._get_field_info(*field)
1352 try:
-> 1353 fd = self._generate_field(field)
1354 if fd is None:
1355 raise RuntimeError
/Users/desika/yt/yt/data_objects/data_containers.pyc in _generate_field(self, field)
316 tr = self._generate_particle_field(field)
317 else:
--> 318 tr = self._generate_fluid_field(field)
319 if tr is None:
320 raise YTCouldNotGenerateField(field, self.ds)
/Users/desika/yt/yt/data_objects/data_containers.pyc in _generate_fluid_field(self, field)
334 finfo.check_available(gen_obj)
335 except NeedsGridType as ngt_exception:
--> 336 rv = self._generate_spatial_fluid(field, ngt_exception.ghost_zones)
337 else:
338 rv = finfo(gen_obj)
/Users/desika/yt/yt/data_objects/data_containers.pyc in _generate_spatial_fluid(self, field, ngz)
354 o = self._current_chunk.objs[0]
355 with o._activate_cache():
--> 356 ind += o.select(self.selector, self[field], rv, ind)
357 else:
358 chunks = self.index._chunk(self, "spatial", ngz = ngz)
/Users/desika/yt/yt/data_objects/data_containers.pyc in __getitem__(self, key)
279 return self.field_data[f]
280 else:
--> 281 self.get_data(f)
282 # fi.units is the unit expression string. We depend on the registry
283 # hanging off the dataset to define this unit object.
/Users/desika/yt/yt/data_objects/data_containers.pyc in get_data(self, fields)
1331
1332 fields_to_generate += gen_fluids + gen_particles
-> 1333 self._generate_fields(fields_to_generate)
1334 for field in list(self.field_data.keys()):
1335 if field not in ofields:
/Users/desika/yt/yt/data_objects/data_containers.pyc in _generate_fields(self, fields_to_generate)
1351 fi = self.ds._get_field_info(*field)
1352 try:
-> 1353 fd = self._generate_field(field)
1354 if fd is None:
1355 raise RuntimeError
/Users/desika/yt/yt/data_objects/data_containers.pyc in _generate_field(self, field)
316 tr = self._generate_particle_field(field)
317 else:
--> 318 tr = self._generate_fluid_field(field)
319 if tr is None:
320 raise YTCouldNotGenerateField(field, self.ds)
/Users/desika/yt/yt/data_objects/data_containers.pyc in _generate_fluid_field(self, field)
336 rv = self._generate_spatial_fluid(field, ngt_exception.ghost_zones)
337 else:
--> 338 rv = finfo(gen_obj)
339 return rv
340
/Users/desika/yt/yt/fields/derived_field.pyc in __call__(self, data)
218 "for %s" % (self.name,))
219 with self.unit_registry(data):
--> 220 dd = self._function(self, data)
221 for field_name in data.keys():
222 if field_name not in original_fields:
/Users/desika/yt/yt/data_objects/static_output.pyc in _deposit_field(field, data)
1215 else:
1216 d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field]],
-> 1217 method=method, kernel_name=kernel_name),
1218 input_units=units)
1219 return d
/Users/desika/yt/yt/data_objects/octree_subset.pyc in deposit(self, positions, fields, method, kernel_name)
193 fields = [np.asarray(f, dtype="float64") for f in fields]
194 op.process_octree(self.oct_handler, self.domain_ind, pos, fields,
--> 195 self.domain_id, self._domain_offset)
196 vals = op.finalize()
197 if vals is None: return
/Users/desika/yt/yt/geometry/particle_deposit.pyx in yt.geometry.particle_deposit.ParticleDepositOperation.process_octree (yt/geometry/particle_deposit.c:4579)()
61 nf = len(fields)
62 cdef np.float64_t[::cython.view.indirect, ::1] field_pointers
---> 63 if nf > 0: field_pointers = OnceIndirect(fields)
64 cdef np.float64_t pos[3]
65 cdef np.float64_t[:] field_vals = np.empty(nf, dtype="float64")
ValueError: Buffer and memoryview are not contiguous in the same dimension.
|
ValueError
|
def _process_exception(resp, **kwargs):
unit = kwargs.get("unit")
if unit == 0:
err = {"message": "Broadcast message, ignoring errors!!!"}
else:
if isinstance(resp, ExceptionResponse):
err = {
"original_function_code": "{} ({})".format(
resp.original_code, hex(resp.original_code)
),
"error_function_code": "{} ({})".format(
resp.function_code, hex(resp.function_code)
),
"exception code": resp.exception_code,
"message": ModbusExceptions.decode(resp.exception_code),
}
elif isinstance(resp, ModbusIOException):
err = {
"original_function_code": "{} ({})".format(resp.fcode, hex(resp.fcode)),
"error": resp.message,
}
else:
err = {"error": str(resp)}
return err
|
def _process_exception(resp):
if isinstance(resp, ExceptionResponse):
err = {
"original_function_code": "{} ({})".format(
resp.original_code, hex(resp.original_code)
),
"error_function_code": "{} ({})".format(
resp.function_code, hex(resp.function_code)
),
"exception code": resp.exception_code,
"message": ModbusExceptions.decode(resp.exception_code),
}
elif isinstance(resp, ModbusIOException):
err = {
"original_function_code": "{} ({})".format(resp.fcode, hex(resp.fcode)),
"error": resp.message,
}
else:
err = {"error": str(resp)}
return err
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def write_coil(self, address, value, **kwargs):
"""
Write `value` to coil at `address`.
:param address: coil offset to write to
:param value: bit value to write
:param unit: The slave unit this request is targeting
:return:
"""
resp = super(ExtendedRequestSupport, self).write_coil(address, value, **kwargs)
return resp
|
def write_coil(self, address, value, **kwargs):
"""
Write `value` to coil at `address`.
:param address: coil offset to write to
:param value: bit value to write
:param unit: The slave unit this request is targeting
:return:
"""
resp = super(ExtendedRequestSupport, self).write_coil(address, value, **kwargs)
if not resp.isError():
return {
"function_code": resp.function_code,
"address": resp.address,
"value": resp.value,
}
else:
return ExtendedRequestSupport._process_exception(resp)
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def write_coils(self, address, values, **kwargs):
"""
Write `value` to coil at `address`.
:param address: coil offset to write to
:param value: list of bit values to write (comma seperated)
:param unit: The slave unit this request is targeting
:return:
"""
resp = super(ExtendedRequestSupport, self).write_coils(address, values, **kwargs)
return resp
|
def write_coils(self, address, values, **kwargs):
"""
Write `value` to coil at `address`.
:param address: coil offset to write to
:param value: list of bit values to write (comma seperated)
:param unit: The slave unit this request is targeting
:return:
"""
resp = super(ExtendedRequestSupport, self).write_coils(address, values, **kwargs)
if not resp.isError():
return {
"function_code": resp.function_code,
"address": resp.address,
"count": resp.count,
}
else:
return ExtendedRequestSupport._process_exception(resp)
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def write_register(self, address, value, **kwargs):
"""
Write `value` to register at `address`.
:param address: register offset to write to
:param value: register value to write
:param unit: The slave unit this request is targeting
:return:
"""
resp = super(ExtendedRequestSupport, self).write_register(address, value, **kwargs)
return resp
|
def write_register(self, address, value, **kwargs):
"""
Write `value` to register at `address`.
:param address: register offset to write to
:param value: register value to write
:param unit: The slave unit this request is targeting
:return:
"""
resp = super(ExtendedRequestSupport, self).write_register(address, value, **kwargs)
if not resp.isError():
return {
"function_code": resp.function_code,
"address": resp.address,
"value": resp.value,
}
else:
return ExtendedRequestSupport._process_exception(resp)
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def write_registers(self, address, values, **kwargs):
"""
Write list of `values` to registers starting at `address`.
:param address: register offset to write to
:param value: list of register value to write (comma seperated)
:param unit: The slave unit this request is targeting
:return:
"""
resp = super(ExtendedRequestSupport, self).write_registers(
address, values, **kwargs
)
return resp
|
def write_registers(self, address, values, **kwargs):
"""
Write list of `values` to registers starting at `address`.
:param address: register offset to write to
:param value: list of register value to write (comma seperated)
:param unit: The slave unit this request is targeting
:return:
"""
resp = super(ExtendedRequestSupport, self).write_registers(
address, values, **kwargs
)
if not resp.isError():
return {
"function_code": resp.function_code,
"address": resp.address,
"count": resp.count,
}
else:
return ExtendedRequestSupport._process_exception(resp)
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def main(ctx, verbose, broadcast_support):
if verbose:
global log
import logging
format = (
"%(asctime)-15s %(threadName)-15s "
"%(levelname)-8s %(module)-15s:%(lineno)-8s %(message)s"
)
log = logging.getLogger("pymodbus")
logging.basicConfig(format=format)
log.setLevel(logging.DEBUG)
ctx.obj = {"broadcast": broadcast_support}
|
def main(ctx, verbose):
if verbose:
global log
import logging
format = (
"%(asctime)-15s %(threadName)-15s "
"%(levelname)-8s %(module)-15s:%(lineno)-8s %(message)s"
)
log = logging.getLogger("pymodbus")
logging.basicConfig(format=format)
log.setLevel(logging.DEBUG)
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def tcp(ctx, host, port, framer):
from pymodbus.repl.client import ModbusTcpClient
broadcast = ctx.obj.get("broadcast")
kwargs = dict(host=host, port=port, broadcast_enable=broadcast)
if framer == "rtu":
from pymodbus.framer.rtu_framer import ModbusRtuFramer
kwargs["framer"] = ModbusRtuFramer
client = ModbusTcpClient(**kwargs)
cli(client)
|
def tcp(ctx, host, port, framer):
from pymodbus.repl.client import ModbusTcpClient
kwargs = dict(host=host, port=port)
if framer == "rtu":
from pymodbus.framer.rtu_framer import ModbusRtuFramer
kwargs["framer"] = ModbusRtuFramer
client = ModbusTcpClient(**kwargs)
cli(client)
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def run_serial_forwarder():
# ----------------------------------------------------------------------- #
# initialize the datastore(serial client)
# Note this would send the requests on the serial client with address = 0
# ----------------------------------------------------------------------- #
client = ModbusClient(method="rtu", port="/tmp/ptyp0")
# If required to communicate with a specified client use unit=<unit_id>
# in RemoteSlaveContext
# For e.g to forward the requests to slave with unit address 1 use
# store = RemoteSlaveContext(client, unit=1)
store = RemoteSlaveContext(client)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
StartServer(context, address=("localhost", 5020))
|
def run_serial_forwarder():
# ----------------------------------------------------------------------- #
# initialize the datastore(serial client)
# ----------------------------------------------------------------------- #
client = ModbusClient(method="rtu", port="/dev/ptyp0")
store = RemoteSlaveContext(client)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
StartServer(context, address=("localhost", 5020))
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def async_io_factory(port=None, framer=None, **kwargs):
"""
Factory to create asyncio based asynchronous serial clients
:param port: Serial port
:param framer: Modbus Framer
:param kwargs: Serial port options
:return: asyncio event loop and serial client
"""
import asyncio
from pymodbus.client.asynchronous.async_io import (
ModbusClientProtocol,
AsyncioModbusSerialClient,
)
loop = kwargs.pop("loop", None) or asyncio.get_event_loop()
proto_cls = kwargs.pop("proto_cls", None) or ModbusClientProtocol
try:
from serial_asyncio import create_serial_connection
except ImportError:
LOGGER.critical(
"pyserial-asyncio is not installed, "
"install with 'pip install pyserial-asyncio"
)
import sys
sys.exit(1)
client = AsyncioModbusSerialClient(port, proto_cls, framer, loop, **kwargs)
coro = client.connect()
if loop.is_running():
future = asyncio.run_coroutine_threadsafe(coro, loop=loop)
future.result()
else:
loop.run_until_complete(coro)
return loop, client
|
def async_io_factory(port=None, framer=None, **kwargs):
"""
Factory to create asyncio based asynchronous serial clients
:param port: Serial port
:param framer: Modbus Framer
:param kwargs: Serial port options
:return: asyncio event loop and serial client
"""
import asyncio
from pymodbus.client.asynchronous.asyncio import (
ModbusClientProtocol,
AsyncioModbusSerialClient,
)
loop = kwargs.pop("loop", None) or asyncio.get_event_loop()
proto_cls = kwargs.pop("proto_cls", None) or ModbusClientProtocol
try:
from serial_asyncio import create_serial_connection
except ImportError:
LOGGER.critical(
"pyserial-asyncio is not installed, "
"install with 'pip install pyserial-asyncio"
)
import sys
sys.exit(1)
client = AsyncioModbusSerialClient(port, proto_cls, framer, loop, **kwargs)
coro = client.connect()
if loop.is_running():
future = asyncio.run_coroutine_threadsafe(coro, loop=loop)
future.result()
else:
loop.run_until_complete(coro)
return loop, client
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def async_io_factory(
host="127.0.0.1",
port=Defaults.Port,
framer=None,
source_address=None,
timeout=None,
**kwargs,
):
"""
Factory to create asyncio based asynchronous tcp clients
:param host: Host IP address
:param port: Port
:param framer: Modbus Framer
:param source_address: Bind address
:param timeout: Timeout in seconds
:param kwargs:
:return: asyncio event loop and tcp client
"""
import asyncio
from pymodbus.client.asynchronous.async_io import init_tcp_client
loop = kwargs.get("loop") or asyncio.new_event_loop()
proto_cls = kwargs.get("proto_cls", None)
if not loop.is_running():
asyncio.set_event_loop(loop)
cor = init_tcp_client(proto_cls, loop, host, port)
client = loop.run_until_complete(asyncio.gather(cor))[0]
else:
cor = init_tcp_client(proto_cls, loop, host, port)
future = asyncio.run_coroutine_threadsafe(cor, loop=loop)
client = future.result()
return loop, client
|
def async_io_factory(
host="127.0.0.1",
port=Defaults.Port,
framer=None,
source_address=None,
timeout=None,
**kwargs,
):
"""
Factory to create asyncio based asynchronous tcp clients
:param host: Host IP address
:param port: Port
:param framer: Modbus Framer
:param source_address: Bind address
:param timeout: Timeout in seconds
:param kwargs:
:return: asyncio event loop and tcp client
"""
import asyncio
from pymodbus.client.asynchronous.asyncio import init_tcp_client
loop = kwargs.get("loop") or asyncio.new_event_loop()
proto_cls = kwargs.get("proto_cls", None)
if not loop.is_running():
asyncio.set_event_loop(loop)
cor = init_tcp_client(proto_cls, loop, host, port)
client = loop.run_until_complete(asyncio.gather(cor))[0]
else:
cor = init_tcp_client(proto_cls, loop, host, port)
future = asyncio.run_coroutine_threadsafe(cor, loop=loop)
client = future.result()
return loop, client
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def async_io_factory(
host="127.0.0.1",
port=Defaults.TLSPort,
sslctx=None,
server_hostname=None,
framer=None,
source_address=None,
timeout=None,
**kwargs,
):
"""
Factory to create asyncio based asynchronous tls clients
:param host: Host IP address
:param port: Port
:param sslctx: The SSLContext to use for TLS (default None and auto create)
:param server_hostname: Target server's name matched for certificate
:param framer: Modbus Framer
:param source_address: Bind address
:param timeout: Timeout in seconds
:param kwargs:
:return: asyncio event loop and tcp client
"""
import asyncio
from pymodbus.client.asynchronous.async_io import init_tls_client
loop = kwargs.get("loop") or asyncio.new_event_loop()
proto_cls = kwargs.get("proto_cls", None)
if not loop.is_running():
asyncio.set_event_loop(loop)
cor = init_tls_client(
proto_cls, loop, host, port, sslctx, server_hostname, framer
)
client = loop.run_until_complete(asyncio.gather(cor))[0]
else:
cor = init_tls_client(
proto_cls, loop, host, port, sslctx, server_hostname, framer
)
future = asyncio.run_coroutine_threadsafe(cor, loop=loop)
client = future.result()
return loop, client
|
def async_io_factory(
host="127.0.0.1",
port=Defaults.TLSPort,
sslctx=None,
server_hostname=None,
framer=None,
source_address=None,
timeout=None,
**kwargs,
):
"""
Factory to create asyncio based asynchronous tls clients
:param host: Host IP address
:param port: Port
:param sslctx: The SSLContext to use for TLS (default None and auto create)
:param server_hostname: Target server's name matched for certificate
:param framer: Modbus Framer
:param source_address: Bind address
:param timeout: Timeout in seconds
:param kwargs:
:return: asyncio event loop and tcp client
"""
import asyncio
from pymodbus.client.asynchronous.asyncio import init_tls_client
loop = kwargs.get("loop") or asyncio.new_event_loop()
proto_cls = kwargs.get("proto_cls", None)
if not loop.is_running():
asyncio.set_event_loop(loop)
cor = init_tls_client(
proto_cls, loop, host, port, sslctx, server_hostname, framer
)
client = loop.run_until_complete(asyncio.gather(cor))[0]
else:
cor = init_tls_client(
proto_cls, loop, host, port, sslctx, server_hostname, framer
)
future = asyncio.run_coroutine_threadsafe(cor, loop=loop)
client = future.result()
return loop, client
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def async_io_factory(
host="127.0.0.1",
port=Defaults.Port,
framer=None,
source_address=None,
timeout=None,
**kwargs,
):
"""
Factory to create asyncio based asynchronous udp clients
:param host: Host IP address
:param port: Port
:param framer: Modbus Framer
:param source_address: Bind address
:param timeout: Timeout in seconds
:param kwargs:
:return: asyncio event loop and udp client
"""
import asyncio
from pymodbus.client.asynchronous.async_io import init_udp_client
loop = kwargs.get("loop") or asyncio.get_event_loop()
proto_cls = kwargs.get("proto_cls", None)
cor = init_udp_client(proto_cls, loop, host, port)
if not loop.is_running():
client = loop.run_until_complete(asyncio.gather(cor))[0]
else:
client = asyncio.run_coroutine_threadsafe(cor, loop=loop)
client = client.result()
return loop, client
|
def async_io_factory(
host="127.0.0.1",
port=Defaults.Port,
framer=None,
source_address=None,
timeout=None,
**kwargs,
):
"""
Factory to create asyncio based asynchronous udp clients
:param host: Host IP address
:param port: Port
:param framer: Modbus Framer
:param source_address: Bind address
:param timeout: Timeout in seconds
:param kwargs:
:return: asyncio event loop and udp client
"""
import asyncio
from pymodbus.client.asynchronous.asyncio import init_udp_client
loop = kwargs.get("loop") or asyncio.get_event_loop()
proto_cls = kwargs.get("proto_cls", None)
cor = init_udp_client(proto_cls, loop, host, port)
if not loop.is_running():
client = loop.run_until_complete(asyncio.gather(cor))[0]
else:
client = asyncio.run_coroutine_threadsafe(cor, loop=loop)
client = client.result()
return loop, client
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def on_receive(self, *args):
"""
On data recieve call back
:param args: data received
:return:
"""
data = args[0] if len(args) > 0 else None
if not data:
return
LOGGER.debug("recv: " + " ".join([hex(byte2int(x)) for x in data]))
unit = self.framer.decode_data(data).get("unit", 0)
self.framer.processIncomingPacket(data, self._handle_response, unit=unit)
|
def on_receive(self, *args):
"""
On data recieve call back
:param args: data received
:return:
"""
data = args[0] if len(args) > 0 else None
if not data:
return
LOGGER.debug("recv: " + " ".join([hex(byte2int(x)) for x in data]))
unit = self.framer.decode_data(data).get("uid", 0)
self.framer.processIncomingPacket(data, self._handle_response, unit=unit)
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def dataReceived(self, data):
"""
Get response, check for valid message, decode result
:param data: The data returned from the server
"""
unit = self.framer.decode_data(data).get("unit", 0)
self.framer.processIncomingPacket(data, self._handleResponse, unit=unit)
|
def dataReceived(self, data):
"""
Get response, check for valid message, decode result
:param data: The data returned from the server
"""
unit = self.framer.decode_data(data).get("uid", 0)
self.framer.processIncomingPacket(data, self._handleResponse, unit=unit)
|
https://github.com/riptideio/pymodbus/issues/515
|
DEBUG transaction :115 Current transaction state - IDLE
DEBUG transaction :120 Running transaction 1
DEBUG transaction :219 SEND: 0x1 0x2b 0xe 0x2 0x4 0x71 0x44
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64 0xca 0xb
DEBUG rtu_framer :180 Getting Frame - 0x2b 0xe 0x2 0x83 0x0 0x0 0x2 0x4 0xd 0x23 0x31 0x20 0x54 0x65 0x73 0x74 0x20 0x53 0x6c 0x61 0x76 0x65 0x5 0x10 0x52 0x65 0x73 0x65 0x61 0x72 0x63 0x68 0x20 0x54 0x65 0x73 0x74 0x62 0x65 0x64
DEBUG factory :266 Factory Response[ReadDeviceInformationResponse: 43]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'function_code': 43, u'information': {4: '#1 Test Slave', 5: 'Research Testbed'}, u'space left': None, u'object count': 2, u'next object id': 0, u'more follows': 0, u'conformity': 131}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 2
DEBUG transaction :219 SEND: 0x1 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3f 0x46
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.71, Current Time stamp - 1595356416.81
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :228 Changing transaction state from 'SENDING' to 'WAITING FOR REPLY'
DEBUG transaction :304 Changing transaction state from 'WAITING FOR REPLY' to 'PROCESSING REPLY'
DEBUG transaction :233 RECV: 0x1 0x10 0x0 0x1 0x0 0x3 0xd1 0xc8
DEBUG rtu_framer :180 Getting Frame - 0x10 0x0 0x1 0x0 0x3
DEBUG factory :266 Factory Response[WriteMultipleRegistersResponse: 16]
DEBUG rtu_framer :115 Frame advanced, resetting header!!
DEBUG transaction :383 Adding transaction 1
DEBUG transaction :394 Getting transaction 1
DEBUG transaction :193 Changing transaction state from 'PROCESSING REPLY' to 'TRANSACTION_COMPLETE'
{u'count': 3, u'function_code': 16, u'address': 1}
DEBUG transaction :115 Current transaction state - TRANSACTION_COMPLETE
DEBUG transaction :120 Running transaction 3
DEBUG transaction :219 SEND: 0x0 0x10 0x0 0x1 0x0 0x3 0x6 0x0 0x2 0x0 0x3 0x0 0x4 0x3d 0xc7
DEBUG rtu_framer :264 Changing state to IDLE - Last Frame End - 1595356416.89, Current Time stamp - 1595356416.99
DEBUG sync :75 New Transaction state 'SENDING'
DEBUG transaction :223 Changing transaction state from 'SENDING' to 'TRANSACTION_COMPLETE'
Traceback (most recent call last):
File "repl_broadcast.py", line 30, in <module>
print(client.write_registers(1, [2, 3, 4], unit=0))
File "/home/pi/.local/lib/python2.7/site-packages/pymodbus/repl/client.py", line 172, in write_registers
if not resp.isError():
AttributeError: 'str' object has no attribute 'isError'
|
AttributeError
|
def run_async_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
# The datastores only respond to the addresses that they are initialized to
# Therefore, if you initialize a DataBlock to addresses from 0x00 to 0xFF,
# a request to 0x100 will respond with an invalid address exception.
# This is because many devices exhibit this kind of behavior (but not all)
#
# block = ModbusSequentialDataBlock(0x00, [0]*0xff)
#
# Continuing, you can choose to use a sequential or a sparse DataBlock in
# your data context. The difference is that the sequential has no gaps in
# the data while the sparse can. Once again, there are devices that exhibit
# both forms of behavior::
#
# block = ModbusSparseDataBlock({0x00: 0, 0x05: 1})
# block = ModbusSequentialDataBlock(0x00, [0]*5)
#
# Alternately, you can use the factory methods to initialize the DataBlocks
# or simply do not pass them to have them initialized to 0x00 on the full
# address range::
#
# store = ModbusSlaveContext(di = ModbusSequentialDataBlock.create())
# store = ModbusSlaveContext()
#
# Finally, you are allowed to use the same DataBlock reference for every
# table or you you may use a seperate DataBlock for each table.
# This depends if you would like functions to be able to access and modify
# the same data or not::
#
# block = ModbusSequentialDataBlock(0x00, [0]*0xff)
# store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
#
# The server then makes use of a server context that allows the server to
# respond with different slave contexts for different unit ids. By default
# it will return the same context for every unit id supplied (broadcast
# mode).
# However, this can be overloaded by setting the single flag to False
# and then supplying a dictionary of unit id to context mapping::
#
# slaves = {
# 0x01: ModbusSlaveContext(...),
# 0x02: ModbusSlaveContext(...),
# 0x03: ModbusSlaveContext(...),
# }
# context = ModbusServerContext(slaves=slaves, single=False)
#
# The slave context can also be initialized in zero_mode which means that a
# request to address(0-7) will map to the address (0-7). The default is
# False which is based on section 4.4 of the specification, so address(0-7)
# will map to (1-8)::
#
# store = ModbusSlaveContext(..., zero_mode=True)
# ----------------------------------------------------------------------- #
store = ModbusSlaveContext(
di=ModbusSequentialDataBlock(0, [17] * 100),
co=ModbusSequentialDataBlock(0, [17] * 100),
hr=ModbusSequentialDataBlock(0, [17] * 100),
ir=ModbusSequentialDataBlock(0, [17] * 100),
)
store.register(
CustomModbusRequest.function_code,
"cm",
ModbusSequentialDataBlock(0, [17] * 100),
)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
# If you don't set this or any fields, they are defaulted to empty strings.
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "Pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "Pymodbus Server"
identity.ModelName = "Pymodbus Server"
identity.MajorMinorRevision = "2.2.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
# TCP Server
StartTcpServer(
context,
identity=identity,
address=("localhost", 5020),
custom_functions=[CustomModbusRequest],
)
|
def run_async_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
# The datastores only respond to the addresses that they are initialized to
# Therefore, if you initialize a DataBlock to addresses from 0x00 to 0xFF,
# a request to 0x100 will respond with an invalid address exception.
# This is because many devices exhibit this kind of behavior (but not all)
#
# block = ModbusSequentialDataBlock(0x00, [0]*0xff)
#
# Continuing, you can choose to use a sequential or a sparse DataBlock in
# your data context. The difference is that the sequential has no gaps in
# the data while the sparse can. Once again, there are devices that exhibit
# both forms of behavior::
#
# block = ModbusSparseDataBlock({0x00: 0, 0x05: 1})
# block = ModbusSequentialDataBlock(0x00, [0]*5)
#
# Alternately, you can use the factory methods to initialize the DataBlocks
# or simply do not pass them to have them initialized to 0x00 on the full
# address range::
#
# store = ModbusSlaveContext(di = ModbusSequentialDataBlock.create())
# store = ModbusSlaveContext()
#
# Finally, you are allowed to use the same DataBlock reference for every
# table or you you may use a seperate DataBlock for each table.
# This depends if you would like functions to be able to access and modify
# the same data or not::
#
# block = ModbusSequentialDataBlock(0x00, [0]*0xff)
# store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
#
# The server then makes use of a server context that allows the server to
# respond with different slave contexts for different unit ids. By default
# it will return the same context for every unit id supplied (broadcast
# mode).
# However, this can be overloaded by setting the single flag to False
# and then supplying a dictionary of unit id to context mapping::
#
# slaves = {
# 0x01: ModbusSlaveContext(...),
# 0x02: ModbusSlaveContext(...),
# 0x03: ModbusSlaveContext(...),
# }
# context = ModbusServerContext(slaves=slaves, single=False)
#
# The slave context can also be initialized in zero_mode which means that a
# request to address(0-7) will map to the address (0-7). The default is
# False which is based on section 4.4 of the specification, so address(0-7)
# will map to (1-8)::
#
# store = ModbusSlaveContext(..., zero_mode=True)
# ----------------------------------------------------------------------- #
store = ModbusSlaveContext(
di=ModbusSequentialDataBlock(0, [17] * 100),
co=ModbusSequentialDataBlock(0, [17] * 100),
hr=ModbusSequentialDataBlock(0, [17] * 100),
ir=ModbusSequentialDataBlock(0, [17] * 100),
)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
# If you don't set this or any fields, they are defaulted to empty strings.
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "Pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "Pymodbus Server"
identity.ModelName = "Pymodbus Server"
identity.MajorMinorRevision = "1.5"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
# TCP Server
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def run_callback_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
queue = Queue()
devices = read_device_map("device-mapping")
block = CallbackDataBlock(devices, queue)
store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "pymodbus Server"
identity.ModelName = "pymodbus Server"
identity.MajorMinorRevision = "2.2.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
p = Process(target=device_writer, args=(queue,))
p.start()
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
def run_callback_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
queue = Queue()
devices = read_device_map("device-mapping")
block = CallbackDataBlock(devices, queue)
store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "pymodbus Server"
identity.ModelName = "pymodbus Server"
identity.MajorMinorRevision = "1.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
p = Process(target=device_writer, args=(queue,))
p.start()
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def run_custom_db_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
block = CustomDataBlock([0] * 100)
store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "pymodbus Server"
identity.ModelName = "pymodbus Server"
identity.MajorMinorRevision = "2.2.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
# p = Process(target=device_writer, args=(queue,))
# p.start()
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
def run_custom_db_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
block = CustomDataBlock([0] * 100)
store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "pymodbus Server"
identity.ModelName = "pymodbus Server"
identity.MajorMinorRevision = "1.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
# p = Process(target=device_writer, args=(queue,))
# p.start()
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, address=None, **kwargs):
ModbusRequest.__init__(self, **kwargs)
self.address = address
self.count = 16
|
def __init__(self, address):
ModbusRequest.__init__(self)
self.address = address
self.count = 16
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, address, **kwargs):
"""Initializes a new instance
:param address: The address to start reading from
"""
ReadCoilsRequest.__init__(self, address, 16, **kwargs)
|
def __init__(self, address):
"""Initializes a new instance
:param address: The address to start reading from
"""
ReadCoilsRequest.__init__(self, address, 16)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def updating_writer(a):
"""A worker process that runs every so often and
updates live values of the context which resides in an SQLite3 database.
It should be noted that there is a race condition for the update.
:param arguments: The input arguments to the call
"""
log.debug("Updating the database context")
context = a[0]
readfunction = 0x03 # read holding registers
writefunction = 0x10
slave_id = 0x01 # slave address
count = 50
# import pdb; pdb.set_trace()
rand_value = random.randint(0, 9999)
rand_addr = random.randint(0, 65000)
log.debug("Writing to datastore: {}, {}".format(rand_addr, rand_value))
# import pdb; pdb.set_trace()
context[slave_id].setValues(writefunction, rand_addr, [rand_value], update=False)
values = context[slave_id].getValues(readfunction, rand_addr, count)
log.debug("Values from datastore: " + str(values))
|
def updating_writer(a):
"""A worker process that runs every so often and
updates live values of the context which resides in an SQLite3 database.
It should be noted that there is a race condition for the update.
:param arguments: The input arguments to the call
"""
log.debug("Updating the database context")
context = a[0]
readfunction = 0x03 # read holding registers
writefunction = 0x10
slave_id = 0x01 # slave address
count = 50
# import pdb; pdb.set_trace()
rand_value = random.randint(0, 9999)
rand_addr = random.randint(0, 65000)
log.debug("Writing to datastore: {}, {}".format(rand_addr, rand_value))
# import pdb; pdb.set_trace()
context[slave_id].setValues(writefunction, rand_addr, [rand_value])
values = context[slave_id].getValues(readfunction, rand_addr, count)
log.debug("Values from datastore: " + str(values))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def run_dbstore_update_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
block = ModbusSequentialDataBlock(0x00, [0] * 0xFF)
store = SqlSlaveContext(block)
context = ModbusServerContext(slaves={1: store}, single=False)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "pymodbus Server"
identity.ModelName = "pymodbus Server"
identity.MajorMinorRevision = "2.2.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
time = 5 # 5 seconds delay
loop = LoopingCall(f=updating_writer, a=(context,))
loop.start(time, now=False) # initially delay by time
loop.stop()
StartTcpServer(context, identity=identity, address=("", 5020))
|
def run_dbstore_update_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
block = ModbusSequentialDataBlock(0x00, [0] * 0xFF)
store = SqlSlaveContext(block)
context = ModbusServerContext(slaves={1: store}, single=False)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "pymodbus Server"
identity.ModelName = "pymodbus Server"
identity.MajorMinorRevision = "1.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
time = 5 # 5 seconds delay
loop = LoopingCall(f=updating_writer, a=(context,))
loop.start(time, now=False) # initially delay by time
StartTcpServer(context, identity=identity, address=("", 5020))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def run_payload_server():
# ----------------------------------------------------------------------- #
# build your payload
# ----------------------------------------------------------------------- #
builder = BinaryPayloadBuilder(byteorder=Endian.Little, wordorder=Endian.Little)
builder.add_string("abcdefgh")
builder.add_bits([0, 1, 0, 1, 1, 0, 1, 0])
builder.add_8bit_int(-0x12)
builder.add_8bit_uint(0x12)
builder.add_16bit_int(-0x5678)
builder.add_16bit_uint(0x1234)
builder.add_32bit_int(-0x1234)
builder.add_32bit_uint(0x12345678)
builder.add_32bit_float(22.34)
builder.add_32bit_float(-22.34)
builder.add_64bit_int(-0xDEADBEEF)
builder.add_64bit_uint(0x12345678DEADBEEF)
builder.add_64bit_uint(0xDEADBEEFDEADBEED)
builder.add_64bit_float(123.45)
builder.add_64bit_float(-123.45)
# ----------------------------------------------------------------------- #
# use that payload in the data store
# ----------------------------------------------------------------------- #
# Here we use the same reference block for each underlying store.
# ----------------------------------------------------------------------- #
block = ModbusSequentialDataBlock(1, builder.to_registers())
store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
# If you don't set this or any fields, they are defaulted to empty strings.
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "Pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "Pymodbus Server"
identity.ModelName = "Pymodbus Server"
identity.MajorMinorRevision = "2.2.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
def run_payload_server():
# ----------------------------------------------------------------------- #
# build your payload
# ----------------------------------------------------------------------- #
builder = BinaryPayloadBuilder(byteorder=Endian.Little, wordorder=Endian.Little)
builder.add_string("abcdefgh")
builder.add_bits([0, 1, 0, 1, 1, 0, 1, 0])
builder.add_8bit_int(-0x12)
builder.add_8bit_uint(0x12)
builder.add_16bit_int(-0x5678)
builder.add_16bit_uint(0x1234)
builder.add_32bit_int(-0x1234)
builder.add_32bit_uint(0x12345678)
builder.add_32bit_float(22.34)
builder.add_32bit_float(-22.34)
builder.add_64bit_int(-0xDEADBEEF)
builder.add_64bit_uint(0x12345678DEADBEEF)
builder.add_64bit_uint(0xDEADBEEFDEADBEED)
builder.add_64bit_float(123.45)
builder.add_64bit_float(-123.45)
# ----------------------------------------------------------------------- #
# use that payload in the data store
# ----------------------------------------------------------------------- #
# Here we use the same reference block for each underlying store.
# ----------------------------------------------------------------------- #
block = ModbusSequentialDataBlock(1, builder.to_registers())
store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
# If you don't set this or any fields, they are defaulted to empty strings.
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "Pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "Pymodbus Server"
identity.ModelName = "Pymodbus Server"
identity.MajorMinorRevision = "1.5"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def run_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
# The datastores only respond to the addresses that they are initialized to
# Therefore, if you initialize a DataBlock to addresses of 0x00 to 0xFF, a
# request to 0x100 will respond with an invalid address exception. This is
# because many devices exhibit this kind of behavior (but not all)::
#
# block = ModbusSequentialDataBlock(0x00, [0]*0xff)
#
# Continuing, you can choose to use a sequential or a sparse DataBlock in
# your data context. The difference is that the sequential has no gaps in
# the data while the sparse can. Once again, there are devices that exhibit
# both forms of behavior::
#
# block = ModbusSparseDataBlock({0x00: 0, 0x05: 1})
# block = ModbusSequentialDataBlock(0x00, [0]*5)
#
# Alternately, you can use the factory methods to initialize the DataBlocks
# or simply do not pass them to have them initialized to 0x00 on the full
# address range::
#
# store = ModbusSlaveContext(di = ModbusSequentialDataBlock.create())
# store = ModbusSlaveContext()
#
# Finally, you are allowed to use the same DataBlock reference for every
# table or you may use a separate DataBlock for each table.
# This depends if you would like functions to be able to access and modify
# the same data or not::
#
# block = ModbusSequentialDataBlock(0x00, [0]*0xff)
# store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
#
# The server then makes use of a server context that allows the server to
# respond with different slave contexts for different unit ids. By default
# it will return the same context for every unit id supplied (broadcast
# mode).
# However, this can be overloaded by setting the single flag to False and
# then supplying a dictionary of unit id to context mapping::
#
# slaves = {
# 0x01: ModbusSlaveContext(...),
# 0x02: ModbusSlaveContext(...),
# 0x03: ModbusSlaveContext(...),
# }
# context = ModbusServerContext(slaves=slaves, single=False)
#
# The slave context can also be initialized in zero_mode which means that a
# request to address(0-7) will map to the address (0-7). The default is
# False which is based on section 4.4 of the specification, so address(0-7)
# will map to (1-8)::
#
# store = ModbusSlaveContext(..., zero_mode=True)
# ----------------------------------------------------------------------- #
store = ModbusSlaveContext(
di=ModbusSequentialDataBlock(0, [17] * 100),
co=ModbusSequentialDataBlock(0, [17] * 100),
hr=ModbusSequentialDataBlock(0, [17] * 100),
ir=ModbusSequentialDataBlock(0, [17] * 100),
)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
# If you don't set this or any fields, they are defaulted to empty strings.
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "Pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/riptideio/pymodbus/"
identity.ProductName = "Pymodbus Server"
identity.ModelName = "Pymodbus Server"
identity.MajorMinorRevision = "2.2.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
# Tcp:
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
def run_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
# The datastores only respond to the addresses that they are initialized to
# Therefore, if you initialize a DataBlock to addresses of 0x00 to 0xFF, a
# request to 0x100 will respond with an invalid address exception. This is
# because many devices exhibit this kind of behavior (but not all)::
#
# block = ModbusSequentialDataBlock(0x00, [0]*0xff)
#
# Continuing, you can choose to use a sequential or a sparse DataBlock in
# your data context. The difference is that the sequential has no gaps in
# the data while the sparse can. Once again, there are devices that exhibit
# both forms of behavior::
#
# block = ModbusSparseDataBlock({0x00: 0, 0x05: 1})
# block = ModbusSequentialDataBlock(0x00, [0]*5)
#
# Alternately, you can use the factory methods to initialize the DataBlocks
# or simply do not pass them to have them initialized to 0x00 on the full
# address range::
#
# store = ModbusSlaveContext(di = ModbusSequentialDataBlock.create())
# store = ModbusSlaveContext()
#
# Finally, you are allowed to use the same DataBlock reference for every
# table or you may use a separate DataBlock for each table.
# This depends if you would like functions to be able to access and modify
# the same data or not::
#
# block = ModbusSequentialDataBlock(0x00, [0]*0xff)
# store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
#
# The server then makes use of a server context that allows the server to
# respond with different slave contexts for different unit ids. By default
# it will return the same context for every unit id supplied (broadcast
# mode).
# However, this can be overloaded by setting the single flag to False and
# then supplying a dictionary of unit id to context mapping::
#
# slaves = {
# 0x01: ModbusSlaveContext(...),
# 0x02: ModbusSlaveContext(...),
# 0x03: ModbusSlaveContext(...),
# }
# context = ModbusServerContext(slaves=slaves, single=False)
#
# The slave context can also be initialized in zero_mode which means that a
# request to address(0-7) will map to the address (0-7). The default is
# False which is based on section 4.4 of the specification, so address(0-7)
# will map to (1-8)::
#
# store = ModbusSlaveContext(..., zero_mode=True)
# ----------------------------------------------------------------------- #
store = ModbusSlaveContext(
di=ModbusSequentialDataBlock(0, [17] * 100),
co=ModbusSequentialDataBlock(0, [17] * 100),
hr=ModbusSequentialDataBlock(0, [17] * 100),
ir=ModbusSequentialDataBlock(0, [17] * 100),
)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
# If you don't set this or any fields, they are defaulted to empty strings.
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "Pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/riptideio/pymodbus/"
identity.ProductName = "Pymodbus Server"
identity.ModelName = "Pymodbus Server"
identity.MajorMinorRevision = "1.5"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
# Tcp:
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def run_updating_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
store = ModbusSlaveContext(
di=ModbusSequentialDataBlock(0, [17] * 100),
co=ModbusSequentialDataBlock(0, [17] * 100),
hr=ModbusSequentialDataBlock(0, [17] * 100),
ir=ModbusSequentialDataBlock(0, [17] * 100),
)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "pymodbus Server"
identity.ModelName = "pymodbus Server"
identity.MajorMinorRevision = "2.2.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
time = 5 # 5 seconds delay
loop = LoopingCall(f=updating_writer, a=(context,))
loop.start(time, now=False) # initially delay by time
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
def run_updating_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
store = ModbusSlaveContext(
di=ModbusSequentialDataBlock(0, [17] * 100),
co=ModbusSequentialDataBlock(0, [17] * 100),
hr=ModbusSequentialDataBlock(0, [17] * 100),
ir=ModbusSequentialDataBlock(0, [17] * 100),
)
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/bashwork/pymodbus/"
identity.ProductName = "pymodbus Server"
identity.ModelName = "pymodbus Server"
identity.MajorMinorRevision = "1.0"
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
time = 5 # 5 seconds delay
loop = LoopingCall(f=updating_writer, a=(context,))
loop.start(time, now=False) # initially delay by time
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def run_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
store = ModbusSlaveContext()
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
# If you don't set this or any fields, they are defaulted to empty strings.
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "Pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/riptideio/pymodbus/"
identity.ProductName = "Pymodbus Server"
identity.ModelName = "Pymodbus Server"
identity.MajorMinorRevision = "2.2.0"
# ----------------------------------------------------------------------- #
# Add an example which is long enough to force the ReadDeviceInformation
# request / response to require multiple responses to send back all of the
# information.
# ----------------------------------------------------------------------- #
identity[0x80] = (
"Lorem ipsum dolor sit amet, consectetur adipiscing "
"elit. Vivamus rhoncus massa turpis, sit amet "
"ultrices orci semper ut. Aliquam tristique sapien in "
"lacus pharetra, in convallis nunc consectetur. Nunc "
"velit elit, vehicula tempus tempus sed. "
)
# ----------------------------------------------------------------------- #
# Add an example with repeated object IDs. The MODBUS specification is
# entirely silent on whether or not this is allowed. In practice, this
# should be assumed to be contrary to the MODBUS specification and other
# clients (other than pymodbus) might behave differently when presented
# with an object ID occurring twice in the returned information.
#
# Use this at your discretion, and at the very least ensure that all
# objects which share a single object ID can fit together within a single
# ADU unit. In the case of Modbus RTU, this is about 240 bytes or so. In
# other words, when the spec says "An object is indivisible, therefore
# any object must have a size consistent with the size of transaction
# response", if you use repeated OIDs, apply that rule to the entire
# grouping of objects with the repeated OID.
# ----------------------------------------------------------------------- #
identity[0x81] = [
"pymodbus {0}".format(pymodbus_version),
"pyserial {0}".format(pyserial_version),
]
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
# Tcp:
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
def run_server():
# ----------------------------------------------------------------------- #
# initialize your data store
# ----------------------------------------------------------------------- #
store = ModbusSlaveContext()
context = ModbusServerContext(slaves=store, single=True)
# ----------------------------------------------------------------------- #
# initialize the server information
# ----------------------------------------------------------------------- #
# If you don't set this or any fields, they are defaulted to empty strings.
# ----------------------------------------------------------------------- #
identity = ModbusDeviceIdentification()
identity.VendorName = "Pymodbus"
identity.ProductCode = "PM"
identity.VendorUrl = "http://github.com/riptideio/pymodbus/"
identity.ProductName = "Pymodbus Server"
identity.ModelName = "Pymodbus Server"
identity.MajorMinorRevision = "1.5"
# ----------------------------------------------------------------------- #
# Add an example which is long enough to force the ReadDeviceInformation
# request / response to require multiple responses to send back all of the
# information.
# ----------------------------------------------------------------------- #
identity[0x80] = (
"Lorem ipsum dolor sit amet, consectetur adipiscing "
"elit. Vivamus rhoncus massa turpis, sit amet "
"ultrices orci semper ut. Aliquam tristique sapien in "
"lacus pharetra, in convallis nunc consectetur. Nunc "
"velit elit, vehicula tempus tempus sed. "
)
# ----------------------------------------------------------------------- #
# Add an example with repeated object IDs. The MODBUS specification is
# entirely silent on whether or not this is allowed. In practice, this
# should be assumed to be contrary to the MODBUS specification and other
# clients (other than pymodbus) might behave differently when presented
# with an object ID occurring twice in the returned information.
#
# Use this at your discretion, and at the very least ensure that all
# objects which share a single object ID can fit together within a single
# ADU unit. In the case of Modbus RTU, this is about 240 bytes or so. In
# other words, when the spec says "An object is indivisible, therefore
# any object must have a size consistent with the size of transaction
# response", if you use repeated OIDs, apply that rule to the entire
# grouping of objects with the repeated OID.
# ----------------------------------------------------------------------- #
identity[0x81] = [
"pymodbus {0}".format(pymodbus_version),
"pyserial {0}".format(pyserial_version),
]
# ----------------------------------------------------------------------- #
# run the server you want
# ----------------------------------------------------------------------- #
# Tcp:
StartTcpServer(context, identity=identity, address=("localhost", 5020))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def execute(self, request=None):
"""
Executes a transaction
:param request: Request to be written on to the bus
:return:
"""
request.transaction_id = self.transaction.getNextTID()
def callback(*args):
LOGGER.debug("in callback - {}".format(request.transaction_id))
while True:
waiting = self.stream.connection.in_waiting
if waiting:
data = self.stream.connection.read(waiting)
LOGGER.debug("recv: " + " ".join([hex(byte2int(x)) for x in data]))
unit = self.framer.decode_data(data).get("uid", 0)
self.framer.processIncomingPacket(
data, self._handle_response, unit, tid=request.transaction_id
)
break
packet = self.framer.buildPacket(request)
LOGGER.debug("send: " + " ".join([hex(byte2int(x)) for x in packet]))
self.stream.write(packet, callback=callback)
f = self._build_response(request.transaction_id)
return f
|
def execute(self, request=None):
"""
Executes a transaction
:param request: Request to be written on to the bus
:return:
"""
request.transaction_id = self.transaction.getNextTID()
def callback(*args):
LOGGER.debug("in callback - {}".format(request.transaction_id))
while True:
waiting = self.stream.connection.in_waiting
if waiting:
data = self.stream.connection.read(waiting)
LOGGER.debug("recv: " + " ".join([hex(byte2int(x)) for x in data]))
self.framer.processIncomingPacket(
data, self._handle_response, tid=request.transaction_id
)
break
packet = self.framer.buildPacket(request)
LOGGER.debug("send: " + " ".join([hex(byte2int(x)) for x in packet]))
self.stream.write(packet, callback=callback)
f = self._build_response(request.transaction_id)
return f
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def callback(*args):
LOGGER.debug("in callback - {}".format(request.transaction_id))
while True:
waiting = self.stream.connection.in_waiting
if waiting:
data = self.stream.connection.read(waiting)
LOGGER.debug("recv: " + " ".join([hex(byte2int(x)) for x in data]))
unit = self.framer.decode_data(data).get("uid", 0)
self.framer.processIncomingPacket(
data, self._handle_response, unit, tid=request.transaction_id
)
break
|
def callback(*args):
LOGGER.debug("in callback - {}".format(request.transaction_id))
while True:
waiting = self.stream.connection.in_waiting
if waiting:
data = self.stream.connection.read(waiting)
LOGGER.debug("recv: " + " ".join([hex(byte2int(x)) for x in data]))
self.framer.processIncomingPacket(
data, self._handle_response, tid=request.transaction_id
)
break
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, framer, **kwargs):
"""Initialize a client instance
:param framer: The modbus framer implementation to use
"""
self.framer = framer
self.transaction = DictTransactionManager(self, **kwargs)
self._debug = False
self._debugfd = None
self.broadcast_enable = kwargs.get("broadcast_enable", Defaults.broadcast_enable)
|
def __init__(self, framer, **kwargs):
"""Initialize a client instance
:param framer: The modbus framer implementation to use
"""
self.framer = framer
if isinstance(self.framer, ModbusSocketFramer):
self.transaction = DictTransactionManager(self, **kwargs)
else:
self.transaction = FifoTransactionManager(self, **kwargs)
self._debug = False
self._debugfd = None
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def _recv(self, size):
"""Reads data from the underlying descriptor
:param size: The number of bytes to read
:return: The bytes read
"""
if not self.socket:
raise ConnectionException(self.__str__())
# socket.recv(size) waits until it gets some data from the host but
# not necessarily the entire response that can be fragmented in
# many packets.
# To avoid the splitted responses to be recognized as invalid
# messages and to be discarded, loops socket.recv until full data
# is received or timeout is expired.
# If timeout expires returns the read data, also if its length is
# less than the expected size.
self.socket.setblocking(0)
timeout = self.timeout
# If size isn't specified read 1 byte at a time.
if size is None:
recv_size = 1
else:
recv_size = size
data = b""
time_ = time.time()
end = time_ + timeout
while recv_size > 0:
ready = select.select([self.socket], [], [], end - time_)
if ready[0]:
data += self.socket.recv(recv_size)
time_ = time.time()
# If size isn't specified continue to read until timeout expires.
if size:
recv_size = size - len(data)
# Timeout is reduced also if some data has been received in order
# to avoid infinite loops when there isn't an expected response
# size and the slave sends noisy data continuosly.
if time_ > end:
break
return data
|
def _recv(self, size):
"""Reads data from the underlying descriptor
:param size: The number of bytes to read
:return: The bytes read
"""
if not self.socket:
raise ConnectionException(self.__str__())
# socket.recv(size) waits until it gets some data from the host but
# not necessarily the entire response that can be fragmented in
# many packets.
# To avoid the splitted responses to be recognized as invalid
# messages and to be discarded, loops socket.recv until full data
# is received or timeout is expired.
# If timeout expires returns the read data, also if its length is
# less than the expected size.
self.socket.setblocking(0)
timeout = self.timeout
# If size isn't specified read 1 byte at a time.
if size is None:
recv_size = 1
else:
recv_size = size
data = b""
begin = time.time()
while recv_size > 0:
ready = select.select([self.socket], [], [], timeout)
if ready[0]:
data += self.socket.recv(recv_size)
# If size isn't specified continue to read until timeout expires.
if size:
recv_size = size - len(data)
# Timeout is reduced also if some data has been received in order
# to avoid infinite loops when there isn't an expected response size
# and the slave sends noisy data continuosly.
timeout -= time.time() - begin
if timeout <= 0:
break
return data
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def connect(self):
"""Connect to the modbus serial server
:returns: True if connection succeeded, False otherwise
"""
if self.socket:
return True
try:
self.socket = serial.Serial(
port=self.port,
timeout=self.timeout,
bytesize=self.bytesize,
stopbits=self.stopbits,
baudrate=self.baudrate,
parity=self.parity,
)
if self.method == "rtu":
if self._strict:
self.socket.interCharTimeout = self.inter_char_timeout
self.last_frame_end = None
except serial.SerialException as msg:
_logger.error(msg)
self.close()
return self.socket is not None
|
def connect(self):
"""Connect to the modbus serial server
:returns: True if connection succeeded, False otherwise
"""
if self.socket:
return True
try:
self.socket = serial.Serial(
port=self.port,
timeout=self.timeout,
bytesize=self.bytesize,
stopbits=self.stopbits,
baudrate=self.baudrate,
parity=self.parity,
)
except serial.SerialException as msg:
_logger.error(msg)
self.close()
if self.method == "rtu":
if self._strict and self.socket:
self.socket.interCharTimeout = self.inter_char_timeout
self.last_frame_end = None
return self.socket is not None
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def validate(self, fx, address, count=1):
"""Validates the request to make sure it is in range
:param fx: The function we are working with
:param address: The starting address
:param count: The number of values to test
:returns: True if the request in within range, False otherwise
"""
if not self.zero_mode:
address = address + 1
_logger.debug("validate: fc-[%d] address-%d: count-%d" % (fx, address, count))
return self.store[self.decode(fx)].validate(address, count)
|
def validate(self, fx, address, count=1):
"""Validates the request to make sure it is in range
:param fx: The function we are working with
:param address: The starting address
:param count: The number of values to test
:returns: True if the request in within range, False otherwise
"""
if not self.zero_mode:
address = address + 1
_logger.debug("validate[%d] %d:%d" % (fx, address, count))
return self.store[self.decode(fx)].validate(address, count)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def getValues(self, fx, address, count=1):
"""Get `count` values from datastore
:param fx: The function we are working with
:param address: The starting address
:param count: The number of values to retrieve
:returns: The requested values from a:a+c
"""
if not self.zero_mode:
address = address + 1
_logger.debug("getValues fc-[%d] address-%d: count-%d" % (fx, address, count))
return self.store[self.decode(fx)].getValues(address, count)
|
def getValues(self, fx, address, count=1):
"""Get `count` values from datastore
:param fx: The function we are working with
:param address: The starting address
:param count: The number of values to retrieve
:returns: The requested values from a:a+c
"""
if not self.zero_mode:
address = address + 1
_logger.debug("getValues[%d] %d:%d" % (fx, address, count))
return self.store[self.decode(fx)].getValues(address, count)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, *args, **kwargs):
"""Initializes the datastores
:param kwargs: Each element is a ModbusDataBlock
"""
self.table = kwargs.get("table", "pymodbus")
self.database = kwargs.get("database", "sqlite:///pymodbus.db")
self._db_create(self.table, self.database)
|
def __init__(self, *args, **kwargs):
"""Initializes the datastores
:param kwargs: Each element is a ModbusDataBlock
"""
self.table = kwargs.get("table", "pymodbus")
self.database = kwargs.get("database", "sqlite:///pymodbus.db")
self._db_create(self.table, self.database)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __str__(self):
"""Returns a string representation of the context
:returns: A string representation of the context
"""
return "Modbus Slave Context"
|
def __str__(self):
"""Returns a string representation of the context
:returns: A string representation of the context
"""
return "Modbus Slave Context"
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def reset(self):
"""Resets all the datastores to their default values"""
self._metadata.drop_all()
self._db_create(self.table, self.database)
|
def reset(self):
"""Resets all the datastores to their default values"""
self._metadata.drop_all()
self._db_create(self.table, self.database)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def validate(self, fx, address, count=1):
"""Validates the request to make sure it is in range
:param fx: The function we are working with
:param address: The starting address
:param count: The number of values to test
:returns: True if the request in within range, False otherwise
"""
address = address + 1 # section 4.4 of specification
_logger.debug("validate[%d] %d:%d" % (fx, address, count))
return self._validate(self.decode(fx), address, count)
|
def validate(self, fx, address, count=1):
"""Validates the request to make sure it is in range
:param fx: The function we are working with
:param address: The starting address
:param count: The number of values to test
:returns: True if the request in within range, False otherwise
"""
address = address + 1 # section 4.4 of specification
_logger.debug("validate[%d] %d:%d" % (fx, address, count))
return self._validate(self.decode(fx), address, count)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def getValues(self, fx, address, count=1):
"""Get `count` values from datastore
:param fx: The function we are working with
:param address: The starting address
:param count: The number of values to retrieve
:returns: The requested values from a:a+c
"""
address = address + 1 # section 4.4 of specification
_logger.debug("get-values[%d] %d:%d" % (fx, address, count))
return self._get(self.decode(fx), address, count)
|
def getValues(self, fx, address, count=1):
"""Get `count` values from datastore
:param fx: The function we are working with
:param address: The starting address
:param count: The number of values to retrieve
:returns: The requested values from a:a+c
"""
address = address + 1 # section 4.4 of specification
_logger.debug("get-values[%d] %d:%d" % (fx, address, count))
return self._get(self.decode(fx), address, count)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def setValues(self, fx, address, values, update=True):
"""Sets the datastore with the supplied values
:param fx: The function we are working with
:param address: The starting address
:param values: The new values to be set
:param update: Update existing register in the db
"""
address = address + 1 # section 4.4 of specification
_logger.debug("set-values[%d] %d:%d" % (fx, address, len(values)))
if update:
self._update(self.decode(fx), address, values)
else:
self._set(self.decode(fx), address, values)
|
def setValues(self, fx, address, values):
"""Sets the datastore with the supplied values
:param fx: The function we are working with
:param address: The starting address
:param values: The new values to be set
"""
address = address + 1 # section 4.4 of specification
_logger.debug("set-values[%d] %d:%d" % (fx, address, len(values)))
self._set(self.decode(fx), address, values)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def _db_create(self, table, database):
"""A helper method to initialize the database and handles
:param table: The table name to create
:param database: The database uri to use
"""
self._engine = sqlalchemy.create_engine(database, echo=False)
self._metadata = sqlalchemy.MetaData(self._engine)
self._table = sqlalchemy.Table(
table,
self._metadata,
sqlalchemy.Column("type", sqltypes.String(1)),
sqlalchemy.Column("index", sqltypes.Integer),
sqlalchemy.Column("value", sqltypes.Integer),
UniqueConstraint("type", "index", name="key"),
)
self._table.create(checkfirst=True)
self._connection = self._engine.connect()
|
def _db_create(self, table, database):
"""A helper method to initialize the database and handles
:param table: The table name to create
:param database: The database uri to use
"""
self._engine = sqlalchemy.create_engine(database, echo=False)
self._metadata = sqlalchemy.MetaData(self._engine)
self._table = sqlalchemy.Table(
table,
self._metadata,
sqlalchemy.Column("type", sqltypes.String(1)),
sqlalchemy.Column("index", sqltypes.Integer),
sqlalchemy.Column("value", sqltypes.Integer),
UniqueConstraint("type", "index", name="key"),
)
self._table.create(checkfirst=True)
self._connection = self._engine.connect()
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def _get(self, type, offset, count):
"""
:param type: The key prefix to use
:param offset: The address offset to start at
:param count: The number of bits to read
:returns: The resulting values
"""
query = self._table.select(
and_(
self._table.c.type == type,
self._table.c.index >= offset,
self._table.c.index <= offset + count - 1,
)
)
query = query.order_by(self._table.c.index.asc())
result = self._connection.execute(query).fetchall()
return [row.value for row in result]
|
def _get(self, type, offset, count):
"""
:param type: The key prefix to use
:param offset: The address offset to start at
:param count: The number of bits to read
:returns: The resulting values
"""
query = self._table.select(
and_(
self._table.c.type == type,
self._table.c.index >= offset,
self._table.c.index <= offset + count - 1,
)
)
query = query.order_by(self._table.c.index.asc())
result = self._connection.execute(query).fetchall()
return [row.value for row in result]
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def _build_set(self, type, offset, values, prefix=""):
"""A helper method to generate the sql update context
:param type: The key prefix to use
:param offset: The address offset to start at
:param values: The values to set
:param prefix: Prefix fields index and type, defaults to empty string
"""
result = []
for index, value in enumerate(values):
result.append(
{prefix + "type": type, prefix + "index": offset + index, "value": value}
)
return result
|
def _build_set(self, type, offset, values, prefix=""):
"""A helper method to generate the sql update context
:param type: The key prefix to use
:param offset: The address offset to start at
:param values: The values to set
:param prefix: Prefix fields index and type, defaults to empty string
"""
result = []
for index, value in enumerate(values):
result.append(
{prefix + "type": type, prefix + "index": offset + index, "value": value}
)
return result
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def _set(self, type, offset, values):
"""
:param key: The type prefix to use
:param offset: The address offset to start at
:param values: The values to set
"""
if self._check(type, offset, values):
context = self._build_set(type, offset, values)
query = self._table.insert()
result = self._connection.execute(query, context)
return result.rowcount == len(values)
else:
return False
|
def _set(self, type, offset, values):
"""
:param key: The type prefix to use
:param offset: The address offset to start at
:param values: The values to set
"""
if self._check(type, offset, values):
context = self._build_set(type, offset, values)
query = self._table.insert()
result = self._connection.execute(query, context)
return result.rowcount == len(values)
else:
return False
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def _update(self, type, offset, values):
"""
:param type: The type prefix to use
:param offset: The address offset to start at
:param values: The values to set
"""
context = self._build_set(type, offset, values, prefix="x_")
query = self._table.update().values(value="value")
query = query.where(
and_(
self._table.c.type == bindparam("x_type"),
self._table.c.index == bindparam("x_index"),
)
)
result = self._connection.execute(query, context)
return result.rowcount == len(values)
|
def _update(self, type, offset, values):
"""
:param type: The type prefix to use
:param offset: The address offset to start at
:param values: The values to set
"""
context = self._build_set(type, offset, values, prefix="x_")
query = self._table.update().values(name="value")
query = query.where(
and_(
self._table.c.type == bindparam("x_type"),
self._table.c.index == bindparam("x_index"),
)
)
result = self._connection.execute(query, context)
return result.rowcount == len(values)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def _validate(self, type, offset, count):
"""
:param key: The key prefix to use
:param offset: The address offset to start at
:param count: The number of bits to read
:returns: The result of the validation
"""
query = self._table.select(
and_(
self._table.c.type == type,
self._table.c.index >= offset,
self._table.c.index <= offset + count - 1,
)
)
result = self._connection.execute(query).fetchall()
return len(result) == count
|
def _validate(self, type, offset, count):
"""
:param key: The key prefix to use
:param offset: The address offset to start at
:param count: The number of bits to read
:returns: The result of the validation
"""
query = self._table.select(
and_(
self._table.c.type == type,
self._table.c.index >= offset,
self._table.c.index <= offset + count - 1,
)
)
result = self._connection.execute(query).fetchall()
return len(result) == count
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, string):
"""Initialize the exception
:param string: The message to append to the error
"""
self.string = string
|
def __init__(self, string):
"""Initialize the exception
:param string: The message to append to the error
"""
self.string = string
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, string="", function_code=None):
"""Initialize the exception
:param string: The message to append to the error
"""
self.fcode = function_code
self.message = "[Input/Output] %s" % string
ModbusException.__init__(self, self.message)
|
def __init__(self, string="", function_code=None):
"""Initialize the exception
:param string: The message to append to the error
"""
self.fcode = function_code
self.message = "[Input/Output] %s" % string
ModbusException.__init__(self, self.message)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[Invalid Parameter] %s" % string
ModbusException.__init__(self, message)
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[Invalid Parameter] %s" % string
ModbusException.__init__(self, message)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[No Such Slave] %s" % string
ModbusException.__init__(self, message)
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[No Such Slave] %s" % string
ModbusException.__init__(self, message)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[Not Implemented] %s" % string
ModbusException.__init__(self, message)
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[Not Implemented] %s" % string
ModbusException.__init__(self, message)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[Connection] %s" % string
ModbusException.__init__(self, message)
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[Connection] %s" % string
ModbusException.__init__(self, message)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[Invalid Message] %s" % string
ModbusException.__init__(self, message)
|
def __init__(self, string=""):
"""Initialize the exception
:param string: The message to append to the error
"""
message = "[Invalid Message] %s" % string
ModbusException.__init__(self, message)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __init__(self):
"""Initializes the client lookup tables"""
functions = set(f.function_code for f in self.__function_table)
self.__lookup = dict([(f.function_code, f) for f in self.__function_table])
self.__sub_lookup = dict((f, {}) for f in functions)
for f in self.__sub_function_table:
self.__sub_lookup[f.function_code][f.sub_function_code] = f
|
def __init__(self):
"""Initializes the client lookup tables"""
functions = set(f.function_code for f in self.__function_table)
self.__lookup = dict([(f.function_code, f) for f in self.__function_table])
self.__sub_lookup = dict((f, {}) for f in functions)
for f in self.__sub_function_table:
self.__sub_lookup[f.function_code][f.sub_function_code] = f
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def decode(self, message):
"""Wrapper to decode a request packet
:param message: The raw modbus request packet
:return: The decoded modbus message or None if error
"""
try:
return self._helper(message)
except ModbusException as er:
_logger.warning("Unable to decode request %s" % er)
return None
|
def decode(self, message):
"""Wrapper to decode a request packet
:param message: The raw modbus request packet
:return: The decoded modbus message or None if error
"""
try:
return self._helper(message)
except ModbusException as er:
_logger.warning("Unable to decode request %s" % er)
return None
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def lookupPduClass(self, function_code):
"""Use `function_code` to determine the class of the PDU.
:param function_code: The function code specified in a frame.
:returns: The class of the PDU that has a matching `function_code`.
"""
return self.__lookup.get(function_code, ExceptionResponse)
|
def lookupPduClass(self, function_code):
"""Use `function_code` to determine the class of the PDU.
:param function_code: The function code specified in a frame.
:returns: The class of the PDU that has a matching `function_code`.
"""
return self.__lookup.get(function_code, ExceptionResponse)
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def _helper(self, data):
"""
This factory is used to generate the correct request object
from a valid request packet. This decodes from a list of the
currently implemented request types.
:param data: The request packet to decode
:returns: The decoded request or illegal function request object
"""
function_code = byte2int(data[0])
request = self.__lookup.get(function_code, lambda: None)()
if not request:
_logger.debug("Factory Request[%d]" % function_code)
request = IllegalFunctionRequest(function_code)
else:
fc_string = "%s: %s" % (
str(self.__lookup[function_code]).split(".")[-1].rstrip("'>"),
function_code,
)
_logger.debug("Factory Request[%s]" % fc_string)
request.decode(data[1:])
if hasattr(request, "sub_function_code"):
lookup = self.__sub_lookup.get(request.function_code, {})
subtype = lookup.get(request.sub_function_code, None)
if subtype:
request.__class__ = subtype
return request
|
def _helper(self, data):
"""
This factory is used to generate the correct request object
from a valid request packet. This decodes from a list of the
currently implemented request types.
:param data: The request packet to decode
:returns: The decoded request or illegal function request object
"""
function_code = byte2int(data[0])
_logger.debug("Factory Request[%d]" % function_code)
request = self.__lookup.get(function_code, lambda: None)()
if not request:
request = IllegalFunctionRequest(function_code)
request.decode(data[1:])
if hasattr(request, "sub_function_code"):
lookup = self.__sub_lookup.get(request.function_code, {})
subtype = lookup.get(request.sub_function_code, None)
if subtype:
request.__class__ = subtype
return request
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def decode(self, message):
"""Wrapper to decode a response packet
:param message: The raw packet to decode
:return: The decoded modbus message or None if error
"""
try:
return self._helper(message)
except ModbusException as er:
_logger.error("Unable to decode response %s" % er)
except Exception as ex:
_logger.error(ex)
return None
|
def decode(self, message):
"""Wrapper to decode a response packet
:param message: The raw packet to decode
:return: The decoded modbus message or None if error
"""
try:
return self._helper(message)
except ModbusException as er:
_logger.error("Unable to decode response %s" % er)
except Exception as ex:
_logger.error(ex)
return None
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def _helper(self, data):
"""
This factory is used to generate the correct response object
from a valid response packet. This decodes from a list of the
currently implemented request types.
:param data: The response packet to decode
:returns: The decoded request or an exception response object
"""
fc_string = function_code = byte2int(data[0])
if function_code in self.__lookup:
fc_string = "%s: %s" % (
str(self.__lookup[function_code]).split(".")[-1].rstrip("'>"),
function_code,
)
_logger.debug("Factory Response[%s]" % fc_string)
response = self.__lookup.get(function_code, lambda: None)()
if function_code > 0x80:
code = function_code & 0x7F # strip error portion
response = ExceptionResponse(code, ecode.IllegalFunction)
if not response:
raise ModbusException("Unknown response %d" % function_code)
response.decode(data[1:])
if hasattr(response, "sub_function_code"):
lookup = self.__sub_lookup.get(response.function_code, {})
subtype = lookup.get(response.sub_function_code, None)
if subtype:
response.__class__ = subtype
return response
|
def _helper(self, data):
"""
This factory is used to generate the correct response object
from a valid response packet. This decodes from a list of the
currently implemented request types.
:param data: The response packet to decode
:returns: The decoded request or an exception response object
"""
fc_string = function_code = byte2int(data[0])
if function_code in self.__lookup:
fc_string = "%s: %s" % (
str(self.__lookup[function_code]).split(".")[-1].rstrip("'>"),
function_code,
)
_logger.debug("Factory Response[%s]" % fc_string)
response = self.__lookup.get(function_code, lambda: None)()
if function_code > 0x80:
code = function_code & 0x7F # strip error portion
response = ExceptionResponse(code, ecode.IllegalFunction)
if not response:
raise ModbusException("Unknown response %d" % function_code)
response.decode(data[1:])
if hasattr(response, "sub_function_code"):
lookup = self.__sub_lookup.get(response.function_code, {})
subtype = lookup.get(response.sub_function_code, None)
if subtype:
response.__class__ = subtype
return response
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def checkFrame(self):
"""
Check if the next frame is available.
Return True if we were successful.
1. Populate header
2. Discard frame if UID does not match
"""
try:
self.populateHeader()
frame_size = self._header["len"]
data = self._buffer[: frame_size - 2]
crc = self._buffer[frame_size - 2 : frame_size]
crc_val = (byte2int(crc[0]) << 8) + byte2int(crc[1])
if checkCRC(data, crc_val):
return True
else:
_logger.debug("CRC invalid, discarding header!!")
self.resetFrame()
return False
except (IndexError, KeyError, struct.error):
return False
|
def checkFrame(self):
"""
Check if the next frame is available.
Return True if we were successful.
1. Populate header
2. Discard frame if UID does not match
"""
try:
self.populateHeader()
frame_size = self._header["len"]
data = self._buffer[: frame_size - 2]
crc = self._buffer[frame_size - 2 : frame_size]
crc_val = (byte2int(crc[0]) << 8) + byte2int(crc[1])
return checkCRC(data, crc_val)
except (IndexError, KeyError, struct.error):
return False
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def populateResult(self, result):
"""
Populates the modbus result header
The serial packets do not have any header information
that is copied.
:param result: The response packet
"""
result.unit_id = self._header["uid"]
result.transaction_id = self._header["uid"]
|
def populateResult(self, result):
"""
Populates the modbus result header
The serial packets do not have any header information
that is copied.
:param result: The response packet
"""
result.unit_id = self._header["uid"]
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def processIncomingPacket(self, data, callback, unit, **kwargs):
"""
The new packet processing pattern
This takes in a new request packet, adds it to the current
packet stream, and performs framing on it. That is, checks
for complete messages, and once found, will process all that
exist. This handles the case when we read N + 1 or 1 // N
messages at a time instead of 1.
The processed and decoded messages are pushed to the callback
function to process and send.
:param data: The new packet data
:param callback: The function to send results to
:param unit: Process if unit id matches, ignore otherwise (could be a
list of unit ids (server) or single unit id(client/server)
:param single: True or False (If True, ignore unit address validation)
"""
if not isinstance(unit, (list, tuple)):
unit = [unit]
self.addToFrame(data)
single = kwargs.get("single", False)
if self.isFrameReady():
if self.checkFrame():
if self._validate_unit_id(unit, single):
self._process(callback)
else:
_logger.debug(
"Not a valid unit id - {}, ignoring!!".format(self._header["uid"])
)
self.resetFrame()
else:
_logger.debug("Frame check failed, ignoring!!")
self.resetFrame()
else:
_logger.debug("Frame - [{}] not ready".format(data))
|
def processIncomingPacket(self, data, callback, unit, **kwargs):
"""
The new packet processing pattern
This takes in a new request packet, adds it to the current
packet stream, and performs framing on it. That is, checks
for complete messages, and once found, will process all that
exist. This handles the case when we read N + 1 or 1 // N
messages at a time instead of 1.
The processed and decoded messages are pushed to the callback
function to process and send.
:param data: The new packet data
:param callback: The function to send results to
:param unit: Process if unit id matches, ignore otherwise (could be a
list of unit ids (server) or single unit id(client/server)
:param single: True or False (If True, ignore unit address validation)
"""
if not isinstance(unit, (list, tuple)):
unit = [unit]
self.addToFrame(data)
single = kwargs.get("single", False)
if self.isFrameReady():
if self.checkFrame():
if self._validate_unit_id(unit, single):
self._process(callback)
else:
_logger.debug(
"Not a valid unit id - {}, ignoring!!".format(self._header["uid"])
)
self.resetFrame()
else:
_logger.debug("Frame - [{}] not ready".format(data))
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def buildPacket(self, message):
"""
Creates a ready to send modbus packet
:param message: The populated request/response to send
"""
data = message.encode()
packet = (
struct.pack(RTU_FRAME_HEADER, message.unit_id, message.function_code) + data
)
packet += struct.pack(">H", computeCRC(packet))
message.transaction_id = (
message.unit_id
) # Ensure that transaction is actually the unit id for serial comms
return packet
|
def buildPacket(self, message):
"""
Creates a ready to send modbus packet
:param message: The populated request/response to send
"""
data = message.encode()
packet = (
struct.pack(RTU_FRAME_HEADER, message.unit_id, message.function_code) + data
)
packet += struct.pack(">H", computeCRC(packet))
return packet
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def __new__(cls, *args, **kwargs):
"""Create a new instance"""
if "_inst" not in vars(cls):
cls._inst = object.__new__(cls)
return cls._inst
|
def __new__(cls, *args, **kwargs):
"""Create a new instance"""
if "_inst" not in vars(cls):
cls._inst = object.__new__(cls)
return cls._inst
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
def decode(self, message):
"""Wrapper to decode a given packet
:param message: The raw modbus request packet
:return: The decoded modbus message or None if error
"""
raise NotImplementedException("Method not implemented by derived class")
|
def decode(self, message):
"""Wrapper to decode a given packet
:param message: The raw modbus request packet
:return: The decoded modbus message or None if error
"""
raise NotImplementedException("Method not implemented by derived class")
|
https://github.com/riptideio/pymodbus/issues/377
|
client = ModbusSerialClient(port='notexist', method='rtu')
client.connect()
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "lib/python3.6/site-packages/pymodbus/client/sync.py", line 476, in connect
self.socket.interCharTimeout = self.inter_char_timeout
AttributeError: 'NoneType' object has no attribute 'interCharTimeout'
|
AttributeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.