repo
stringlengths 7
55
| path
stringlengths 4
223
| func_name
stringlengths 1
134
| original_string
stringlengths 75
104k
| language
stringclasses 1
value | code
stringlengths 75
104k
| code_tokens
listlengths 19
28.4k
| docstring
stringlengths 1
46.9k
| docstring_tokens
listlengths 1
1.97k
| sha
stringlengths 40
40
| url
stringlengths 87
315
| partition
stringclasses 1
value |
|---|---|---|---|---|---|---|---|---|---|---|---|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
ellipse_distance
|
def ellipse_distance(item_a, time_a, item_b, time_b, max_value):
    """
    Distance between the ellipses fitted to two objects, measured from
    sampled ellipse endpoints.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    # Sample each fitted ellipse at t = 0 and t = pi (opposite ends of the axis).
    sample_ts = np.array([0, np.pi])
    ends_a = item_a.get_ellipse_model(time_a).predict_xy(sample_ts)
    ends_b = item_b.get_ellipse_model(time_b).predict_xy(sample_ts)
    # NOTE(review): reproduces the original behavior exactly — only the t=0
    # end of ellipse a is compared against the t=pi end of ellipse b.
    dx = ends_a[0, 0] - ends_b[1, 0]
    dy = ends_a[0, 1] - ends_b[1, 1]
    separation = np.sqrt(dx ** 2 + dy ** 2)
    return np.minimum(separation, max_value) / float(max_value)
|
python
|
def ellipse_distance(item_a, time_a, item_b, time_b, max_value):
    """
    Distance between the ellipses fitted to two objects, measured from
    sampled ellipse endpoints.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    # Sample each fitted ellipse at t = 0 and t = pi (opposite ends of the axis).
    sample_ts = np.array([0, np.pi])
    ends_a = item_a.get_ellipse_model(time_a).predict_xy(sample_ts)
    ends_b = item_b.get_ellipse_model(time_b).predict_xy(sample_ts)
    # NOTE(review): reproduces the original behavior exactly — only the t=0
    # end of ellipse a is compared against the t=pi end of ellipse b.
    dx = ends_a[0, 0] - ends_b[1, 0]
    dy = ends_a[0, 1] - ends_b[1, 1]
    separation = np.sqrt(dx ** 2 + dy ** 2)
    return np.minimum(separation, max_value) / float(max_value)
|
[
"def",
"ellipse_distance",
"(",
"item_a",
",",
"time_a",
",",
"item_b",
",",
"time_b",
",",
"max_value",
")",
":",
"ts",
"=",
"np",
".",
"array",
"(",
"[",
"0",
",",
"np",
".",
"pi",
"]",
")",
"ell_a",
"=",
"item_a",
".",
"get_ellipse_model",
"(",
"time_a",
")",
"ell_b",
"=",
"item_b",
".",
"get_ellipse_model",
"(",
"time_b",
")",
"ends_a",
"=",
"ell_a",
".",
"predict_xy",
"(",
"ts",
")",
"ends_b",
"=",
"ell_b",
".",
"predict_xy",
"(",
"ts",
")",
"distances",
"=",
"np",
".",
"sqrt",
"(",
"(",
"ends_a",
"[",
":",
",",
"0",
":",
"1",
"]",
"-",
"ends_b",
"[",
":",
",",
"0",
":",
"1",
"]",
".",
"T",
")",
"**",
"2",
"+",
"(",
"ends_a",
"[",
":",
",",
"1",
":",
"]",
"-",
"ends_b",
"[",
":",
",",
"1",
":",
"]",
".",
"T",
")",
"**",
"2",
")",
"return",
"np",
".",
"minimum",
"(",
"distances",
"[",
"0",
",",
"1",
"]",
",",
"max_value",
")",
"/",
"float",
"(",
"max_value",
")"
] |
Calculate differences in the properties of ellipses fitted to each object.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"Calculate",
"differences",
"in",
"the",
"properties",
"of",
"ellipses",
"fitted",
"to",
"each",
"object",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L311-L331
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
nonoverlap
|
def nonoverlap(item_a, time_a, item_b, time_b, max_value):
    """
    Percentage of pixels in each object that do not overlap with the other object.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    # Complement of the overlap fraction, capped at max_value and rescaled.
    overlap_fraction = item_a.count_overlap(time_a, item_b, time_b)
    raw_distance = 1 - overlap_fraction
    return np.minimum(raw_distance, max_value) / float(max_value)
|
python
|
def nonoverlap(item_a, time_a, item_b, time_b, max_value):
    """
    Percentage of pixels in each object that do not overlap with the other object.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    # Complement of the overlap fraction, capped at max_value and rescaled.
    overlap_fraction = item_a.count_overlap(time_a, item_b, time_b)
    raw_distance = 1 - overlap_fraction
    return np.minimum(raw_distance, max_value) / float(max_value)
|
[
"def",
"nonoverlap",
"(",
"item_a",
",",
"time_a",
",",
"item_b",
",",
"time_b",
",",
"max_value",
")",
":",
"return",
"np",
".",
"minimum",
"(",
"1",
"-",
"item_a",
".",
"count_overlap",
"(",
"time_a",
",",
"item_b",
",",
"time_b",
")",
",",
"max_value",
")",
"/",
"float",
"(",
"max_value",
")"
] |
Percentage of pixels in each object that do not overlap with the other object
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"Percentage",
"of",
"pixels",
"in",
"each",
"object",
"that",
"do",
"not",
"overlap",
"with",
"the",
"other",
"object"
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L334-L348
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
max_intensity
|
def max_intensity(item_a, time_a, item_b, time_b, max_value):
    """
    Absolute difference in maximum intensity between two objects.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    intensity_a = item_a.max_intensity(time_a)
    intensity_b = item_b.max_intensity(time_b)
    # np.abs is the direct form of the original np.sqrt((a - b) ** 2).
    diff = np.abs(intensity_a - intensity_b)
    return np.minimum(diff, max_value) / float(max_value)
|
python
|
def max_intensity(item_a, time_a, item_b, time_b, max_value):
    """
    Absolute difference in maximum intensity between two objects.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    intensity_a = item_a.max_intensity(time_a)
    intensity_b = item_b.max_intensity(time_b)
    # np.abs is the direct form of the original np.sqrt((a - b) ** 2).
    diff = np.abs(intensity_a - intensity_b)
    return np.minimum(diff, max_value) / float(max_value)
|
[
"def",
"max_intensity",
"(",
"item_a",
",",
"time_a",
",",
"item_b",
",",
"time_b",
",",
"max_value",
")",
":",
"intensity_a",
"=",
"item_a",
".",
"max_intensity",
"(",
"time_a",
")",
"intensity_b",
"=",
"item_b",
".",
"max_intensity",
"(",
"time_b",
")",
"diff",
"=",
"np",
".",
"sqrt",
"(",
"(",
"intensity_a",
"-",
"intensity_b",
")",
"**",
"2",
")",
"return",
"np",
".",
"minimum",
"(",
"diff",
",",
"max_value",
")",
"/",
"float",
"(",
"max_value",
")"
] |
RMS difference in maximum intensity
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"RMS",
"difference",
"in",
"maximum",
"intensity"
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L351-L368
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
area_difference
|
def area_difference(item_a, time_a, item_b, time_b, max_value):
    """
    Absolute difference in object areas.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    size_a = item_a.size(time_a)
    size_b = item_b.size(time_b)
    # np.abs is the direct form of the original np.sqrt((a - b) ** 2).
    diff = np.abs(size_a - size_b)
    return np.minimum(diff, max_value) / float(max_value)
|
python
|
def area_difference(item_a, time_a, item_b, time_b, max_value):
    """
    Absolute difference in object areas.

    Args:
        item_a: STObject from the first set in ObjectMatcher
        time_a: Time integer being evaluated
        item_b: STObject from the second set in ObjectMatcher
        time_b: Time integer being evaluated
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    size_a = item_a.size(time_a)
    size_b = item_b.size(time_b)
    # np.abs is the direct form of the original np.sqrt((a - b) ** 2).
    diff = np.abs(size_a - size_b)
    return np.minimum(diff, max_value) / float(max_value)
|
[
"def",
"area_difference",
"(",
"item_a",
",",
"time_a",
",",
"item_b",
",",
"time_b",
",",
"max_value",
")",
":",
"size_a",
"=",
"item_a",
".",
"size",
"(",
"time_a",
")",
"size_b",
"=",
"item_b",
".",
"size",
"(",
"time_b",
")",
"diff",
"=",
"np",
".",
"sqrt",
"(",
"(",
"size_a",
"-",
"size_b",
")",
"**",
"2",
")",
"return",
"np",
".",
"minimum",
"(",
"diff",
",",
"max_value",
")",
"/",
"float",
"(",
"max_value",
")"
] |
RMS Difference in object areas.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"RMS",
"Difference",
"in",
"object",
"areas",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L371-L388
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
mean_minimum_centroid_distance
|
def mean_minimum_centroid_distance(item_a, item_b, max_value):
    """
    Combined mean of the minimum squared distances from each track's centroids
    to the other track's centroids, square-rooted, capped, and rescaled.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    centroids_a = np.array([item_a.center_of_mass(t) for t in item_a.times])
    centroids_b = np.array([item_b.center_of_mass(t) for t in item_b.times])
    # Pairwise squared distances between every centroid in a and every one in b.
    deltas = centroids_a[:, np.newaxis, :] - centroids_b[np.newaxis, :, :]
    sq_dists = (deltas ** 2).sum(axis=-1)
    combined = np.sqrt(sq_dists.min(axis=0).mean() + sq_dists.min(axis=1).mean())
    return np.minimum(combined, max_value) / float(max_value)
|
python
|
def mean_minimum_centroid_distance(item_a, item_b, max_value):
    """
    Combined mean of the minimum squared distances from each track's centroids
    to the other track's centroids, square-rooted, capped, and rescaled.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    centroids_a = np.array([item_a.center_of_mass(t) for t in item_a.times])
    centroids_b = np.array([item_b.center_of_mass(t) for t in item_b.times])
    # Pairwise squared distances between every centroid in a and every one in b.
    deltas = centroids_a[:, np.newaxis, :] - centroids_b[np.newaxis, :, :]
    sq_dists = (deltas ** 2).sum(axis=-1)
    combined = np.sqrt(sq_dists.min(axis=0).mean() + sq_dists.min(axis=1).mean())
    return np.minimum(combined, max_value) / float(max_value)
|
[
"def",
"mean_minimum_centroid_distance",
"(",
"item_a",
",",
"item_b",
",",
"max_value",
")",
":",
"centroids_a",
"=",
"np",
".",
"array",
"(",
"[",
"item_a",
".",
"center_of_mass",
"(",
"t",
")",
"for",
"t",
"in",
"item_a",
".",
"times",
"]",
")",
"centroids_b",
"=",
"np",
".",
"array",
"(",
"[",
"item_b",
".",
"center_of_mass",
"(",
"t",
")",
"for",
"t",
"in",
"item_b",
".",
"times",
"]",
")",
"distance_matrix",
"=",
"(",
"centroids_a",
"[",
":",
",",
"0",
":",
"1",
"]",
"-",
"centroids_b",
".",
"T",
"[",
"0",
":",
"1",
"]",
")",
"**",
"2",
"+",
"(",
"centroids_a",
"[",
":",
",",
"1",
":",
"]",
"-",
"centroids_b",
".",
"T",
"[",
"1",
":",
"]",
")",
"**",
"2",
"mean_min_distances",
"=",
"np",
".",
"sqrt",
"(",
"distance_matrix",
".",
"min",
"(",
"axis",
"=",
"0",
")",
".",
"mean",
"(",
")",
"+",
"distance_matrix",
".",
"min",
"(",
"axis",
"=",
"1",
")",
".",
"mean",
"(",
")",
")",
"return",
"np",
".",
"minimum",
"(",
"mean_min_distances",
",",
"max_value",
")",
"/",
"float",
"(",
"max_value",
")"
] |
RMS difference in the minimum distances from the centroids of one track to the centroids of another track
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"RMS",
"difference",
"in",
"the",
"minimum",
"distances",
"from",
"the",
"centroids",
"of",
"one",
"track",
"to",
"the",
"centroids",
"of",
"another",
"track"
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L391-L407
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
mean_min_time_distance
|
def mean_min_time_distance(item_a, item_b, max_value):
    """
    Calculate the mean time difference among the time steps in each object.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    # Squared time difference between every time step of a and of b.
    sq_diffs = np.subtract.outer(item_a.times, item_b.times) ** 2
    combined = np.sqrt(sq_diffs.min(axis=0).mean() + sq_diffs.min(axis=1).mean())
    return np.minimum(combined, max_value) / float(max_value)
|
python
|
def mean_min_time_distance(item_a, item_b, max_value):
    """
    Calculate the mean time difference among the time steps in each object.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    # Squared time difference between every time step of a and of b.
    sq_diffs = np.subtract.outer(item_a.times, item_b.times) ** 2
    combined = np.sqrt(sq_diffs.min(axis=0).mean() + sq_diffs.min(axis=1).mean())
    return np.minimum(combined, max_value) / float(max_value)
|
[
"def",
"mean_min_time_distance",
"(",
"item_a",
",",
"item_b",
",",
"max_value",
")",
":",
"times_a",
"=",
"item_a",
".",
"times",
".",
"reshape",
"(",
"(",
"item_a",
".",
"times",
".",
"size",
",",
"1",
")",
")",
"times_b",
"=",
"item_b",
".",
"times",
".",
"reshape",
"(",
"(",
"1",
",",
"item_b",
".",
"times",
".",
"size",
")",
")",
"distance_matrix",
"=",
"(",
"times_a",
"-",
"times_b",
")",
"**",
"2",
"mean_min_distances",
"=",
"np",
".",
"sqrt",
"(",
"distance_matrix",
".",
"min",
"(",
"axis",
"=",
"0",
")",
".",
"mean",
"(",
")",
"+",
"distance_matrix",
".",
"min",
"(",
"axis",
"=",
"1",
")",
".",
"mean",
"(",
")",
")",
"return",
"np",
".",
"minimum",
"(",
"mean_min_distances",
",",
"max_value",
")",
"/",
"float",
"(",
"max_value",
")"
] |
Calculate the mean time difference among the time steps in each object.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"Calculate",
"the",
"mean",
"time",
"difference",
"among",
"the",
"time",
"steps",
"in",
"each",
"object",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L410-L426
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
start_centroid_distance
|
def start_centroid_distance(item_a, item_b, max_value):
    """
    Distance between the centroids of the first step in each object.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    origin_a = item_a.center_of_mass(item_a.times[0])
    origin_b = item_b.center_of_mass(item_b.times[0])
    dx = origin_a[0] - origin_b[0]
    dy = origin_a[1] - origin_b[1]
    separation = np.sqrt(dx ** 2 + dy ** 2)
    return np.minimum(separation, max_value) / float(max_value)
|
python
|
def start_centroid_distance(item_a, item_b, max_value):
    """
    Distance between the centroids of the first step in each object.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    origin_a = item_a.center_of_mass(item_a.times[0])
    origin_b = item_b.center_of_mass(item_b.times[0])
    dx = origin_a[0] - origin_b[0]
    dy = origin_a[1] - origin_b[1]
    separation = np.sqrt(dx ** 2 + dy ** 2)
    return np.minimum(separation, max_value) / float(max_value)
|
[
"def",
"start_centroid_distance",
"(",
"item_a",
",",
"item_b",
",",
"max_value",
")",
":",
"start_a",
"=",
"item_a",
".",
"center_of_mass",
"(",
"item_a",
".",
"times",
"[",
"0",
"]",
")",
"start_b",
"=",
"item_b",
".",
"center_of_mass",
"(",
"item_b",
".",
"times",
"[",
"0",
"]",
")",
"start_distance",
"=",
"np",
".",
"sqrt",
"(",
"(",
"start_a",
"[",
"0",
"]",
"-",
"start_b",
"[",
"0",
"]",
")",
"**",
"2",
"+",
"(",
"start_a",
"[",
"1",
"]",
"-",
"start_b",
"[",
"1",
"]",
")",
"**",
"2",
")",
"return",
"np",
".",
"minimum",
"(",
"start_distance",
",",
"max_value",
")",
"/",
"float",
"(",
"max_value",
")"
] |
Distance between the centroids of the first step in each object.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"Distance",
"between",
"the",
"centroids",
"of",
"the",
"first",
"step",
"in",
"each",
"object",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L429-L444
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
start_time_distance
|
def start_time_distance(item_a, item_b, max_value):
    """
    Absolute difference between the starting times of each item.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    first_a = item_a.times[0]
    first_b = item_b.times[0]
    return np.minimum(np.abs(first_a - first_b), max_value) / float(max_value)
|
python
|
def start_time_distance(item_a, item_b, max_value):
    """
    Absolute difference between the starting times of each item.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    first_a = item_a.times[0]
    first_b = item_b.times[0]
    return np.minimum(np.abs(first_a - first_b), max_value) / float(max_value)
|
[
"def",
"start_time_distance",
"(",
"item_a",
",",
"item_b",
",",
"max_value",
")",
":",
"start_time_diff",
"=",
"np",
".",
"abs",
"(",
"item_a",
".",
"times",
"[",
"0",
"]",
"-",
"item_b",
".",
"times",
"[",
"0",
"]",
")",
"return",
"np",
".",
"minimum",
"(",
"start_time_diff",
",",
"max_value",
")",
"/",
"float",
"(",
"max_value",
")"
] |
Absolute difference between the starting times of each item.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"Absolute",
"difference",
"between",
"the",
"starting",
"times",
"of",
"each",
"item",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L447-L460
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
duration_distance
|
def duration_distance(item_a, item_b, max_value):
    """
    Absolute difference in the duration (number of time steps) of two items.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    length_difference = np.abs(item_a.times.size - item_b.times.size)
    return np.minimum(length_difference, max_value) / float(max_value)
|
python
|
def duration_distance(item_a, item_b, max_value):
    """
    Absolute difference in the duration (number of time steps) of two items.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    length_difference = np.abs(item_a.times.size - item_b.times.size)
    return np.minimum(length_difference, max_value) / float(max_value)
|
[
"def",
"duration_distance",
"(",
"item_a",
",",
"item_b",
",",
"max_value",
")",
":",
"duration_a",
"=",
"item_a",
".",
"times",
".",
"size",
"duration_b",
"=",
"item_b",
".",
"times",
".",
"size",
"return",
"np",
".",
"minimum",
"(",
"np",
".",
"abs",
"(",
"duration_a",
"-",
"duration_b",
")",
",",
"max_value",
")",
"/",
"float",
"(",
"max_value",
")"
] |
Absolute difference in the duration of two items
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"Absolute",
"difference",
"in",
"the",
"duration",
"of",
"two",
"items"
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L463-L477
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
mean_area_distance
|
def mean_area_distance(item_a, item_b, max_value):
    """
    Absolute difference in the means of the areas of each track over time.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    mean_area_a = np.mean([item_a.size(t) for t in item_a.times])
    mean_area_b = np.mean([item_b.size(t) for t in item_b.times])
    # Clamp at max_value so the result stays within [0, 1], consistent with
    # the other track distance functions and with the docstring contract.
    return np.minimum(np.abs(mean_area_a - mean_area_b), max_value) / float(max_value)
|
python
|
def mean_area_distance(item_a, item_b, max_value):
    """
    Absolute difference in the means of the areas of each track over time.

    Args:
        item_a: STObject from the first set in TrackMatcher
        item_b: STObject from the second set in TrackMatcher
        max_value: Maximum distance value used as scaling value and upper constraint.

    Returns:
        Distance value between 0 and 1.
    """
    mean_area_a = np.mean([item_a.size(t) for t in item_a.times])
    mean_area_b = np.mean([item_b.size(t) for t in item_b.times])
    # Clamp at max_value so the result stays within [0, 1], consistent with
    # the other track distance functions and with the docstring contract.
    return np.minimum(np.abs(mean_area_a - mean_area_b), max_value) / float(max_value)
|
[
"def",
"mean_area_distance",
"(",
"item_a",
",",
"item_b",
",",
"max_value",
")",
":",
"mean_area_a",
"=",
"np",
".",
"mean",
"(",
"[",
"item_a",
".",
"size",
"(",
"t",
")",
"for",
"t",
"in",
"item_a",
".",
"times",
"]",
")",
"mean_area_b",
"=",
"np",
".",
"mean",
"(",
"[",
"item_b",
".",
"size",
"(",
"t",
")",
"for",
"t",
"in",
"item_b",
".",
"times",
"]",
")",
"return",
"np",
".",
"abs",
"(",
"mean_area_a",
"-",
"mean_area_b",
")",
"/",
"float",
"(",
"max_value",
")"
] |
Absolute difference in the means of the areas of each track over time.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
[
"Absolute",
"difference",
"in",
"the",
"means",
"of",
"the",
"areas",
"of",
"each",
"track",
"over",
"time",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L480-L494
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
ObjectMatcher.match_objects
|
def match_objects(self, set_a, set_b, time_a, time_b):
    """
    Match two sets of objects at particular times.

    Args:
        set_a: list of STObjects
        set_b: list of STObjects
        time_a: time at which set_a is being evaluated for matching
        time_b: time at which set_b is being evaluated for matching

    Returns:
        List of tuples containing (set_a index, set_b index) for each match
    """
    # Scale costs to [0, 100]; a cost of 100 marks an unmatchable pair.
    costs = self.cost_matrix(set_a, set_b, time_a, time_b) * 100
    viable_rows = np.where(costs.min(axis=1) < 100)[0]
    viable_cols = np.where(costs.min(axis=0) < 100)[0]
    matches = []
    if viable_rows.size > 0 and viable_cols.size > 0:
        # Run the Hungarian algorithm on the viable sub-matrix only, then
        # translate sub-matrix indices back to full-matrix indices.
        sub_costs = costs[np.ix_(viable_rows, viable_cols)]
        for sub_row, sub_col in Munkres().compute(sub_costs.tolist()):
            pair = (viable_rows[sub_row], viable_cols[sub_col])
            if costs[pair] < 100:
                matches.append(pair)
    return matches
|
python
|
def match_objects(self, set_a, set_b, time_a, time_b):
    """
    Match two sets of objects at particular times.

    Args:
        set_a: list of STObjects
        set_b: list of STObjects
        time_a: time at which set_a is being evaluated for matching
        time_b: time at which set_b is being evaluated for matching

    Returns:
        List of tuples containing (set_a index, set_b index) for each match
    """
    # Scale costs to [0, 100]; a cost of 100 marks an unmatchable pair.
    costs = self.cost_matrix(set_a, set_b, time_a, time_b) * 100
    viable_rows = np.where(costs.min(axis=1) < 100)[0]
    viable_cols = np.where(costs.min(axis=0) < 100)[0]
    matches = []
    if viable_rows.size > 0 and viable_cols.size > 0:
        # Run the Hungarian algorithm on the viable sub-matrix only, then
        # translate sub-matrix indices back to full-matrix indices.
        sub_costs = costs[np.ix_(viable_rows, viable_cols)]
        for sub_row, sub_col in Munkres().compute(sub_costs.tolist()):
            pair = (viable_rows[sub_row], viable_cols[sub_col])
            if costs[pair] < 100:
                matches.append(pair)
    return matches
|
[
"def",
"match_objects",
"(",
"self",
",",
"set_a",
",",
"set_b",
",",
"time_a",
",",
"time_b",
")",
":",
"costs",
"=",
"self",
".",
"cost_matrix",
"(",
"set_a",
",",
"set_b",
",",
"time_a",
",",
"time_b",
")",
"*",
"100",
"min_row_costs",
"=",
"costs",
".",
"min",
"(",
"axis",
"=",
"1",
")",
"min_col_costs",
"=",
"costs",
".",
"min",
"(",
"axis",
"=",
"0",
")",
"good_rows",
"=",
"np",
".",
"where",
"(",
"min_row_costs",
"<",
"100",
")",
"[",
"0",
"]",
"good_cols",
"=",
"np",
".",
"where",
"(",
"min_col_costs",
"<",
"100",
")",
"[",
"0",
"]",
"assignments",
"=",
"[",
"]",
"if",
"len",
"(",
"good_rows",
")",
">",
"0",
"and",
"len",
"(",
"good_cols",
")",
">",
"0",
":",
"munk",
"=",
"Munkres",
"(",
")",
"initial_assignments",
"=",
"munk",
".",
"compute",
"(",
"costs",
"[",
"tuple",
"(",
"np",
".",
"meshgrid",
"(",
"good_rows",
",",
"good_cols",
",",
"indexing",
"=",
"'ij'",
")",
")",
"]",
".",
"tolist",
"(",
")",
")",
"initial_assignments",
"=",
"[",
"(",
"good_rows",
"[",
"x",
"[",
"0",
"]",
"]",
",",
"good_cols",
"[",
"x",
"[",
"1",
"]",
"]",
")",
"for",
"x",
"in",
"initial_assignments",
"]",
"for",
"a",
"in",
"initial_assignments",
":",
"if",
"costs",
"[",
"a",
"[",
"0",
"]",
",",
"a",
"[",
"1",
"]",
"]",
"<",
"100",
":",
"assignments",
".",
"append",
"(",
"a",
")",
"return",
"assignments"
] |
Match two sets of objects at particular times.
Args:
set_a: list of STObjects
set_b: list of STObjects
time_a: time at which set_a is being evaluated for matching
time_b: time at which set_b is being evaluated for matching
Returns:
List of tuples containing (set_a index, set_b index) for each match
|
[
"Match",
"two",
"sets",
"of",
"objects",
"at",
"particular",
"times",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L27-L53
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
ObjectMatcher.cost_matrix
|
def cost_matrix(self, set_a, set_b, time_a, time_b):
    """
    Calculates the costs (distances) between the items in set a and set b at
    the specified times.

    Args:
        set_a: List of STObjects
        set_b: List of STObjects
        time_a: time at which objects in set_a are evaluated
        time_b: time at which objects in set_b are evaluated

    Returns:
        A numpy array with shape [len(set_a), len(set_b)] containing the cost
        matrix between the items in set a and the items in set b.
    """
    costs = np.zeros((len(set_a), len(set_b)))
    for row, obj_a in enumerate(set_a):
        # Fill one row at a time with the costs against every object in set_b.
        costs[row] = [self.total_cost_function(obj_a, obj_b, time_a, time_b)
                      for obj_b in set_b]
    return costs
|
python
|
def cost_matrix(self, set_a, set_b, time_a, time_b):
    """
    Calculates the costs (distances) between the items in set a and set b at
    the specified times.

    Args:
        set_a: List of STObjects
        set_b: List of STObjects
        time_a: time at which objects in set_a are evaluated
        time_b: time at which objects in set_b are evaluated

    Returns:
        A numpy array with shape [len(set_a), len(set_b)] containing the cost
        matrix between the items in set a and the items in set b.
    """
    costs = np.zeros((len(set_a), len(set_b)))
    for row, obj_a in enumerate(set_a):
        # Fill one row at a time with the costs against every object in set_b.
        costs[row] = [self.total_cost_function(obj_a, obj_b, time_a, time_b)
                      for obj_b in set_b]
    return costs
|
[
"def",
"cost_matrix",
"(",
"self",
",",
"set_a",
",",
"set_b",
",",
"time_a",
",",
"time_b",
")",
":",
"costs",
"=",
"np",
".",
"zeros",
"(",
"(",
"len",
"(",
"set_a",
")",
",",
"len",
"(",
"set_b",
")",
")",
")",
"for",
"a",
",",
"item_a",
"in",
"enumerate",
"(",
"set_a",
")",
":",
"for",
"b",
",",
"item_b",
"in",
"enumerate",
"(",
"set_b",
")",
":",
"costs",
"[",
"a",
",",
"b",
"]",
"=",
"self",
".",
"total_cost_function",
"(",
"item_a",
",",
"item_b",
",",
"time_a",
",",
"time_b",
")",
"return",
"costs"
] |
Calculates the costs (distances) between the items in set a and set b at the specified times.
Args:
set_a: List of STObjects
set_b: List of STObjects
time_a: time at which objects in set_a are evaluated
time_b: time at which object in set_b are evaluated
Returns:
A numpy array with shape [len(set_a), len(set_b)] containing the cost matrix between the items in set a
and the items in set b.
|
[
"Calculates",
"the",
"costs",
"(",
"distances",
")",
"between",
"the",
"items",
"in",
"set",
"a",
"and",
"set",
"b",
"at",
"the",
"specified",
"times",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L55-L73
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
ObjectMatcher.total_cost_function
|
def total_cost_function(self, item_a, item_b, time_a, time_b):
"""
Calculate total cost function between two items.
Args:
item_a: STObject
item_b: STObject
time_a: Timestep in item_a at which cost function is evaluated
time_b: Timestep in item_b at which cost function is evaluated
Returns:
The total weighted distance between item_a and item_b
"""
distances = np.zeros(len(self.weights))
for c, component in enumerate(self.cost_function_components):
distances[c] = component(item_a, time_a, item_b, time_b, self.max_values[c])
total_distance = np.sum(self.weights * distances)
return total_distance
|
python
|
def total_cost_function(self, item_a, item_b, time_a, time_b):
"""
Calculate total cost function between two items.
Args:
item_a: STObject
item_b: STObject
time_a: Timestep in item_a at which cost function is evaluated
time_b: Timestep in item_b at which cost function is evaluated
Returns:
The total weighted distance between item_a and item_b
"""
distances = np.zeros(len(self.weights))
for c, component in enumerate(self.cost_function_components):
distances[c] = component(item_a, time_a, item_b, time_b, self.max_values[c])
total_distance = np.sum(self.weights * distances)
return total_distance
|
[
"def",
"total_cost_function",
"(",
"self",
",",
"item_a",
",",
"item_b",
",",
"time_a",
",",
"time_b",
")",
":",
"distances",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"self",
".",
"weights",
")",
")",
"for",
"c",
",",
"component",
"in",
"enumerate",
"(",
"self",
".",
"cost_function_components",
")",
":",
"distances",
"[",
"c",
"]",
"=",
"component",
"(",
"item_a",
",",
"time_a",
",",
"item_b",
",",
"time_b",
",",
"self",
".",
"max_values",
"[",
"c",
"]",
")",
"total_distance",
"=",
"np",
".",
"sum",
"(",
"self",
".",
"weights",
"*",
"distances",
")",
"return",
"total_distance"
] |
Calculate total cost function between two items.
Args:
item_a: STObject
item_b: STObject
time_a: Timestep in item_a at which cost function is evaluated
time_b: Timestep in item_b at which cost function is evaluated
Returns:
The total weighted distance between item_a and item_b
|
[
"Calculate",
"total",
"cost",
"function",
"between",
"two",
"items",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L75-L92
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
TrackMatcher.match_tracks
|
def match_tracks(self, set_a, set_b, closest_matches=False):
"""
Find the optimal set of matching assignments between set a and set b. This function supports optimal 1:1
matching using the Munkres method and matching from every object in set a to the closest object in set b.
In this situation set b accepts multiple matches from set a.
Args:
set_a:
set_b:
closest_matches:
Returns:
"""
costs = self.track_cost_matrix(set_a, set_b) * 100
min_row_costs = costs.min(axis=1)
min_col_costs = costs.min(axis=0)
good_rows = np.where(min_row_costs < 100)[0]
good_cols = np.where(min_col_costs < 100)[0]
assignments = []
if len(good_rows) > 0 and len(good_cols) > 0:
if closest_matches:
b_matches = costs[np.meshgrid(good_rows, good_cols, indexing='ij')].argmin(axis=1)
a_matches = np.arange(b_matches.size)
initial_assignments = [(good_rows[a_matches[x]], good_cols[b_matches[x]])
for x in range(b_matches.size)]
else:
munk = Munkres()
initial_assignments = munk.compute(costs[np.meshgrid(good_rows, good_cols, indexing='ij')].tolist())
initial_assignments = [(good_rows[x[0]], good_cols[x[1]]) for x in initial_assignments]
for a in initial_assignments:
if costs[a[0], a[1]] < 100:
assignments.append(a)
return assignments
|
python
|
def match_tracks(self, set_a, set_b, closest_matches=False):
"""
Find the optimal set of matching assignments between set a and set b. This function supports optimal 1:1
matching using the Munkres method and matching from every object in set a to the closest object in set b.
In this situation set b accepts multiple matches from set a.
Args:
set_a:
set_b:
closest_matches:
Returns:
"""
costs = self.track_cost_matrix(set_a, set_b) * 100
min_row_costs = costs.min(axis=1)
min_col_costs = costs.min(axis=0)
good_rows = np.where(min_row_costs < 100)[0]
good_cols = np.where(min_col_costs < 100)[0]
assignments = []
if len(good_rows) > 0 and len(good_cols) > 0:
if closest_matches:
b_matches = costs[np.meshgrid(good_rows, good_cols, indexing='ij')].argmin(axis=1)
a_matches = np.arange(b_matches.size)
initial_assignments = [(good_rows[a_matches[x]], good_cols[b_matches[x]])
for x in range(b_matches.size)]
else:
munk = Munkres()
initial_assignments = munk.compute(costs[np.meshgrid(good_rows, good_cols, indexing='ij')].tolist())
initial_assignments = [(good_rows[x[0]], good_cols[x[1]]) for x in initial_assignments]
for a in initial_assignments:
if costs[a[0], a[1]] < 100:
assignments.append(a)
return assignments
|
[
"def",
"match_tracks",
"(",
"self",
",",
"set_a",
",",
"set_b",
",",
"closest_matches",
"=",
"False",
")",
":",
"costs",
"=",
"self",
".",
"track_cost_matrix",
"(",
"set_a",
",",
"set_b",
")",
"*",
"100",
"min_row_costs",
"=",
"costs",
".",
"min",
"(",
"axis",
"=",
"1",
")",
"min_col_costs",
"=",
"costs",
".",
"min",
"(",
"axis",
"=",
"0",
")",
"good_rows",
"=",
"np",
".",
"where",
"(",
"min_row_costs",
"<",
"100",
")",
"[",
"0",
"]",
"good_cols",
"=",
"np",
".",
"where",
"(",
"min_col_costs",
"<",
"100",
")",
"[",
"0",
"]",
"assignments",
"=",
"[",
"]",
"if",
"len",
"(",
"good_rows",
")",
">",
"0",
"and",
"len",
"(",
"good_cols",
")",
">",
"0",
":",
"if",
"closest_matches",
":",
"b_matches",
"=",
"costs",
"[",
"np",
".",
"meshgrid",
"(",
"good_rows",
",",
"good_cols",
",",
"indexing",
"=",
"'ij'",
")",
"]",
".",
"argmin",
"(",
"axis",
"=",
"1",
")",
"a_matches",
"=",
"np",
".",
"arange",
"(",
"b_matches",
".",
"size",
")",
"initial_assignments",
"=",
"[",
"(",
"good_rows",
"[",
"a_matches",
"[",
"x",
"]",
"]",
",",
"good_cols",
"[",
"b_matches",
"[",
"x",
"]",
"]",
")",
"for",
"x",
"in",
"range",
"(",
"b_matches",
".",
"size",
")",
"]",
"else",
":",
"munk",
"=",
"Munkres",
"(",
")",
"initial_assignments",
"=",
"munk",
".",
"compute",
"(",
"costs",
"[",
"np",
".",
"meshgrid",
"(",
"good_rows",
",",
"good_cols",
",",
"indexing",
"=",
"'ij'",
")",
"]",
".",
"tolist",
"(",
")",
")",
"initial_assignments",
"=",
"[",
"(",
"good_rows",
"[",
"x",
"[",
"0",
"]",
"]",
",",
"good_cols",
"[",
"x",
"[",
"1",
"]",
"]",
")",
"for",
"x",
"in",
"initial_assignments",
"]",
"for",
"a",
"in",
"initial_assignments",
":",
"if",
"costs",
"[",
"a",
"[",
"0",
"]",
",",
"a",
"[",
"1",
"]",
"]",
"<",
"100",
":",
"assignments",
".",
"append",
"(",
"a",
")",
"return",
"assignments"
] |
Find the optimal set of matching assignments between set a and set b. This function supports optimal 1:1
matching using the Munkres method and matching from every object in set a to the closest object in set b.
In this situation set b accepts multiple matches from set a.
Args:
set_a:
set_b:
closest_matches:
Returns:
|
[
"Find",
"the",
"optimal",
"set",
"of",
"matching",
"assignments",
"between",
"set",
"a",
"and",
"set",
"b",
".",
"This",
"function",
"supports",
"optimal",
"1",
":",
"1",
"matching",
"using",
"the",
"Munkres",
"method",
"and",
"matching",
"from",
"every",
"object",
"in",
"set",
"a",
"to",
"the",
"closest",
"object",
"in",
"set",
"b",
".",
"In",
"this",
"situation",
"set",
"b",
"accepts",
"multiple",
"matches",
"from",
"set",
"a",
"."
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L112-L145
|
train
|
djgagne/hagelslag
|
hagelslag/processing/ObjectMatcher.py
|
TrackStepMatcher.match
|
def match(self, set_a, set_b):
"""
For each step in each track from set_a, identify all steps in all tracks from set_b that meet all
cost function criteria
Args:
set_a: List of STObjects
set_b: List of STObjects
Returns:
track_pairings: pandas.DataFrame
"""
track_step_matches = [[] * len(set_a)]
costs = self.cost_matrix(set_a, set_b)
valid_costs = np.all(costs < 1, axis=2)
set_a_matches, set_b_matches = np.where(valid_costs)
s = 0
track_pairings = pd.DataFrame(index=np.arange(costs.shape[0]),
columns=["Track", "Step", "Time", "Matched", "Pairings"], dtype=object)
set_b_info = []
for trb, track_b in enumerate(set_b):
for t, time in enumerate(track_b.times):
set_b_info.append((trb, t))
set_b_info_arr = np.array(set_b_info, dtype=int)
for tr, track_a in enumerate(set_a):
for t, time in enumerate(track_a.times):
track_pairings.loc[s, ["Track", "Step", "Time"]] = [tr, t, time]
track_pairings.loc[s, "Matched"] = 1 if np.count_nonzero(set_a_matches == s) > 0 else 0
if track_pairings.loc[s, "Matched"] == 1:
track_pairings.loc[s, "Pairings"] = set_b_info_arr[set_b_matches[set_a_matches == s]]
else:
track_pairings.loc[s, "Pairings"] = np.array([])
s += 1
return track_pairings
|
python
|
def match(self, set_a, set_b):
"""
For each step in each track from set_a, identify all steps in all tracks from set_b that meet all
cost function criteria
Args:
set_a: List of STObjects
set_b: List of STObjects
Returns:
track_pairings: pandas.DataFrame
"""
track_step_matches = [[] * len(set_a)]
costs = self.cost_matrix(set_a, set_b)
valid_costs = np.all(costs < 1, axis=2)
set_a_matches, set_b_matches = np.where(valid_costs)
s = 0
track_pairings = pd.DataFrame(index=np.arange(costs.shape[0]),
columns=["Track", "Step", "Time", "Matched", "Pairings"], dtype=object)
set_b_info = []
for trb, track_b in enumerate(set_b):
for t, time in enumerate(track_b.times):
set_b_info.append((trb, t))
set_b_info_arr = np.array(set_b_info, dtype=int)
for tr, track_a in enumerate(set_a):
for t, time in enumerate(track_a.times):
track_pairings.loc[s, ["Track", "Step", "Time"]] = [tr, t, time]
track_pairings.loc[s, "Matched"] = 1 if np.count_nonzero(set_a_matches == s) > 0 else 0
if track_pairings.loc[s, "Matched"] == 1:
track_pairings.loc[s, "Pairings"] = set_b_info_arr[set_b_matches[set_a_matches == s]]
else:
track_pairings.loc[s, "Pairings"] = np.array([])
s += 1
return track_pairings
|
[
"def",
"match",
"(",
"self",
",",
"set_a",
",",
"set_b",
")",
":",
"track_step_matches",
"=",
"[",
"[",
"]",
"*",
"len",
"(",
"set_a",
")",
"]",
"costs",
"=",
"self",
".",
"cost_matrix",
"(",
"set_a",
",",
"set_b",
")",
"valid_costs",
"=",
"np",
".",
"all",
"(",
"costs",
"<",
"1",
",",
"axis",
"=",
"2",
")",
"set_a_matches",
",",
"set_b_matches",
"=",
"np",
".",
"where",
"(",
"valid_costs",
")",
"s",
"=",
"0",
"track_pairings",
"=",
"pd",
".",
"DataFrame",
"(",
"index",
"=",
"np",
".",
"arange",
"(",
"costs",
".",
"shape",
"[",
"0",
"]",
")",
",",
"columns",
"=",
"[",
"\"Track\"",
",",
"\"Step\"",
",",
"\"Time\"",
",",
"\"Matched\"",
",",
"\"Pairings\"",
"]",
",",
"dtype",
"=",
"object",
")",
"set_b_info",
"=",
"[",
"]",
"for",
"trb",
",",
"track_b",
"in",
"enumerate",
"(",
"set_b",
")",
":",
"for",
"t",
",",
"time",
"in",
"enumerate",
"(",
"track_b",
".",
"times",
")",
":",
"set_b_info",
".",
"append",
"(",
"(",
"trb",
",",
"t",
")",
")",
"set_b_info_arr",
"=",
"np",
".",
"array",
"(",
"set_b_info",
",",
"dtype",
"=",
"int",
")",
"for",
"tr",
",",
"track_a",
"in",
"enumerate",
"(",
"set_a",
")",
":",
"for",
"t",
",",
"time",
"in",
"enumerate",
"(",
"track_a",
".",
"times",
")",
":",
"track_pairings",
".",
"loc",
"[",
"s",
",",
"[",
"\"Track\"",
",",
"\"Step\"",
",",
"\"Time\"",
"]",
"]",
"=",
"[",
"tr",
",",
"t",
",",
"time",
"]",
"track_pairings",
".",
"loc",
"[",
"s",
",",
"\"Matched\"",
"]",
"=",
"1",
"if",
"np",
".",
"count_nonzero",
"(",
"set_a_matches",
"==",
"s",
")",
">",
"0",
"else",
"0",
"if",
"track_pairings",
".",
"loc",
"[",
"s",
",",
"\"Matched\"",
"]",
"==",
"1",
":",
"track_pairings",
".",
"loc",
"[",
"s",
",",
"\"Pairings\"",
"]",
"=",
"set_b_info_arr",
"[",
"set_b_matches",
"[",
"set_a_matches",
"==",
"s",
"]",
"]",
"else",
":",
"track_pairings",
".",
"loc",
"[",
"s",
",",
"\"Pairings\"",
"]",
"=",
"np",
".",
"array",
"(",
"[",
"]",
")",
"s",
"+=",
"1",
"return",
"track_pairings"
] |
For each step in each track from set_a, identify all steps in all tracks from set_b that meet all
cost function criteria
Args:
set_a: List of STObjects
set_b: List of STObjects
Returns:
track_pairings: pandas.DataFrame
|
[
"For",
"each",
"step",
"in",
"each",
"track",
"from",
"set_a",
"identify",
"all",
"steps",
"in",
"all",
"tracks",
"from",
"set_b",
"that",
"meet",
"all",
"cost",
"function",
"criteria",
"Args",
":",
"set_a",
":",
"List",
"of",
"STObjects",
"set_b",
":",
"List",
"of",
"STObjects"
] |
6fb6c3df90bf4867e13a97d3460b14471d107df1
|
https://github.com/djgagne/hagelslag/blob/6fb6c3df90bf4867e13a97d3460b14471d107df1/hagelslag/processing/ObjectMatcher.py#L190-L224
|
train
|
nion-software/nionswift
|
nion/swift/model/Symbolic.py
|
ComputationVariable.variable_specifier
|
def variable_specifier(self) -> dict:
"""Return the variable specifier for this variable.
The specifier can be used to lookup the value of this variable in a computation context.
"""
if self.value_type is not None:
return {"type": "variable", "version": 1, "uuid": str(self.uuid), "x-name": self.name, "x-value": self.value}
else:
return self.specifier
|
python
|
def variable_specifier(self) -> dict:
"""Return the variable specifier for this variable.
The specifier can be used to lookup the value of this variable in a computation context.
"""
if self.value_type is not None:
return {"type": "variable", "version": 1, "uuid": str(self.uuid), "x-name": self.name, "x-value": self.value}
else:
return self.specifier
|
[
"def",
"variable_specifier",
"(",
"self",
")",
"->",
"dict",
":",
"if",
"self",
".",
"value_type",
"is",
"not",
"None",
":",
"return",
"{",
"\"type\"",
":",
"\"variable\"",
",",
"\"version\"",
":",
"1",
",",
"\"uuid\"",
":",
"str",
"(",
"self",
".",
"uuid",
")",
",",
"\"x-name\"",
":",
"self",
".",
"name",
",",
"\"x-value\"",
":",
"self",
".",
"value",
"}",
"else",
":",
"return",
"self",
".",
"specifier"
] |
Return the variable specifier for this variable.
The specifier can be used to lookup the value of this variable in a computation context.
|
[
"Return",
"the",
"variable",
"specifier",
"for",
"this",
"variable",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Symbolic.py#L228-L236
|
train
|
nion-software/nionswift
|
nion/swift/model/Symbolic.py
|
ComputationVariable.bound_variable
|
def bound_variable(self):
"""Return an object with a value property and a changed_event.
The value property returns the value of the variable. The changed_event is fired
whenever the value changes.
"""
class BoundVariable:
def __init__(self, variable):
self.__variable = variable
self.changed_event = Event.Event()
self.needs_rebind_event = Event.Event()
def property_changed(key):
if key == "value":
self.changed_event.fire()
self.__variable_property_changed_listener = variable.property_changed_event.listen(property_changed)
@property
def value(self):
return self.__variable.value
def close(self):
self.__variable_property_changed_listener.close()
self.__variable_property_changed_listener = None
return BoundVariable(self)
|
python
|
def bound_variable(self):
"""Return an object with a value property and a changed_event.
The value property returns the value of the variable. The changed_event is fired
whenever the value changes.
"""
class BoundVariable:
def __init__(self, variable):
self.__variable = variable
self.changed_event = Event.Event()
self.needs_rebind_event = Event.Event()
def property_changed(key):
if key == "value":
self.changed_event.fire()
self.__variable_property_changed_listener = variable.property_changed_event.listen(property_changed)
@property
def value(self):
return self.__variable.value
def close(self):
self.__variable_property_changed_listener.close()
self.__variable_property_changed_listener = None
return BoundVariable(self)
|
[
"def",
"bound_variable",
"(",
"self",
")",
":",
"class",
"BoundVariable",
":",
"def",
"__init__",
"(",
"self",
",",
"variable",
")",
":",
"self",
".",
"__variable",
"=",
"variable",
"self",
".",
"changed_event",
"=",
"Event",
".",
"Event",
"(",
")",
"self",
".",
"needs_rebind_event",
"=",
"Event",
".",
"Event",
"(",
")",
"def",
"property_changed",
"(",
"key",
")",
":",
"if",
"key",
"==",
"\"value\"",
":",
"self",
".",
"changed_event",
".",
"fire",
"(",
")",
"self",
".",
"__variable_property_changed_listener",
"=",
"variable",
".",
"property_changed_event",
".",
"listen",
"(",
"property_changed",
")",
"@",
"property",
"def",
"value",
"(",
"self",
")",
":",
"return",
"self",
".",
"__variable",
".",
"value",
"def",
"close",
"(",
"self",
")",
":",
"self",
".",
"__variable_property_changed_listener",
".",
"close",
"(",
")",
"self",
".",
"__variable_property_changed_listener",
"=",
"None",
"return",
"BoundVariable",
"(",
"self",
")"
] |
Return an object with a value property and a changed_event.
The value property returns the value of the variable. The changed_event is fired
whenever the value changes.
|
[
"Return",
"an",
"object",
"with",
"a",
"value",
"property",
"and",
"a",
"changed_event",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Symbolic.py#L239-L261
|
train
|
nion-software/nionswift
|
nion/swift/model/Symbolic.py
|
ComputationContext.resolve_object_specifier
|
def resolve_object_specifier(self, object_specifier, secondary_specifier=None, property_name=None, objects_model=None):
"""Resolve the object specifier.
First lookup the object specifier in the enclosing computation. If it's not found,
then lookup in the computation's context. Otherwise it should be a value type variable.
In that case, return the bound variable.
"""
variable = self.__computation().resolve_variable(object_specifier)
if not variable:
return self.__context.resolve_object_specifier(object_specifier, secondary_specifier, property_name, objects_model)
elif variable.specifier is None:
return variable.bound_variable
return None
|
python
|
def resolve_object_specifier(self, object_specifier, secondary_specifier=None, property_name=None, objects_model=None):
"""Resolve the object specifier.
First lookup the object specifier in the enclosing computation. If it's not found,
then lookup in the computation's context. Otherwise it should be a value type variable.
In that case, return the bound variable.
"""
variable = self.__computation().resolve_variable(object_specifier)
if not variable:
return self.__context.resolve_object_specifier(object_specifier, secondary_specifier, property_name, objects_model)
elif variable.specifier is None:
return variable.bound_variable
return None
|
[
"def",
"resolve_object_specifier",
"(",
"self",
",",
"object_specifier",
",",
"secondary_specifier",
"=",
"None",
",",
"property_name",
"=",
"None",
",",
"objects_model",
"=",
"None",
")",
":",
"variable",
"=",
"self",
".",
"__computation",
"(",
")",
".",
"resolve_variable",
"(",
"object_specifier",
")",
"if",
"not",
"variable",
":",
"return",
"self",
".",
"__context",
".",
"resolve_object_specifier",
"(",
"object_specifier",
",",
"secondary_specifier",
",",
"property_name",
",",
"objects_model",
")",
"elif",
"variable",
".",
"specifier",
"is",
"None",
":",
"return",
"variable",
".",
"bound_variable",
"return",
"None"
] |
Resolve the object specifier.
First lookup the object specifier in the enclosing computation. If it's not found,
then lookup in the computation's context. Otherwise it should be a value type variable.
In that case, return the bound variable.
|
[
"Resolve",
"the",
"object",
"specifier",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Symbolic.py#L435-L447
|
train
|
nion-software/nionswift
|
nion/swift/model/Symbolic.py
|
Computation.parse_names
|
def parse_names(cls, expression):
"""Return the list of identifiers used in the expression."""
names = set()
try:
ast_node = ast.parse(expression, "ast")
class Visitor(ast.NodeVisitor):
def visit_Name(self, node):
names.add(node.id)
Visitor().visit(ast_node)
except Exception:
pass
return names
|
python
|
def parse_names(cls, expression):
"""Return the list of identifiers used in the expression."""
names = set()
try:
ast_node = ast.parse(expression, "ast")
class Visitor(ast.NodeVisitor):
def visit_Name(self, node):
names.add(node.id)
Visitor().visit(ast_node)
except Exception:
pass
return names
|
[
"def",
"parse_names",
"(",
"cls",
",",
"expression",
")",
":",
"names",
"=",
"set",
"(",
")",
"try",
":",
"ast_node",
"=",
"ast",
".",
"parse",
"(",
"expression",
",",
"\"ast\"",
")",
"class",
"Visitor",
"(",
"ast",
".",
"NodeVisitor",
")",
":",
"def",
"visit_Name",
"(",
"self",
",",
"node",
")",
":",
"names",
".",
"add",
"(",
"node",
".",
"id",
")",
"Visitor",
"(",
")",
".",
"visit",
"(",
"ast_node",
")",
"except",
"Exception",
":",
"pass",
"return",
"names"
] |
Return the list of identifiers used in the expression.
|
[
"Return",
"the",
"list",
"of",
"identifiers",
"used",
"in",
"the",
"expression",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Symbolic.py#L686-L699
|
train
|
nion-software/nionswift
|
nion/swift/model/Symbolic.py
|
Computation.bind
|
def bind(self, context) -> None:
"""Bind a context to this computation.
The context allows the computation to convert object specifiers to actual objects.
"""
# make a computation context based on the enclosing context.
self.__computation_context = ComputationContext(self, context)
# re-bind is not valid. be careful to set the computation after the data item is already in document.
for variable in self.variables:
assert variable.bound_item is None
for result in self.results:
assert result.bound_item is None
# bind the variables
for variable in self.variables:
self.__bind_variable(variable)
# bind the results
for result in self.results:
self.__bind_result(result)
|
python
|
def bind(self, context) -> None:
"""Bind a context to this computation.
The context allows the computation to convert object specifiers to actual objects.
"""
# make a computation context based on the enclosing context.
self.__computation_context = ComputationContext(self, context)
# re-bind is not valid. be careful to set the computation after the data item is already in document.
for variable in self.variables:
assert variable.bound_item is None
for result in self.results:
assert result.bound_item is None
# bind the variables
for variable in self.variables:
self.__bind_variable(variable)
# bind the results
for result in self.results:
self.__bind_result(result)
|
[
"def",
"bind",
"(",
"self",
",",
"context",
")",
"->",
"None",
":",
"# make a computation context based on the enclosing context.",
"self",
".",
"__computation_context",
"=",
"ComputationContext",
"(",
"self",
",",
"context",
")",
"# re-bind is not valid. be careful to set the computation after the data item is already in document.",
"for",
"variable",
"in",
"self",
".",
"variables",
":",
"assert",
"variable",
".",
"bound_item",
"is",
"None",
"for",
"result",
"in",
"self",
".",
"results",
":",
"assert",
"result",
".",
"bound_item",
"is",
"None",
"# bind the variables",
"for",
"variable",
"in",
"self",
".",
"variables",
":",
"self",
".",
"__bind_variable",
"(",
"variable",
")",
"# bind the results",
"for",
"result",
"in",
"self",
".",
"results",
":",
"self",
".",
"__bind_result",
"(",
"result",
")"
] |
Bind a context to this computation.
The context allows the computation to convert object specifiers to actual objects.
|
[
"Bind",
"a",
"context",
"to",
"this",
"computation",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Symbolic.py#L863-L884
|
train
|
nion-software/nionswift
|
nion/swift/model/Symbolic.py
|
Computation.unbind
|
def unbind(self):
"""Unlisten and close each bound item."""
for variable in self.variables:
self.__unbind_variable(variable)
for result in self.results:
self.__unbind_result(result)
|
python
|
def unbind(self):
"""Unlisten and close each bound item."""
for variable in self.variables:
self.__unbind_variable(variable)
for result in self.results:
self.__unbind_result(result)
|
[
"def",
"unbind",
"(",
"self",
")",
":",
"for",
"variable",
"in",
"self",
".",
"variables",
":",
"self",
".",
"__unbind_variable",
"(",
"variable",
")",
"for",
"result",
"in",
"self",
".",
"results",
":",
"self",
".",
"__unbind_result",
"(",
"result",
")"
] |
Unlisten and close each bound item.
|
[
"Unlisten",
"and",
"close",
"each",
"bound",
"item",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/Symbolic.py#L886-L891
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.ativar_sat
|
def ativar_sat(self, tipo_certificado, cnpj, codigo_uf):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: Uma resposta SAT especilizada em ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
"""
retorno = super(ClienteSATLocal, self).ativar_sat(
tipo_certificado, cnpj, codigo_uf)
return RespostaAtivarSAT.analisar(retorno)
|
python
|
def ativar_sat(self, tipo_certificado, cnpj, codigo_uf):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: Uma resposta SAT especilizada em ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
"""
retorno = super(ClienteSATLocal, self).ativar_sat(
tipo_certificado, cnpj, codigo_uf)
return RespostaAtivarSAT.analisar(retorno)
|
[
"def",
"ativar_sat",
"(",
"self",
",",
"tipo_certificado",
",",
"cnpj",
",",
"codigo_uf",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"ativar_sat",
"(",
"tipo_certificado",
",",
"cnpj",
",",
"codigo_uf",
")",
"return",
"RespostaAtivarSAT",
".",
"analisar",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: Uma resposta SAT especilizada em ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"ativar_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L46-L54
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.comunicar_certificado_icpbrasil
|
def comunicar_certificado_icpbrasil(self, certificado):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
comunicar_certificado_icpbrasil(certificado)
return RespostaSAT.comunicar_certificado_icpbrasil(retorno)
|
python
|
def comunicar_certificado_icpbrasil(self, certificado):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
comunicar_certificado_icpbrasil(certificado)
return RespostaSAT.comunicar_certificado_icpbrasil(retorno)
|
[
"def",
"comunicar_certificado_icpbrasil",
"(",
"self",
",",
"certificado",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"comunicar_certificado_icpbrasil",
"(",
"certificado",
")",
"return",
"RespostaSAT",
".",
"comunicar_certificado_icpbrasil",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"comunicar_certificado_icpbrasil",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L57-L65
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.enviar_dados_venda
|
def enviar_dados_venda(self, dados_venda):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: Uma resposta SAT especializada em ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
"""
retorno = super(ClienteSATLocal, self).enviar_dados_venda(dados_venda)
return RespostaEnviarDadosVenda.analisar(retorno)
|
python
|
def enviar_dados_venda(self, dados_venda):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: Uma resposta SAT especializada em ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
"""
retorno = super(ClienteSATLocal, self).enviar_dados_venda(dados_venda)
return RespostaEnviarDadosVenda.analisar(retorno)
|
[
"def",
"enviar_dados_venda",
"(",
"self",
",",
"dados_venda",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"enviar_dados_venda",
"(",
"dados_venda",
")",
"return",
"RespostaEnviarDadosVenda",
".",
"analisar",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: Uma resposta SAT especializada em ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"enviar_dados_venda",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L68-L75
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.cancelar_ultima_venda
|
def cancelar_ultima_venda(self, chave_cfe, dados_cancelamento):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: Uma resposta SAT especializada em ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
"""
retorno = super(ClienteSATLocal, self).\
cancelar_ultima_venda(chave_cfe, dados_cancelamento)
return RespostaCancelarUltimaVenda.analisar(retorno)
|
python
|
def cancelar_ultima_venda(self, chave_cfe, dados_cancelamento):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: Uma resposta SAT especializada em ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
"""
retorno = super(ClienteSATLocal, self).\
cancelar_ultima_venda(chave_cfe, dados_cancelamento)
return RespostaCancelarUltimaVenda.analisar(retorno)
|
[
"def",
"cancelar_ultima_venda",
"(",
"self",
",",
"chave_cfe",
",",
"dados_cancelamento",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"cancelar_ultima_venda",
"(",
"chave_cfe",
",",
"dados_cancelamento",
")",
"return",
"RespostaCancelarUltimaVenda",
".",
"analisar",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: Uma resposta SAT especializada em ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"cancelar_ultima_venda",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L78-L86
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.consultar_sat
|
def consultar_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).consultar_sat()
return RespostaSAT.consultar_sat(retorno)
|
python
|
def consultar_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).consultar_sat()
return RespostaSAT.consultar_sat(retorno)
|
[
"def",
"consultar_sat",
"(",
"self",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"consultar_sat",
"(",
")",
"return",
"RespostaSAT",
".",
"consultar_sat",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"consultar_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L89-L96
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.consultar_status_operacional
|
def consultar_status_operacional(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: Uma resposta SAT especializada em ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
"""
retorno = super(ClienteSATLocal, self).consultar_status_operacional()
return RespostaConsultarStatusOperacional.analisar(retorno)
|
python
|
def consultar_status_operacional(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: Uma resposta SAT especializada em ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
"""
retorno = super(ClienteSATLocal, self).consultar_status_operacional()
return RespostaConsultarStatusOperacional.analisar(retorno)
|
[
"def",
"consultar_status_operacional",
"(",
"self",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"consultar_status_operacional",
"(",
")",
"return",
"RespostaConsultarStatusOperacional",
".",
"analisar",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: Uma resposta SAT especializada em ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"consultar_status_operacional",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L109-L116
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.consultar_numero_sessao
|
def consultar_numero_sessao(self, numero_sessao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: Uma resposta SAT que irá depender da sessão consultada.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
consultar_numero_sessao(numero_sessao)
return RespostaConsultarNumeroSessao.analisar(retorno)
|
python
|
def consultar_numero_sessao(self, numero_sessao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: Uma resposta SAT que irá depender da sessão consultada.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
consultar_numero_sessao(numero_sessao)
return RespostaConsultarNumeroSessao.analisar(retorno)
|
[
"def",
"consultar_numero_sessao",
"(",
"self",
",",
"numero_sessao",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"consultar_numero_sessao",
"(",
"numero_sessao",
")",
"return",
"RespostaConsultarNumeroSessao",
".",
"analisar",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: Uma resposta SAT que irá depender da sessão consultada.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"consultar_numero_sessao",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L119-L127
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.configurar_interface_de_rede
|
def configurar_interface_de_rede(self, configuracao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
configurar_interface_de_rede(configuracao)
return RespostaSAT.configurar_interface_de_rede(retorno)
|
python
|
def configurar_interface_de_rede(self, configuracao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
configurar_interface_de_rede(configuracao)
return RespostaSAT.configurar_interface_de_rede(retorno)
|
[
"def",
"configurar_interface_de_rede",
"(",
"self",
",",
"configuracao",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"configurar_interface_de_rede",
"(",
"configuracao",
")",
"return",
"RespostaSAT",
".",
"configurar_interface_de_rede",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"configurar_interface_de_rede",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L130-L138
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.associar_assinatura
|
def associar_assinatura(self, sequencia_cnpj, assinatura_ac):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
associar_assinatura(sequencia_cnpj, assinatura_ac)
# (!) resposta baseada na redação com efeitos até 31-12-2016
return RespostaSAT.associar_assinatura(retorno)
|
python
|
def associar_assinatura(self, sequencia_cnpj, assinatura_ac):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
associar_assinatura(sequencia_cnpj, assinatura_ac)
# (!) resposta baseada na redação com efeitos até 31-12-2016
return RespostaSAT.associar_assinatura(retorno)
|
[
"def",
"associar_assinatura",
"(",
"self",
",",
"sequencia_cnpj",
",",
"assinatura_ac",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"associar_assinatura",
"(",
"sequencia_cnpj",
",",
"assinatura_ac",
")",
"# (!) resposta baseada na redação com efeitos até 31-12-2016",
"return",
"RespostaSAT",
".",
"associar_assinatura",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"associar_assinatura",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L141-L150
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.atualizar_software_sat
|
def atualizar_software_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.atualizar_software_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).atualizar_software_sat()
return RespostaSAT.atualizar_software_sat(retorno)
|
python
|
def atualizar_software_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.atualizar_software_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).atualizar_software_sat()
return RespostaSAT.atualizar_software_sat(retorno)
|
[
"def",
"atualizar_software_sat",
"(",
"self",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"atualizar_software_sat",
"(",
")",
"return",
"RespostaSAT",
".",
"atualizar_software_sat",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.atualizar_software_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"atualizar_software_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L153-L160
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.extrair_logs
|
def extrair_logs(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.extrair_logs`.
:return: Uma resposta SAT especializada em ``ExtrairLogs``.
:rtype: satcfe.resposta.extrairlogs.RespostaExtrairLogs
"""
retorno = super(ClienteSATLocal, self).extrair_logs()
return RespostaExtrairLogs.analisar(retorno)
|
python
|
def extrair_logs(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.extrair_logs`.
:return: Uma resposta SAT especializada em ``ExtrairLogs``.
:rtype: satcfe.resposta.extrairlogs.RespostaExtrairLogs
"""
retorno = super(ClienteSATLocal, self).extrair_logs()
return RespostaExtrairLogs.analisar(retorno)
|
[
"def",
"extrair_logs",
"(",
"self",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"extrair_logs",
"(",
")",
"return",
"RespostaExtrairLogs",
".",
"analisar",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.extrair_logs`.
:return: Uma resposta SAT especializada em ``ExtrairLogs``.
:rtype: satcfe.resposta.extrairlogs.RespostaExtrairLogs
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"extrair_logs",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L163-L170
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.bloquear_sat
|
def bloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).bloquear_sat()
return RespostaSAT.bloquear_sat(retorno)
|
python
|
def bloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).bloquear_sat()
return RespostaSAT.bloquear_sat(retorno)
|
[
"def",
"bloquear_sat",
"(",
"self",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"bloquear_sat",
"(",
")",
"return",
"RespostaSAT",
".",
"bloquear_sat",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"bloquear_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L173-L180
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.desbloquear_sat
|
def desbloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.desbloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).desbloquear_sat()
return RespostaSAT.desbloquear_sat(retorno)
|
python
|
def desbloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.desbloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).desbloquear_sat()
return RespostaSAT.desbloquear_sat(retorno)
|
[
"def",
"desbloquear_sat",
"(",
"self",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"desbloquear_sat",
"(",
")",
"return",
"RespostaSAT",
".",
"desbloquear_sat",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.desbloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"desbloquear_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L183-L190
|
train
|
base4sistemas/satcfe
|
satcfe/clientelocal.py
|
ClienteSATLocal.trocar_codigo_de_ativacao
|
def trocar_codigo_de_ativacao(self, novo_codigo_ativacao,
opcao=constantes.CODIGO_ATIVACAO_REGULAR,
codigo_emergencia=None):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.trocar_codigo_de_ativacao`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).trocar_codigo_de_ativacao(
novo_codigo_ativacao, opcao=opcao,
codigo_emergencia=codigo_emergencia)
return RespostaSAT.trocar_codigo_de_ativacao(retorno)
|
python
|
def trocar_codigo_de_ativacao(self, novo_codigo_ativacao,
opcao=constantes.CODIGO_ATIVACAO_REGULAR,
codigo_emergencia=None):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.trocar_codigo_de_ativacao`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).trocar_codigo_de_ativacao(
novo_codigo_ativacao, opcao=opcao,
codigo_emergencia=codigo_emergencia)
return RespostaSAT.trocar_codigo_de_ativacao(retorno)
|
[
"def",
"trocar_codigo_de_ativacao",
"(",
"self",
",",
"novo_codigo_ativacao",
",",
"opcao",
"=",
"constantes",
".",
"CODIGO_ATIVACAO_REGULAR",
",",
"codigo_emergencia",
"=",
"None",
")",
":",
"retorno",
"=",
"super",
"(",
"ClienteSATLocal",
",",
"self",
")",
".",
"trocar_codigo_de_ativacao",
"(",
"novo_codigo_ativacao",
",",
"opcao",
"=",
"opcao",
",",
"codigo_emergencia",
"=",
"codigo_emergencia",
")",
"return",
"RespostaSAT",
".",
"trocar_codigo_de_ativacao",
"(",
"retorno",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.trocar_codigo_de_ativacao`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"trocar_codigo_de_ativacao",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientelocal.py#L193-L204
|
train
|
base4sistemas/satcfe
|
satcfe/entidades.py
|
Entidade.documento
|
def documento(self, *args, **kwargs):
"""Resulta no documento XML como string, que pode ou não incluir a
declaração XML no início do documento.
"""
forcar_unicode = kwargs.pop('forcar_unicode', False)
incluir_xml_decl = kwargs.pop('incluir_xml_decl', True)
doc = ET.tostring(self._xml(*args, **kwargs),
encoding='utf-8').decode('utf-8')
if forcar_unicode:
if incluir_xml_decl:
doc = u'{}\n{}'.format(constantes.XML_DECL_UNICODE, doc)
else:
if incluir_xml_decl:
doc = '{}\n{}'.format(constantes.XML_DECL, unidecode(doc))
else:
doc = unidecode(doc)
return doc
|
python
|
def documento(self, *args, **kwargs):
"""Resulta no documento XML como string, que pode ou não incluir a
declaração XML no início do documento.
"""
forcar_unicode = kwargs.pop('forcar_unicode', False)
incluir_xml_decl = kwargs.pop('incluir_xml_decl', True)
doc = ET.tostring(self._xml(*args, **kwargs),
encoding='utf-8').decode('utf-8')
if forcar_unicode:
if incluir_xml_decl:
doc = u'{}\n{}'.format(constantes.XML_DECL_UNICODE, doc)
else:
if incluir_xml_decl:
doc = '{}\n{}'.format(constantes.XML_DECL, unidecode(doc))
else:
doc = unidecode(doc)
return doc
|
[
"def",
"documento",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"forcar_unicode",
"=",
"kwargs",
".",
"pop",
"(",
"'forcar_unicode'",
",",
"False",
")",
"incluir_xml_decl",
"=",
"kwargs",
".",
"pop",
"(",
"'incluir_xml_decl'",
",",
"True",
")",
"doc",
"=",
"ET",
".",
"tostring",
"(",
"self",
".",
"_xml",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
",",
"encoding",
"=",
"'utf-8'",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"if",
"forcar_unicode",
":",
"if",
"incluir_xml_decl",
":",
"doc",
"=",
"u'{}\\n{}'",
".",
"format",
"(",
"constantes",
".",
"XML_DECL_UNICODE",
",",
"doc",
")",
"else",
":",
"if",
"incluir_xml_decl",
":",
"doc",
"=",
"'{}\\n{}'",
".",
"format",
"(",
"constantes",
".",
"XML_DECL",
",",
"unidecode",
"(",
"doc",
")",
")",
"else",
":",
"doc",
"=",
"unidecode",
"(",
"doc",
")",
"return",
"doc"
] |
Resulta no documento XML como string, que pode ou não incluir a
declaração XML no início do documento.
|
[
"Resulta",
"no",
"documento",
"XML",
"como",
"string",
"que",
"pode",
"ou",
"não",
"incluir",
"a",
"declaração",
"XML",
"no",
"início",
"do",
"documento",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/entidades.py#L308-L324
|
train
|
nion-software/nionswift
|
nion/swift/Application.py
|
Application.start
|
def start(self, skip_choose=False, fixed_workspace_dir=None):
"""
Start the application.
Looks for workspace_location persistent string. If it doesn't find it, uses a default
workspace location.
Then checks to see if that workspace exists. If not and if skip_choose has not been
set to True, asks the user for a workspace location. User may choose new folder or
existing location. This works by putting up the dialog which will either call start
again or exit.
Creates workspace in location if it doesn't exist.
Migrates database to latest version.
Creates document model, resources path, etc.
"""
logging.getLogger("migration").setLevel(logging.INFO)
if fixed_workspace_dir:
workspace_dir = fixed_workspace_dir
else:
documents_dir = self.ui.get_document_location()
workspace_dir = os.path.join(documents_dir, "Nion Swift Libraries")
workspace_dir = self.ui.get_persistent_string("workspace_location", workspace_dir)
welcome_message_enabled = fixed_workspace_dir is None
profile, is_created = Profile.create_profile(pathlib.Path(workspace_dir), welcome_message_enabled, skip_choose)
if not profile:
self.choose_library()
return True
self.workspace_dir = workspace_dir
DocumentModel.DocumentModel.computation_min_period = 0.1
document_model = DocumentModel.DocumentModel(profile=profile)
document_model.create_default_data_groups()
document_model.start_dispatcher()
# parse the hardware aliases file
alias_path = os.path.join(self.workspace_dir, "aliases.ini")
HardwareSource.parse_hardware_aliases_config_file(alias_path)
# create the document controller
document_controller = self.create_document_controller(document_model, "library")
if self.__resources_path is not None:
document_model.create_sample_images(self.__resources_path)
workspace_history = self.ui.get_persistent_object("workspace_history", list())
if workspace_dir in workspace_history:
workspace_history.remove(workspace_dir)
workspace_history.insert(0, workspace_dir)
self.ui.set_persistent_object("workspace_history", workspace_history)
self.ui.set_persistent_string("workspace_location", workspace_dir)
if welcome_message_enabled:
logging.info("Welcome to Nion Swift.")
if is_created and len(document_model.display_items) > 0:
document_controller.selected_display_panel.set_display_panel_display_item(document_model.display_items[0])
document_controller.selected_display_panel.perform_action("set_fill_mode")
return True
|
python
|
def start(self, skip_choose=False, fixed_workspace_dir=None):
"""
Start the application.
Looks for workspace_location persistent string. If it doesn't find it, uses a default
workspace location.
Then checks to see if that workspace exists. If not and if skip_choose has not been
set to True, asks the user for a workspace location. User may choose new folder or
existing location. This works by putting up the dialog which will either call start
again or exit.
Creates workspace in location if it doesn't exist.
Migrates database to latest version.
Creates document model, resources path, etc.
"""
logging.getLogger("migration").setLevel(logging.INFO)
if fixed_workspace_dir:
workspace_dir = fixed_workspace_dir
else:
documents_dir = self.ui.get_document_location()
workspace_dir = os.path.join(documents_dir, "Nion Swift Libraries")
workspace_dir = self.ui.get_persistent_string("workspace_location", workspace_dir)
welcome_message_enabled = fixed_workspace_dir is None
profile, is_created = Profile.create_profile(pathlib.Path(workspace_dir), welcome_message_enabled, skip_choose)
if not profile:
self.choose_library()
return True
self.workspace_dir = workspace_dir
DocumentModel.DocumentModel.computation_min_period = 0.1
document_model = DocumentModel.DocumentModel(profile=profile)
document_model.create_default_data_groups()
document_model.start_dispatcher()
# parse the hardware aliases file
alias_path = os.path.join(self.workspace_dir, "aliases.ini")
HardwareSource.parse_hardware_aliases_config_file(alias_path)
# create the document controller
document_controller = self.create_document_controller(document_model, "library")
if self.__resources_path is not None:
document_model.create_sample_images(self.__resources_path)
workspace_history = self.ui.get_persistent_object("workspace_history", list())
if workspace_dir in workspace_history:
workspace_history.remove(workspace_dir)
workspace_history.insert(0, workspace_dir)
self.ui.set_persistent_object("workspace_history", workspace_history)
self.ui.set_persistent_string("workspace_location", workspace_dir)
if welcome_message_enabled:
logging.info("Welcome to Nion Swift.")
if is_created and len(document_model.display_items) > 0:
document_controller.selected_display_panel.set_display_panel_display_item(document_model.display_items[0])
document_controller.selected_display_panel.perform_action("set_fill_mode")
return True
|
[
"def",
"start",
"(",
"self",
",",
"skip_choose",
"=",
"False",
",",
"fixed_workspace_dir",
"=",
"None",
")",
":",
"logging",
".",
"getLogger",
"(",
"\"migration\"",
")",
".",
"setLevel",
"(",
"logging",
".",
"INFO",
")",
"if",
"fixed_workspace_dir",
":",
"workspace_dir",
"=",
"fixed_workspace_dir",
"else",
":",
"documents_dir",
"=",
"self",
".",
"ui",
".",
"get_document_location",
"(",
")",
"workspace_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"documents_dir",
",",
"\"Nion Swift Libraries\"",
")",
"workspace_dir",
"=",
"self",
".",
"ui",
".",
"get_persistent_string",
"(",
"\"workspace_location\"",
",",
"workspace_dir",
")",
"welcome_message_enabled",
"=",
"fixed_workspace_dir",
"is",
"None",
"profile",
",",
"is_created",
"=",
"Profile",
".",
"create_profile",
"(",
"pathlib",
".",
"Path",
"(",
"workspace_dir",
")",
",",
"welcome_message_enabled",
",",
"skip_choose",
")",
"if",
"not",
"profile",
":",
"self",
".",
"choose_library",
"(",
")",
"return",
"True",
"self",
".",
"workspace_dir",
"=",
"workspace_dir",
"DocumentModel",
".",
"DocumentModel",
".",
"computation_min_period",
"=",
"0.1",
"document_model",
"=",
"DocumentModel",
".",
"DocumentModel",
"(",
"profile",
"=",
"profile",
")",
"document_model",
".",
"create_default_data_groups",
"(",
")",
"document_model",
".",
"start_dispatcher",
"(",
")",
"# parse the hardware aliases file",
"alias_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"workspace_dir",
",",
"\"aliases.ini\"",
")",
"HardwareSource",
".",
"parse_hardware_aliases_config_file",
"(",
"alias_path",
")",
"# create the document controller",
"document_controller",
"=",
"self",
".",
"create_document_controller",
"(",
"document_model",
",",
"\"library\"",
")",
"if",
"self",
".",
"__resources_path",
"is",
"not",
"None",
":",
"document_model",
".",
"create_sample_images",
"(",
"self",
".",
"__resources_path",
")",
"workspace_history",
"=",
"self",
".",
"ui",
".",
"get_persistent_object",
"(",
"\"workspace_history\"",
",",
"list",
"(",
")",
")",
"if",
"workspace_dir",
"in",
"workspace_history",
":",
"workspace_history",
".",
"remove",
"(",
"workspace_dir",
")",
"workspace_history",
".",
"insert",
"(",
"0",
",",
"workspace_dir",
")",
"self",
".",
"ui",
".",
"set_persistent_object",
"(",
"\"workspace_history\"",
",",
"workspace_history",
")",
"self",
".",
"ui",
".",
"set_persistent_string",
"(",
"\"workspace_location\"",
",",
"workspace_dir",
")",
"if",
"welcome_message_enabled",
":",
"logging",
".",
"info",
"(",
"\"Welcome to Nion Swift.\"",
")",
"if",
"is_created",
"and",
"len",
"(",
"document_model",
".",
"display_items",
")",
">",
"0",
":",
"document_controller",
".",
"selected_display_panel",
".",
"set_display_panel_display_item",
"(",
"document_model",
".",
"display_items",
"[",
"0",
"]",
")",
"document_controller",
".",
"selected_display_panel",
".",
"perform_action",
"(",
"\"set_fill_mode\"",
")",
"return",
"True"
] |
Start the application.
Looks for workspace_location persistent string. If it doesn't find it, uses a default
workspace location.
Then checks to see if that workspace exists. If not and if skip_choose has not been
set to True, asks the user for a workspace location. User may choose new folder or
existing location. This works by putting up the dialog which will either call start
again or exit.
Creates workspace in location if it doesn't exist.
Migrates database to latest version.
Creates document model, resources path, etc.
|
[
"Start",
"the",
"application",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Application.py#L176-L229
|
train
|
nion-software/nionswift
|
nion/swift/model/DataItem.py
|
sort_by_date_key
|
def sort_by_date_key(data_item):
""" A sort key to for the created field of a data item. The sort by uuid makes it determinate. """
return data_item.title + str(data_item.uuid) if data_item.is_live else str(), data_item.date_for_sorting, str(data_item.uuid)
|
python
|
def sort_by_date_key(data_item):
""" A sort key to for the created field of a data item. The sort by uuid makes it determinate. """
return data_item.title + str(data_item.uuid) if data_item.is_live else str(), data_item.date_for_sorting, str(data_item.uuid)
|
[
"def",
"sort_by_date_key",
"(",
"data_item",
")",
":",
"return",
"data_item",
".",
"title",
"+",
"str",
"(",
"data_item",
".",
"uuid",
")",
"if",
"data_item",
".",
"is_live",
"else",
"str",
"(",
")",
",",
"data_item",
".",
"date_for_sorting",
",",
"str",
"(",
"data_item",
".",
"uuid",
")"
] |
A sort key to for the created field of a data item. The sort by uuid makes it determinate.
|
[
"A",
"sort",
"key",
"to",
"for",
"the",
"created",
"field",
"of",
"a",
"data",
"item",
".",
"The",
"sort",
"by",
"uuid",
"makes",
"it",
"determinate",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DataItem.py#L1148-L1150
|
train
|
nion-software/nionswift
|
nion/swift/model/DataItem.py
|
DataItem.snapshot
|
def snapshot(self):
"""Return a new library item which is a copy of this one with any dynamic behavior made static."""
data_item = self.__class__()
# data format (temporary until moved to buffered data source)
data_item.large_format = self.large_format
data_item.set_data_and_metadata(copy.deepcopy(self.data_and_metadata), self.data_modified)
# metadata
data_item.created = self.created
data_item.timezone = self.timezone
data_item.timezone_offset = self.timezone_offset
data_item.metadata = self.metadata
data_item.title = self.title
data_item.caption = self.caption
data_item.description = self.description
data_item.session_id = self.session_id
data_item.session_data = copy.deepcopy(self.session_data)
return data_item
|
python
|
def snapshot(self):
"""Return a new library item which is a copy of this one with any dynamic behavior made static."""
data_item = self.__class__()
# data format (temporary until moved to buffered data source)
data_item.large_format = self.large_format
data_item.set_data_and_metadata(copy.deepcopy(self.data_and_metadata), self.data_modified)
# metadata
data_item.created = self.created
data_item.timezone = self.timezone
data_item.timezone_offset = self.timezone_offset
data_item.metadata = self.metadata
data_item.title = self.title
data_item.caption = self.caption
data_item.description = self.description
data_item.session_id = self.session_id
data_item.session_data = copy.deepcopy(self.session_data)
return data_item
|
[
"def",
"snapshot",
"(",
"self",
")",
":",
"data_item",
"=",
"self",
".",
"__class__",
"(",
")",
"# data format (temporary until moved to buffered data source)",
"data_item",
".",
"large_format",
"=",
"self",
".",
"large_format",
"data_item",
".",
"set_data_and_metadata",
"(",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"data_and_metadata",
")",
",",
"self",
".",
"data_modified",
")",
"# metadata",
"data_item",
".",
"created",
"=",
"self",
".",
"created",
"data_item",
".",
"timezone",
"=",
"self",
".",
"timezone",
"data_item",
".",
"timezone_offset",
"=",
"self",
".",
"timezone_offset",
"data_item",
".",
"metadata",
"=",
"self",
".",
"metadata",
"data_item",
".",
"title",
"=",
"self",
".",
"title",
"data_item",
".",
"caption",
"=",
"self",
".",
"caption",
"data_item",
".",
"description",
"=",
"self",
".",
"description",
"data_item",
".",
"session_id",
"=",
"self",
".",
"session_id",
"data_item",
".",
"session_data",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"session_data",
")",
"return",
"data_item"
] |
Return a new library item which is a copy of this one with any dynamic behavior made static.
|
[
"Return",
"a",
"new",
"library",
"item",
"which",
"is",
"a",
"copy",
"of",
"this",
"one",
"with",
"any",
"dynamic",
"behavior",
"made",
"static",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DataItem.py#L331-L347
|
train
|
nion-software/nionswift
|
nion/swift/model/DataItem.py
|
DataItem.set_r_value
|
def set_r_value(self, r_var: str, *, notify_changed=True) -> None:
"""Used to signal changes to the ref var, which are kept in document controller. ugh."""
self.r_var = r_var
self._description_changed()
if notify_changed: # set to False to set the r-value at startup; avoid marking it as a change
self.__notify_description_changed()
|
python
|
def set_r_value(self, r_var: str, *, notify_changed=True) -> None:
"""Used to signal changes to the ref var, which are kept in document controller. ugh."""
self.r_var = r_var
self._description_changed()
if notify_changed: # set to False to set the r-value at startup; avoid marking it as a change
self.__notify_description_changed()
|
[
"def",
"set_r_value",
"(",
"self",
",",
"r_var",
":",
"str",
",",
"*",
",",
"notify_changed",
"=",
"True",
")",
"->",
"None",
":",
"self",
".",
"r_var",
"=",
"r_var",
"self",
".",
"_description_changed",
"(",
")",
"if",
"notify_changed",
":",
"# set to False to set the r-value at startup; avoid marking it as a change",
"self",
".",
"__notify_description_changed",
"(",
")"
] |
Used to signal changes to the ref var, which are kept in document controller. ugh.
|
[
"Used",
"to",
"signal",
"changes",
"to",
"the",
"ref",
"var",
"which",
"are",
"kept",
"in",
"document",
"controller",
".",
"ugh",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DataItem.py#L611-L616
|
train
|
nion-software/nionswift
|
nion/swift/model/DataItem.py
|
DataItem.set_data_and_metadata
|
def set_data_and_metadata(self, data_and_metadata, data_modified=None):
"""Sets the underlying data and data-metadata to the data_and_metadata.
Note: this does not make a copy of the data.
"""
self.increment_data_ref_count()
try:
if data_and_metadata:
data = data_and_metadata.data
data_shape_and_dtype = data_and_metadata.data_shape_and_dtype
intensity_calibration = data_and_metadata.intensity_calibration
dimensional_calibrations = data_and_metadata.dimensional_calibrations
metadata = data_and_metadata.metadata
timestamp = data_and_metadata.timestamp
data_descriptor = data_and_metadata.data_descriptor
timezone = data_and_metadata.timezone or Utility.get_local_timezone()
timezone_offset = data_and_metadata.timezone_offset or Utility.TimezoneMinutesToStringConverter().convert(Utility.local_utcoffset_minutes())
new_data_and_metadata = DataAndMetadata.DataAndMetadata(self.__load_data, data_shape_and_dtype, intensity_calibration, dimensional_calibrations, metadata, timestamp, data, data_descriptor, timezone, timezone_offset)
else:
new_data_and_metadata = None
self.__set_data_metadata_direct(new_data_and_metadata, data_modified)
if self.__data_and_metadata is not None:
if self.persistent_object_context and not self.persistent_object_context.is_write_delayed(self):
self.persistent_object_context.write_external_data(self, "data", self.__data_and_metadata.data)
self.__data_and_metadata.unloadable = True
finally:
self.decrement_data_ref_count()
|
python
|
def set_data_and_metadata(self, data_and_metadata, data_modified=None):
"""Sets the underlying data and data-metadata to the data_and_metadata.
Note: this does not make a copy of the data.
"""
self.increment_data_ref_count()
try:
if data_and_metadata:
data = data_and_metadata.data
data_shape_and_dtype = data_and_metadata.data_shape_and_dtype
intensity_calibration = data_and_metadata.intensity_calibration
dimensional_calibrations = data_and_metadata.dimensional_calibrations
metadata = data_and_metadata.metadata
timestamp = data_and_metadata.timestamp
data_descriptor = data_and_metadata.data_descriptor
timezone = data_and_metadata.timezone or Utility.get_local_timezone()
timezone_offset = data_and_metadata.timezone_offset or Utility.TimezoneMinutesToStringConverter().convert(Utility.local_utcoffset_minutes())
new_data_and_metadata = DataAndMetadata.DataAndMetadata(self.__load_data, data_shape_and_dtype, intensity_calibration, dimensional_calibrations, metadata, timestamp, data, data_descriptor, timezone, timezone_offset)
else:
new_data_and_metadata = None
self.__set_data_metadata_direct(new_data_and_metadata, data_modified)
if self.__data_and_metadata is not None:
if self.persistent_object_context and not self.persistent_object_context.is_write_delayed(self):
self.persistent_object_context.write_external_data(self, "data", self.__data_and_metadata.data)
self.__data_and_metadata.unloadable = True
finally:
self.decrement_data_ref_count()
|
[
"def",
"set_data_and_metadata",
"(",
"self",
",",
"data_and_metadata",
",",
"data_modified",
"=",
"None",
")",
":",
"self",
".",
"increment_data_ref_count",
"(",
")",
"try",
":",
"if",
"data_and_metadata",
":",
"data",
"=",
"data_and_metadata",
".",
"data",
"data_shape_and_dtype",
"=",
"data_and_metadata",
".",
"data_shape_and_dtype",
"intensity_calibration",
"=",
"data_and_metadata",
".",
"intensity_calibration",
"dimensional_calibrations",
"=",
"data_and_metadata",
".",
"dimensional_calibrations",
"metadata",
"=",
"data_and_metadata",
".",
"metadata",
"timestamp",
"=",
"data_and_metadata",
".",
"timestamp",
"data_descriptor",
"=",
"data_and_metadata",
".",
"data_descriptor",
"timezone",
"=",
"data_and_metadata",
".",
"timezone",
"or",
"Utility",
".",
"get_local_timezone",
"(",
")",
"timezone_offset",
"=",
"data_and_metadata",
".",
"timezone_offset",
"or",
"Utility",
".",
"TimezoneMinutesToStringConverter",
"(",
")",
".",
"convert",
"(",
"Utility",
".",
"local_utcoffset_minutes",
"(",
")",
")",
"new_data_and_metadata",
"=",
"DataAndMetadata",
".",
"DataAndMetadata",
"(",
"self",
".",
"__load_data",
",",
"data_shape_and_dtype",
",",
"intensity_calibration",
",",
"dimensional_calibrations",
",",
"metadata",
",",
"timestamp",
",",
"data",
",",
"data_descriptor",
",",
"timezone",
",",
"timezone_offset",
")",
"else",
":",
"new_data_and_metadata",
"=",
"None",
"self",
".",
"__set_data_metadata_direct",
"(",
"new_data_and_metadata",
",",
"data_modified",
")",
"if",
"self",
".",
"__data_and_metadata",
"is",
"not",
"None",
":",
"if",
"self",
".",
"persistent_object_context",
"and",
"not",
"self",
".",
"persistent_object_context",
".",
"is_write_delayed",
"(",
"self",
")",
":",
"self",
".",
"persistent_object_context",
".",
"write_external_data",
"(",
"self",
",",
"\"data\"",
",",
"self",
".",
"__data_and_metadata",
".",
"data",
")",
"self",
".",
"__data_and_metadata",
".",
"unloadable",
"=",
"True",
"finally",
":",
"self",
".",
"decrement_data_ref_count",
"(",
")"
] |
Sets the underlying data and data-metadata to the data_and_metadata.
Note: this does not make a copy of the data.
|
[
"Sets",
"the",
"underlying",
"data",
"and",
"data",
"-",
"metadata",
"to",
"the",
"data_and_metadata",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DataItem.py#L1005-L1031
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayDataChannel.color_map_data
|
def color_map_data(self) -> typing.Optional[numpy.ndarray]:
"""Return the color map data as a uint8 ndarray with shape (256, 3)."""
if self.display_data_shape is None: # is there display data?
return None
else:
return self.__color_map_data if self.__color_map_data is not None else ColorMaps.get_color_map_data_by_id("grayscale")
|
python
|
def color_map_data(self) -> typing.Optional[numpy.ndarray]:
"""Return the color map data as a uint8 ndarray with shape (256, 3)."""
if self.display_data_shape is None: # is there display data?
return None
else:
return self.__color_map_data if self.__color_map_data is not None else ColorMaps.get_color_map_data_by_id("grayscale")
|
[
"def",
"color_map_data",
"(",
"self",
")",
"->",
"typing",
".",
"Optional",
"[",
"numpy",
".",
"ndarray",
"]",
":",
"if",
"self",
".",
"display_data_shape",
"is",
"None",
":",
"# is there display data?",
"return",
"None",
"else",
":",
"return",
"self",
".",
"__color_map_data",
"if",
"self",
".",
"__color_map_data",
"is",
"not",
"None",
"else",
"ColorMaps",
".",
"get_color_map_data_by_id",
"(",
"\"grayscale\"",
")"
] |
Return the color map data as a uint8 ndarray with shape (256, 3).
|
[
"Return",
"the",
"color",
"map",
"data",
"as",
"a",
"uint8",
"ndarray",
"with",
"shape",
"(",
"256",
"3",
")",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L670-L675
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayDataChannel.get_calculated_display_values
|
def get_calculated_display_values(self, immediate: bool=False) -> DisplayValues:
"""Return the display values.
Return the current (possibly uncalculated) display values unless 'immediate' is specified.
If 'immediate', return the existing (calculated) values if they exist. Using the 'immediate' values
avoids calculation except in cases where the display values haven't already been calculated.
"""
if not immediate or not self.__is_master or not self.__last_display_values:
if not self.__current_display_values and self.__data_item:
self.__current_display_values = DisplayValues(self.__data_item.xdata, self.sequence_index, self.collection_index, self.slice_center, self.slice_width, self.display_limits, self.complex_display_type, self.__color_map_data)
def finalize(display_values):
self.__last_display_values = display_values
self.display_values_changed_event.fire()
self.__current_display_values.on_finalize = finalize
return self.__current_display_values
return self.__last_display_values
|
python
|
def get_calculated_display_values(self, immediate: bool=False) -> DisplayValues:
"""Return the display values.
Return the current (possibly uncalculated) display values unless 'immediate' is specified.
If 'immediate', return the existing (calculated) values if they exist. Using the 'immediate' values
avoids calculation except in cases where the display values haven't already been calculated.
"""
if not immediate or not self.__is_master or not self.__last_display_values:
if not self.__current_display_values and self.__data_item:
self.__current_display_values = DisplayValues(self.__data_item.xdata, self.sequence_index, self.collection_index, self.slice_center, self.slice_width, self.display_limits, self.complex_display_type, self.__color_map_data)
def finalize(display_values):
self.__last_display_values = display_values
self.display_values_changed_event.fire()
self.__current_display_values.on_finalize = finalize
return self.__current_display_values
return self.__last_display_values
|
[
"def",
"get_calculated_display_values",
"(",
"self",
",",
"immediate",
":",
"bool",
"=",
"False",
")",
"->",
"DisplayValues",
":",
"if",
"not",
"immediate",
"or",
"not",
"self",
".",
"__is_master",
"or",
"not",
"self",
".",
"__last_display_values",
":",
"if",
"not",
"self",
".",
"__current_display_values",
"and",
"self",
".",
"__data_item",
":",
"self",
".",
"__current_display_values",
"=",
"DisplayValues",
"(",
"self",
".",
"__data_item",
".",
"xdata",
",",
"self",
".",
"sequence_index",
",",
"self",
".",
"collection_index",
",",
"self",
".",
"slice_center",
",",
"self",
".",
"slice_width",
",",
"self",
".",
"display_limits",
",",
"self",
".",
"complex_display_type",
",",
"self",
".",
"__color_map_data",
")",
"def",
"finalize",
"(",
"display_values",
")",
":",
"self",
".",
"__last_display_values",
"=",
"display_values",
"self",
".",
"display_values_changed_event",
".",
"fire",
"(",
")",
"self",
".",
"__current_display_values",
".",
"on_finalize",
"=",
"finalize",
"return",
"self",
".",
"__current_display_values",
"return",
"self",
".",
"__last_display_values"
] |
Return the display values.
Return the current (possibly uncalculated) display values unless 'immediate' is specified.
If 'immediate', return the existing (calculated) values if they exist. Using the 'immediate' values
avoids calculation except in cases where the display values haven't already been calculated.
|
[
"Return",
"the",
"display",
"values",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L718-L736
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayDataChannel.increment_display_ref_count
|
def increment_display_ref_count(self, amount: int=1):
"""Increment display reference count to indicate this library item is currently displayed."""
display_ref_count = self.__display_ref_count
self.__display_ref_count += amount
if display_ref_count == 0:
self.__is_master = True
if self.__data_item:
for _ in range(amount):
self.__data_item.increment_data_ref_count()
|
python
|
def increment_display_ref_count(self, amount: int=1):
"""Increment display reference count to indicate this library item is currently displayed."""
display_ref_count = self.__display_ref_count
self.__display_ref_count += amount
if display_ref_count == 0:
self.__is_master = True
if self.__data_item:
for _ in range(amount):
self.__data_item.increment_data_ref_count()
|
[
"def",
"increment_display_ref_count",
"(",
"self",
",",
"amount",
":",
"int",
"=",
"1",
")",
":",
"display_ref_count",
"=",
"self",
".",
"__display_ref_count",
"self",
".",
"__display_ref_count",
"+=",
"amount",
"if",
"display_ref_count",
"==",
"0",
":",
"self",
".",
"__is_master",
"=",
"True",
"if",
"self",
".",
"__data_item",
":",
"for",
"_",
"in",
"range",
"(",
"amount",
")",
":",
"self",
".",
"__data_item",
".",
"increment_data_ref_count",
"(",
")"
] |
Increment display reference count to indicate this library item is currently displayed.
|
[
"Increment",
"display",
"reference",
"count",
"to",
"indicate",
"this",
"library",
"item",
"is",
"currently",
"displayed",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L738-L746
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayDataChannel.decrement_display_ref_count
|
def decrement_display_ref_count(self, amount: int=1):
"""Decrement display reference count to indicate this library item is no longer displayed."""
assert not self._closed
self.__display_ref_count -= amount
if self.__display_ref_count == 0:
self.__is_master = False
if self.__data_item:
for _ in range(amount):
self.__data_item.decrement_data_ref_count()
|
python
|
def decrement_display_ref_count(self, amount: int=1):
"""Decrement display reference count to indicate this library item is no longer displayed."""
assert not self._closed
self.__display_ref_count -= amount
if self.__display_ref_count == 0:
self.__is_master = False
if self.__data_item:
for _ in range(amount):
self.__data_item.decrement_data_ref_count()
|
[
"def",
"decrement_display_ref_count",
"(",
"self",
",",
"amount",
":",
"int",
"=",
"1",
")",
":",
"assert",
"not",
"self",
".",
"_closed",
"self",
".",
"__display_ref_count",
"-=",
"amount",
"if",
"self",
".",
"__display_ref_count",
"==",
"0",
":",
"self",
".",
"__is_master",
"=",
"False",
"if",
"self",
".",
"__data_item",
":",
"for",
"_",
"in",
"range",
"(",
"amount",
")",
":",
"self",
".",
"__data_item",
".",
"decrement_data_ref_count",
"(",
")"
] |
Decrement display reference count to indicate this library item is no longer displayed.
|
[
"Decrement",
"display",
"reference",
"count",
"to",
"indicate",
"this",
"library",
"item",
"is",
"no",
"longer",
"displayed",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L748-L756
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayDataChannel.auto_display_limits
|
def auto_display_limits(self):
"""Calculate best display limits and set them."""
display_data_and_metadata = self.get_calculated_display_values(True).display_data_and_metadata
data = display_data_and_metadata.data if display_data_and_metadata else None
if data is not None:
# The old algorithm was a problem during EELS where the signal data
# is a small percentage of the overall data and was falling outside
# the included range. This is the new simplified algorithm. Future
# feature may allow user to select more complex algorithms.
mn, mx = numpy.nanmin(data), numpy.nanmax(data)
self.display_limits = mn, mx
|
python
|
def auto_display_limits(self):
"""Calculate best display limits and set them."""
display_data_and_metadata = self.get_calculated_display_values(True).display_data_and_metadata
data = display_data_and_metadata.data if display_data_and_metadata else None
if data is not None:
# The old algorithm was a problem during EELS where the signal data
# is a small percentage of the overall data and was falling outside
# the included range. This is the new simplified algorithm. Future
# feature may allow user to select more complex algorithms.
mn, mx = numpy.nanmin(data), numpy.nanmax(data)
self.display_limits = mn, mx
|
[
"def",
"auto_display_limits",
"(",
"self",
")",
":",
"display_data_and_metadata",
"=",
"self",
".",
"get_calculated_display_values",
"(",
"True",
")",
".",
"display_data_and_metadata",
"data",
"=",
"display_data_and_metadata",
".",
"data",
"if",
"display_data_and_metadata",
"else",
"None",
"if",
"data",
"is",
"not",
"None",
":",
"# The old algorithm was a problem during EELS where the signal data",
"# is a small percentage of the overall data and was falling outside",
"# the included range. This is the new simplified algorithm. Future",
"# feature may allow user to select more complex algorithms.",
"mn",
",",
"mx",
"=",
"numpy",
".",
"nanmin",
"(",
"data",
")",
",",
"numpy",
".",
"nanmax",
"(",
"data",
")",
"self",
".",
"display_limits",
"=",
"mn",
",",
"mx"
] |
Calculate best display limits and set them.
|
[
"Calculate",
"best",
"display",
"limits",
"and",
"set",
"them",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L766-L776
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayItem.snapshot
|
def snapshot(self):
"""Return a new library item which is a copy of this one with any dynamic behavior made static."""
display_item = self.__class__()
display_item.display_type = self.display_type
# metadata
display_item._set_persistent_property_value("title", self._get_persistent_property_value("title"))
display_item._set_persistent_property_value("caption", self._get_persistent_property_value("caption"))
display_item._set_persistent_property_value("description", self._get_persistent_property_value("description"))
display_item._set_persistent_property_value("session_id", self._get_persistent_property_value("session_id"))
display_item._set_persistent_property_value("calibration_style_id", self._get_persistent_property_value("calibration_style_id"))
display_item._set_persistent_property_value("display_properties", self._get_persistent_property_value("display_properties"))
display_item.created = self.created
for graphic in self.graphics:
display_item.add_graphic(copy.deepcopy(graphic))
for display_data_channel in self.display_data_channels:
display_item.append_display_data_channel(copy.deepcopy(display_data_channel))
# this goes after the display data channels so that the layers don't get adjusted
display_item._set_persistent_property_value("display_layers", self._get_persistent_property_value("display_layers"))
return display_item
|
python
|
def snapshot(self):
"""Return a new library item which is a copy of this one with any dynamic behavior made static."""
display_item = self.__class__()
display_item.display_type = self.display_type
# metadata
display_item._set_persistent_property_value("title", self._get_persistent_property_value("title"))
display_item._set_persistent_property_value("caption", self._get_persistent_property_value("caption"))
display_item._set_persistent_property_value("description", self._get_persistent_property_value("description"))
display_item._set_persistent_property_value("session_id", self._get_persistent_property_value("session_id"))
display_item._set_persistent_property_value("calibration_style_id", self._get_persistent_property_value("calibration_style_id"))
display_item._set_persistent_property_value("display_properties", self._get_persistent_property_value("display_properties"))
display_item.created = self.created
for graphic in self.graphics:
display_item.add_graphic(copy.deepcopy(graphic))
for display_data_channel in self.display_data_channels:
display_item.append_display_data_channel(copy.deepcopy(display_data_channel))
# this goes after the display data channels so that the layers don't get adjusted
display_item._set_persistent_property_value("display_layers", self._get_persistent_property_value("display_layers"))
return display_item
|
[
"def",
"snapshot",
"(",
"self",
")",
":",
"display_item",
"=",
"self",
".",
"__class__",
"(",
")",
"display_item",
".",
"display_type",
"=",
"self",
".",
"display_type",
"# metadata",
"display_item",
".",
"_set_persistent_property_value",
"(",
"\"title\"",
",",
"self",
".",
"_get_persistent_property_value",
"(",
"\"title\"",
")",
")",
"display_item",
".",
"_set_persistent_property_value",
"(",
"\"caption\"",
",",
"self",
".",
"_get_persistent_property_value",
"(",
"\"caption\"",
")",
")",
"display_item",
".",
"_set_persistent_property_value",
"(",
"\"description\"",
",",
"self",
".",
"_get_persistent_property_value",
"(",
"\"description\"",
")",
")",
"display_item",
".",
"_set_persistent_property_value",
"(",
"\"session_id\"",
",",
"self",
".",
"_get_persistent_property_value",
"(",
"\"session_id\"",
")",
")",
"display_item",
".",
"_set_persistent_property_value",
"(",
"\"calibration_style_id\"",
",",
"self",
".",
"_get_persistent_property_value",
"(",
"\"calibration_style_id\"",
")",
")",
"display_item",
".",
"_set_persistent_property_value",
"(",
"\"display_properties\"",
",",
"self",
".",
"_get_persistent_property_value",
"(",
"\"display_properties\"",
")",
")",
"display_item",
".",
"created",
"=",
"self",
".",
"created",
"for",
"graphic",
"in",
"self",
".",
"graphics",
":",
"display_item",
".",
"add_graphic",
"(",
"copy",
".",
"deepcopy",
"(",
"graphic",
")",
")",
"for",
"display_data_channel",
"in",
"self",
".",
"display_data_channels",
":",
"display_item",
".",
"append_display_data_channel",
"(",
"copy",
".",
"deepcopy",
"(",
"display_data_channel",
")",
")",
"# this goes after the display data channels so that the layers don't get adjusted",
"display_item",
".",
"_set_persistent_property_value",
"(",
"\"display_layers\"",
",",
"self",
".",
"_get_persistent_property_value",
"(",
"\"display_layers\"",
")",
")",
"return",
"display_item"
] |
Return a new library item which is a copy of this one with any dynamic behavior made static.
|
[
"Return",
"a",
"new",
"library",
"item",
"which",
"is",
"a",
"copy",
"of",
"this",
"one",
"with",
"any",
"dynamic",
"behavior",
"made",
"static",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L980-L998
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayItem.increment_display_ref_count
|
def increment_display_ref_count(self, amount: int=1):
"""Increment display reference count to indicate this library item is currently displayed."""
display_ref_count = self.__display_ref_count
self.__display_ref_count += amount
for display_data_channel in self.display_data_channels:
display_data_channel.increment_display_ref_count(amount)
|
python
|
def increment_display_ref_count(self, amount: int=1):
"""Increment display reference count to indicate this library item is currently displayed."""
display_ref_count = self.__display_ref_count
self.__display_ref_count += amount
for display_data_channel in self.display_data_channels:
display_data_channel.increment_display_ref_count(amount)
|
[
"def",
"increment_display_ref_count",
"(",
"self",
",",
"amount",
":",
"int",
"=",
"1",
")",
":",
"display_ref_count",
"=",
"self",
".",
"__display_ref_count",
"self",
".",
"__display_ref_count",
"+=",
"amount",
"for",
"display_data_channel",
"in",
"self",
".",
"display_data_channels",
":",
"display_data_channel",
".",
"increment_display_ref_count",
"(",
"amount",
")"
] |
Increment display reference count to indicate this library item is currently displayed.
|
[
"Increment",
"display",
"reference",
"count",
"to",
"indicate",
"this",
"library",
"item",
"is",
"currently",
"displayed",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L1167-L1172
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayItem.decrement_display_ref_count
|
def decrement_display_ref_count(self, amount: int=1):
"""Decrement display reference count to indicate this library item is no longer displayed."""
assert not self._closed
self.__display_ref_count -= amount
for display_data_channel in self.display_data_channels:
display_data_channel.decrement_display_ref_count(amount)
|
python
|
def decrement_display_ref_count(self, amount: int=1):
"""Decrement display reference count to indicate this library item is no longer displayed."""
assert not self._closed
self.__display_ref_count -= amount
for display_data_channel in self.display_data_channels:
display_data_channel.decrement_display_ref_count(amount)
|
[
"def",
"decrement_display_ref_count",
"(",
"self",
",",
"amount",
":",
"int",
"=",
"1",
")",
":",
"assert",
"not",
"self",
".",
"_closed",
"self",
".",
"__display_ref_count",
"-=",
"amount",
"for",
"display_data_channel",
"in",
"self",
".",
"display_data_channels",
":",
"display_data_channel",
".",
"decrement_display_ref_count",
"(",
"amount",
")"
] |
Decrement display reference count to indicate this library item is no longer displayed.
|
[
"Decrement",
"display",
"reference",
"count",
"to",
"indicate",
"this",
"library",
"item",
"is",
"no",
"longer",
"displayed",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L1174-L1179
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayItem.remove_graphic
|
def remove_graphic(self, graphic: Graphics.Graphic, *, safe: bool=False) -> typing.Optional[typing.Sequence]:
"""Remove a graphic, but do it through the container, so dependencies can be tracked."""
return self.remove_model_item(self, "graphics", graphic, safe=safe)
|
python
|
def remove_graphic(self, graphic: Graphics.Graphic, *, safe: bool=False) -> typing.Optional[typing.Sequence]:
"""Remove a graphic, but do it through the container, so dependencies can be tracked."""
return self.remove_model_item(self, "graphics", graphic, safe=safe)
|
[
"def",
"remove_graphic",
"(",
"self",
",",
"graphic",
":",
"Graphics",
".",
"Graphic",
",",
"*",
",",
"safe",
":",
"bool",
"=",
"False",
")",
"->",
"typing",
".",
"Optional",
"[",
"typing",
".",
"Sequence",
"]",
":",
"return",
"self",
".",
"remove_model_item",
"(",
"self",
",",
"\"graphics\"",
",",
"graphic",
",",
"safe",
"=",
"safe",
")"
] |
Remove a graphic, but do it through the container, so dependencies can be tracked.
|
[
"Remove",
"a",
"graphic",
"but",
"do",
"it",
"through",
"the",
"container",
"so",
"dependencies",
"can",
"be",
"tracked",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L1446-L1448
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayItem.dimensional_shape
|
def dimensional_shape(self) -> typing.Optional[typing.Tuple[int, ...]]:
"""Shape of the underlying data, if only one."""
if not self.__data_and_metadata:
return None
return self.__data_and_metadata.dimensional_shape
|
python
|
def dimensional_shape(self) -> typing.Optional[typing.Tuple[int, ...]]:
"""Shape of the underlying data, if only one."""
if not self.__data_and_metadata:
return None
return self.__data_and_metadata.dimensional_shape
|
[
"def",
"dimensional_shape",
"(",
"self",
")",
"->",
"typing",
".",
"Optional",
"[",
"typing",
".",
"Tuple",
"[",
"int",
",",
"...",
"]",
"]",
":",
"if",
"not",
"self",
".",
"__data_and_metadata",
":",
"return",
"None",
"return",
"self",
".",
"__data_and_metadata",
".",
"dimensional_shape"
] |
Shape of the underlying data, if only one.
|
[
"Shape",
"of",
"the",
"underlying",
"data",
"if",
"only",
"one",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L1482-L1486
|
train
|
nion-software/nionswift
|
nion/swift/model/DisplayItem.py
|
DisplayItem.view_to_intervals
|
def view_to_intervals(self, data_and_metadata: DataAndMetadata.DataAndMetadata, intervals: typing.List[typing.Tuple[float, float]]) -> None:
"""Change the view to encompass the channels and data represented by the given intervals."""
left = None
right = None
for interval in intervals:
left = min(left, interval[0]) if left is not None else interval[0]
right = max(right, interval[1]) if right is not None else interval[1]
left = left if left is not None else 0.0
right = right if right is not None else 1.0
extra = (right - left) * 0.5
left_channel = int(max(0.0, left - extra) * data_and_metadata.data_shape[-1])
right_channel = int(min(1.0, right + extra) * data_and_metadata.data_shape[-1])
self.set_display_property("left_channel", left_channel)
self.set_display_property("right_channel", right_channel)
data_min = numpy.amin(data_and_metadata.data[..., left_channel:right_channel])
data_max = numpy.amax(data_and_metadata.data[..., left_channel:right_channel])
if data_min > 0 and data_max > 0:
self.set_display_property("y_min", 0.0)
self.set_display_property("y_max", data_max * 1.2)
elif data_min < 0 and data_max < 0:
self.set_display_property("y_min", data_min * 1.2)
self.set_display_property("y_max", 0.0)
else:
self.set_display_property("y_min", data_min * 1.2)
self.set_display_property("y_max", data_max * 1.2)
|
python
|
def view_to_intervals(self, data_and_metadata: DataAndMetadata.DataAndMetadata, intervals: typing.List[typing.Tuple[float, float]]) -> None:
"""Change the view to encompass the channels and data represented by the given intervals."""
left = None
right = None
for interval in intervals:
left = min(left, interval[0]) if left is not None else interval[0]
right = max(right, interval[1]) if right is not None else interval[1]
left = left if left is not None else 0.0
right = right if right is not None else 1.0
extra = (right - left) * 0.5
left_channel = int(max(0.0, left - extra) * data_and_metadata.data_shape[-1])
right_channel = int(min(1.0, right + extra) * data_and_metadata.data_shape[-1])
self.set_display_property("left_channel", left_channel)
self.set_display_property("right_channel", right_channel)
data_min = numpy.amin(data_and_metadata.data[..., left_channel:right_channel])
data_max = numpy.amax(data_and_metadata.data[..., left_channel:right_channel])
if data_min > 0 and data_max > 0:
self.set_display_property("y_min", 0.0)
self.set_display_property("y_max", data_max * 1.2)
elif data_min < 0 and data_max < 0:
self.set_display_property("y_min", data_min * 1.2)
self.set_display_property("y_max", 0.0)
else:
self.set_display_property("y_min", data_min * 1.2)
self.set_display_property("y_max", data_max * 1.2)
|
[
"def",
"view_to_intervals",
"(",
"self",
",",
"data_and_metadata",
":",
"DataAndMetadata",
".",
"DataAndMetadata",
",",
"intervals",
":",
"typing",
".",
"List",
"[",
"typing",
".",
"Tuple",
"[",
"float",
",",
"float",
"]",
"]",
")",
"->",
"None",
":",
"left",
"=",
"None",
"right",
"=",
"None",
"for",
"interval",
"in",
"intervals",
":",
"left",
"=",
"min",
"(",
"left",
",",
"interval",
"[",
"0",
"]",
")",
"if",
"left",
"is",
"not",
"None",
"else",
"interval",
"[",
"0",
"]",
"right",
"=",
"max",
"(",
"right",
",",
"interval",
"[",
"1",
"]",
")",
"if",
"right",
"is",
"not",
"None",
"else",
"interval",
"[",
"1",
"]",
"left",
"=",
"left",
"if",
"left",
"is",
"not",
"None",
"else",
"0.0",
"right",
"=",
"right",
"if",
"right",
"is",
"not",
"None",
"else",
"1.0",
"extra",
"=",
"(",
"right",
"-",
"left",
")",
"*",
"0.5",
"left_channel",
"=",
"int",
"(",
"max",
"(",
"0.0",
",",
"left",
"-",
"extra",
")",
"*",
"data_and_metadata",
".",
"data_shape",
"[",
"-",
"1",
"]",
")",
"right_channel",
"=",
"int",
"(",
"min",
"(",
"1.0",
",",
"right",
"+",
"extra",
")",
"*",
"data_and_metadata",
".",
"data_shape",
"[",
"-",
"1",
"]",
")",
"self",
".",
"set_display_property",
"(",
"\"left_channel\"",
",",
"left_channel",
")",
"self",
".",
"set_display_property",
"(",
"\"right_channel\"",
",",
"right_channel",
")",
"data_min",
"=",
"numpy",
".",
"amin",
"(",
"data_and_metadata",
".",
"data",
"[",
"...",
",",
"left_channel",
":",
"right_channel",
"]",
")",
"data_max",
"=",
"numpy",
".",
"amax",
"(",
"data_and_metadata",
".",
"data",
"[",
"...",
",",
"left_channel",
":",
"right_channel",
"]",
")",
"if",
"data_min",
">",
"0",
"and",
"data_max",
">",
"0",
":",
"self",
".",
"set_display_property",
"(",
"\"y_min\"",
",",
"0.0",
")",
"self",
".",
"set_display_property",
"(",
"\"y_max\"",
",",
"data_max",
"*",
"1.2",
")",
"elif",
"data_min",
"<",
"0",
"and",
"data_max",
"<",
"0",
":",
"self",
".",
"set_display_property",
"(",
"\"y_min\"",
",",
"data_min",
"*",
"1.2",
")",
"self",
".",
"set_display_property",
"(",
"\"y_max\"",
",",
"0.0",
")",
"else",
":",
"self",
".",
"set_display_property",
"(",
"\"y_min\"",
",",
"data_min",
"*",
"1.2",
")",
"self",
".",
"set_display_property",
"(",
"\"y_max\"",
",",
"data_max",
"*",
"1.2",
")"
] |
Change the view to encompass the channels and data represented by the given intervals.
|
[
"Change",
"the",
"view",
"to",
"encompass",
"the",
"channels",
"and",
"data",
"represented",
"by",
"the",
"given",
"intervals",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DisplayItem.py#L1587-L1611
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
write_local_file
|
def write_local_file(fp, name_bytes, writer, dt):
"""
Writes a zip file local file header structure at the current file position.
Returns data_len, crc32 for the data.
:param fp: the file point to which to write the header
:param name: the name of the file
:param writer: a function taking an fp parameter to do the writing, returns crc32
:param dt: the datetime to write to the archive
"""
fp.write(struct.pack('I', 0x04034b50)) # local file header
fp.write(struct.pack('H', 10)) # extract version (default)
fp.write(struct.pack('H', 0)) # general purpose bits
fp.write(struct.pack('H', 0)) # compression method
msdos_date = int(dt.year - 1980) << 9 | int(dt.month) << 5 | int(dt.day)
msdos_time = int(dt.hour) << 11 | int(dt.minute) << 5 | int(dt.second)
fp.write(struct.pack('H', msdos_time)) # extract version (default)
fp.write(struct.pack('H', msdos_date)) # extract version (default)
crc32_pos = fp.tell()
fp.write(struct.pack('I', 0)) # crc32 placeholder
data_len_pos = fp.tell()
fp.write(struct.pack('I', 0)) # compressed length placeholder
fp.write(struct.pack('I', 0)) # uncompressed length placeholder
fp.write(struct.pack('H', len(name_bytes))) # name length
fp.write(struct.pack('H', 0)) # extra length
fp.write(name_bytes)
data_start_pos = fp.tell()
crc32 = writer(fp)
data_end_pos = fp.tell()
data_len = data_end_pos - data_start_pos
fp.seek(crc32_pos)
fp.write(struct.pack('I', crc32)) # crc32
fp.seek(data_len_pos)
fp.write(struct.pack('I', data_len)) # compressed length placeholder
fp.write(struct.pack('I', data_len)) # uncompressed length placeholder
fp.seek(data_end_pos)
return data_len, crc32
|
python
|
def write_local_file(fp, name_bytes, writer, dt):
"""
Writes a zip file local file header structure at the current file position.
Returns data_len, crc32 for the data.
:param fp: the file point to which to write the header
:param name: the name of the file
:param writer: a function taking an fp parameter to do the writing, returns crc32
:param dt: the datetime to write to the archive
"""
fp.write(struct.pack('I', 0x04034b50)) # local file header
fp.write(struct.pack('H', 10)) # extract version (default)
fp.write(struct.pack('H', 0)) # general purpose bits
fp.write(struct.pack('H', 0)) # compression method
msdos_date = int(dt.year - 1980) << 9 | int(dt.month) << 5 | int(dt.day)
msdos_time = int(dt.hour) << 11 | int(dt.minute) << 5 | int(dt.second)
fp.write(struct.pack('H', msdos_time)) # extract version (default)
fp.write(struct.pack('H', msdos_date)) # extract version (default)
crc32_pos = fp.tell()
fp.write(struct.pack('I', 0)) # crc32 placeholder
data_len_pos = fp.tell()
fp.write(struct.pack('I', 0)) # compressed length placeholder
fp.write(struct.pack('I', 0)) # uncompressed length placeholder
fp.write(struct.pack('H', len(name_bytes))) # name length
fp.write(struct.pack('H', 0)) # extra length
fp.write(name_bytes)
data_start_pos = fp.tell()
crc32 = writer(fp)
data_end_pos = fp.tell()
data_len = data_end_pos - data_start_pos
fp.seek(crc32_pos)
fp.write(struct.pack('I', crc32)) # crc32
fp.seek(data_len_pos)
fp.write(struct.pack('I', data_len)) # compressed length placeholder
fp.write(struct.pack('I', data_len)) # uncompressed length placeholder
fp.seek(data_end_pos)
return data_len, crc32
|
[
"def",
"write_local_file",
"(",
"fp",
",",
"name_bytes",
",",
"writer",
",",
"dt",
")",
":",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"0x04034b50",
")",
")",
"# local file header",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"10",
")",
")",
"# extract version (default)",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# general purpose bits",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# compression method",
"msdos_date",
"=",
"int",
"(",
"dt",
".",
"year",
"-",
"1980",
")",
"<<",
"9",
"|",
"int",
"(",
"dt",
".",
"month",
")",
"<<",
"5",
"|",
"int",
"(",
"dt",
".",
"day",
")",
"msdos_time",
"=",
"int",
"(",
"dt",
".",
"hour",
")",
"<<",
"11",
"|",
"int",
"(",
"dt",
".",
"minute",
")",
"<<",
"5",
"|",
"int",
"(",
"dt",
".",
"second",
")",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"msdos_time",
")",
")",
"# extract version (default)",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"msdos_date",
")",
")",
"# extract version (default)",
"crc32_pos",
"=",
"fp",
".",
"tell",
"(",
")",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"0",
")",
")",
"# crc32 placeholder",
"data_len_pos",
"=",
"fp",
".",
"tell",
"(",
")",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"0",
")",
")",
"# compressed length placeholder",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"0",
")",
")",
"# uncompressed length placeholder",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"len",
"(",
"name_bytes",
")",
")",
")",
"# name length",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# extra length",
"fp",
".",
"write",
"(",
"name_bytes",
")",
"data_start_pos",
"=",
"fp",
".",
"tell",
"(",
")",
"crc32",
"=",
"writer",
"(",
"fp",
")",
"data_end_pos",
"=",
"fp",
".",
"tell",
"(",
")",
"data_len",
"=",
"data_end_pos",
"-",
"data_start_pos",
"fp",
".",
"seek",
"(",
"crc32_pos",
")",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"crc32",
")",
")",
"# crc32",
"fp",
".",
"seek",
"(",
"data_len_pos",
")",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"data_len",
")",
")",
"# compressed length placeholder",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"data_len",
")",
")",
"# uncompressed length placeholder",
"fp",
".",
"seek",
"(",
"data_end_pos",
")",
"return",
"data_len",
",",
"crc32"
] |
Writes a zip file local file header structure at the current file position.
Returns data_len, crc32 for the data.
:param fp: the file point to which to write the header
:param name: the name of the file
:param writer: a function taking an fp parameter to do the writing, returns crc32
:param dt: the datetime to write to the archive
|
[
"Writes",
"a",
"zip",
"file",
"local",
"file",
"header",
"structure",
"at",
"the",
"current",
"file",
"position",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L40-L77
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
write_directory_data
|
def write_directory_data(fp, offset, name_bytes, data_len, crc32, dt):
"""
Write a zip fie directory entry at the current file position
:param fp: the file point to which to write the header
:param offset: the offset of the associated local file header
:param name: the name of the file
:param data_len: the length of data that will be written to the archive
:param crc32: the crc32 of the data to be written
:param dt: the datetime to write to the archive
"""
fp.write(struct.pack('I', 0x02014b50)) # central directory header
fp.write(struct.pack('H', 10)) # made by version (default)
fp.write(struct.pack('H', 10)) # extract version (default)
fp.write(struct.pack('H', 0)) # general purpose bits
fp.write(struct.pack('H', 0)) # compression method
msdos_date = int(dt.year - 1980) << 9 | int(dt.month) << 5 | int(dt.day)
msdos_time = int(dt.hour) << 11 | int(dt.minute) << 5 | int(dt.second)
fp.write(struct.pack('H', msdos_time)) # extract version (default)
fp.write(struct.pack('H', msdos_date)) # extract version (default)
fp.write(struct.pack('I', crc32)) # crc32
fp.write(struct.pack('I', data_len)) # compressed length
fp.write(struct.pack('I', data_len)) # uncompressed length
fp.write(struct.pack('H', len(name_bytes))) # name length
fp.write(struct.pack('H', 0)) # extra length
fp.write(struct.pack('H', 0)) # comments length
fp.write(struct.pack('H', 0)) # disk number
fp.write(struct.pack('H', 0)) # internal file attributes
fp.write(struct.pack('I', 0)) # external file attributes
fp.write(struct.pack('I', offset)) # relative offset of file header
fp.write(name_bytes)
|
python
|
def write_directory_data(fp, offset, name_bytes, data_len, crc32, dt):
"""
Write a zip fie directory entry at the current file position
:param fp: the file point to which to write the header
:param offset: the offset of the associated local file header
:param name: the name of the file
:param data_len: the length of data that will be written to the archive
:param crc32: the crc32 of the data to be written
:param dt: the datetime to write to the archive
"""
fp.write(struct.pack('I', 0x02014b50)) # central directory header
fp.write(struct.pack('H', 10)) # made by version (default)
fp.write(struct.pack('H', 10)) # extract version (default)
fp.write(struct.pack('H', 0)) # general purpose bits
fp.write(struct.pack('H', 0)) # compression method
msdos_date = int(dt.year - 1980) << 9 | int(dt.month) << 5 | int(dt.day)
msdos_time = int(dt.hour) << 11 | int(dt.minute) << 5 | int(dt.second)
fp.write(struct.pack('H', msdos_time)) # extract version (default)
fp.write(struct.pack('H', msdos_date)) # extract version (default)
fp.write(struct.pack('I', crc32)) # crc32
fp.write(struct.pack('I', data_len)) # compressed length
fp.write(struct.pack('I', data_len)) # uncompressed length
fp.write(struct.pack('H', len(name_bytes))) # name length
fp.write(struct.pack('H', 0)) # extra length
fp.write(struct.pack('H', 0)) # comments length
fp.write(struct.pack('H', 0)) # disk number
fp.write(struct.pack('H', 0)) # internal file attributes
fp.write(struct.pack('I', 0)) # external file attributes
fp.write(struct.pack('I', offset)) # relative offset of file header
fp.write(name_bytes)
|
[
"def",
"write_directory_data",
"(",
"fp",
",",
"offset",
",",
"name_bytes",
",",
"data_len",
",",
"crc32",
",",
"dt",
")",
":",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"0x02014b50",
")",
")",
"# central directory header",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"10",
")",
")",
"# made by version (default)",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"10",
")",
")",
"# extract version (default)",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# general purpose bits",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# compression method",
"msdos_date",
"=",
"int",
"(",
"dt",
".",
"year",
"-",
"1980",
")",
"<<",
"9",
"|",
"int",
"(",
"dt",
".",
"month",
")",
"<<",
"5",
"|",
"int",
"(",
"dt",
".",
"day",
")",
"msdos_time",
"=",
"int",
"(",
"dt",
".",
"hour",
")",
"<<",
"11",
"|",
"int",
"(",
"dt",
".",
"minute",
")",
"<<",
"5",
"|",
"int",
"(",
"dt",
".",
"second",
")",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"msdos_time",
")",
")",
"# extract version (default)",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"msdos_date",
")",
")",
"# extract version (default)",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"crc32",
")",
")",
"# crc32",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"data_len",
")",
")",
"# compressed length",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"data_len",
")",
")",
"# uncompressed length",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"len",
"(",
"name_bytes",
")",
")",
")",
"# name length",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# extra length",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# comments length",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# disk number",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# internal file attributes",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"0",
")",
")",
"# external file attributes",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"offset",
")",
")",
"# relative offset of file header",
"fp",
".",
"write",
"(",
"name_bytes",
")"
] |
Write a zip fie directory entry at the current file position
:param fp: the file point to which to write the header
:param offset: the offset of the associated local file header
:param name: the name of the file
:param data_len: the length of data that will be written to the archive
:param crc32: the crc32 of the data to be written
:param dt: the datetime to write to the archive
|
[
"Write",
"a",
"zip",
"fie",
"directory",
"entry",
"at",
"the",
"current",
"file",
"position"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L80-L110
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
write_end_of_directory
|
def write_end_of_directory(fp, dir_size, dir_offset, count):
"""
Write zip file end of directory header at the current file position
:param fp: the file point to which to write the header
:param dir_size: the total size of the directory
:param dir_offset: the start of the first directory header
:param count: the count of files
"""
fp.write(struct.pack('I', 0x06054b50)) # central directory header
fp.write(struct.pack('H', 0)) # disk number
fp.write(struct.pack('H', 0)) # disk number
fp.write(struct.pack('H', count)) # number of files
fp.write(struct.pack('H', count)) # number of files
fp.write(struct.pack('I', dir_size)) # central directory size
fp.write(struct.pack('I', dir_offset)) # central directory offset
fp.write(struct.pack('H', 0))
|
python
|
def write_end_of_directory(fp, dir_size, dir_offset, count):
"""
Write zip file end of directory header at the current file position
:param fp: the file point to which to write the header
:param dir_size: the total size of the directory
:param dir_offset: the start of the first directory header
:param count: the count of files
"""
fp.write(struct.pack('I', 0x06054b50)) # central directory header
fp.write(struct.pack('H', 0)) # disk number
fp.write(struct.pack('H', 0)) # disk number
fp.write(struct.pack('H', count)) # number of files
fp.write(struct.pack('H', count)) # number of files
fp.write(struct.pack('I', dir_size)) # central directory size
fp.write(struct.pack('I', dir_offset)) # central directory offset
fp.write(struct.pack('H', 0))
|
[
"def",
"write_end_of_directory",
"(",
"fp",
",",
"dir_size",
",",
"dir_offset",
",",
"count",
")",
":",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"0x06054b50",
")",
")",
"# central directory header",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# disk number",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")",
"# disk number",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"count",
")",
")",
"# number of files",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"count",
")",
")",
"# number of files",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"dir_size",
")",
")",
"# central directory size",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'I'",
",",
"dir_offset",
")",
")",
"# central directory offset",
"fp",
".",
"write",
"(",
"struct",
".",
"pack",
"(",
"'H'",
",",
"0",
")",
")"
] |
Write zip file end of directory header at the current file position
:param fp: the file point to which to write the header
:param dir_size: the total size of the directory
:param dir_offset: the start of the first directory header
:param count: the count of files
|
[
"Write",
"zip",
"file",
"end",
"of",
"directory",
"header",
"at",
"the",
"current",
"file",
"position"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L113-L129
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
write_zip_fp
|
def write_zip_fp(fp, data, properties, dir_data_list=None):
"""
Write custom zip file of data and properties to fp
:param fp: the file point to which to write the header
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
:param dir_data_list: optional list of directory header information structures
If dir_data_list is specified, data should be None and properties should
be specified. Then the existing data structure will be left alone and only
the directory headers and end of directory header will be written.
Otherwise, if both data and properties are specified, both are written
out in full.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
"""
assert data is not None or properties is not None
# dir_data_list has the format: local file record offset, name, data length, crc32
dir_data_list = list() if dir_data_list is None else dir_data_list
dt = datetime.datetime.now()
if data is not None:
offset_data = fp.tell()
def write_data(fp):
numpy_start_pos = fp.tell()
numpy.save(fp, data)
numpy_end_pos = fp.tell()
fp.seek(numpy_start_pos)
data_c = numpy.require(data, dtype=data.dtype, requirements=["C_CONTIGUOUS"])
header_data = fp.read((numpy_end_pos - numpy_start_pos) - data_c.nbytes) # read the header
data_crc32 = binascii.crc32(data_c.data, binascii.crc32(header_data)) & 0xFFFFFFFF
fp.seek(numpy_end_pos)
return data_crc32
data_len, crc32 = write_local_file(fp, b"data.npy", write_data, dt)
dir_data_list.append((offset_data, b"data.npy", data_len, crc32))
if properties is not None:
json_str = str()
try:
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Geometry.IntPoint) or isinstance(obj, Geometry.IntSize) or isinstance(obj, Geometry.IntRect) or isinstance(obj, Geometry.FloatPoint) or isinstance(obj, Geometry.FloatSize) or isinstance(obj, Geometry.FloatRect):
return tuple(obj)
else:
return json.JSONEncoder.default(self, obj)
json_io = io.StringIO()
json.dump(properties, json_io, cls=JSONEncoder)
json_str = json_io.getvalue()
except Exception as e:
# catch exceptions to avoid corrupt zip files
import traceback
logging.error("Exception writing zip file %s" + str(e))
traceback.print_exc()
traceback.print_stack()
def write_json(fp):
json_bytes = bytes(json_str, 'ISO-8859-1')
fp.write(json_bytes)
return binascii.crc32(json_bytes) & 0xFFFFFFFF
offset_json = fp.tell()
json_len, json_crc32 = write_local_file(fp, b"metadata.json", write_json, dt)
dir_data_list.append((offset_json, b"metadata.json", json_len, json_crc32))
dir_offset = fp.tell()
for offset, name_bytes, data_len, crc32 in dir_data_list:
write_directory_data(fp, offset, name_bytes, data_len, crc32, dt)
dir_size = fp.tell() - dir_offset
write_end_of_directory(fp, dir_size, dir_offset, len(dir_data_list))
fp.truncate()
|
python
|
def write_zip_fp(fp, data, properties, dir_data_list=None):
"""
Write custom zip file of data and properties to fp
:param fp: the file point to which to write the header
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
:param dir_data_list: optional list of directory header information structures
If dir_data_list is specified, data should be None and properties should
be specified. Then the existing data structure will be left alone and only
the directory headers and end of directory header will be written.
Otherwise, if both data and properties are specified, both are written
out in full.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
"""
assert data is not None or properties is not None
# dir_data_list has the format: local file record offset, name, data length, crc32
dir_data_list = list() if dir_data_list is None else dir_data_list
dt = datetime.datetime.now()
if data is not None:
offset_data = fp.tell()
def write_data(fp):
numpy_start_pos = fp.tell()
numpy.save(fp, data)
numpy_end_pos = fp.tell()
fp.seek(numpy_start_pos)
data_c = numpy.require(data, dtype=data.dtype, requirements=["C_CONTIGUOUS"])
header_data = fp.read((numpy_end_pos - numpy_start_pos) - data_c.nbytes) # read the header
data_crc32 = binascii.crc32(data_c.data, binascii.crc32(header_data)) & 0xFFFFFFFF
fp.seek(numpy_end_pos)
return data_crc32
data_len, crc32 = write_local_file(fp, b"data.npy", write_data, dt)
dir_data_list.append((offset_data, b"data.npy", data_len, crc32))
if properties is not None:
json_str = str()
try:
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Geometry.IntPoint) or isinstance(obj, Geometry.IntSize) or isinstance(obj, Geometry.IntRect) or isinstance(obj, Geometry.FloatPoint) or isinstance(obj, Geometry.FloatSize) or isinstance(obj, Geometry.FloatRect):
return tuple(obj)
else:
return json.JSONEncoder.default(self, obj)
json_io = io.StringIO()
json.dump(properties, json_io, cls=JSONEncoder)
json_str = json_io.getvalue()
except Exception as e:
# catch exceptions to avoid corrupt zip files
import traceback
logging.error("Exception writing zip file %s" + str(e))
traceback.print_exc()
traceback.print_stack()
def write_json(fp):
json_bytes = bytes(json_str, 'ISO-8859-1')
fp.write(json_bytes)
return binascii.crc32(json_bytes) & 0xFFFFFFFF
offset_json = fp.tell()
json_len, json_crc32 = write_local_file(fp, b"metadata.json", write_json, dt)
dir_data_list.append((offset_json, b"metadata.json", json_len, json_crc32))
dir_offset = fp.tell()
for offset, name_bytes, data_len, crc32 in dir_data_list:
write_directory_data(fp, offset, name_bytes, data_len, crc32, dt)
dir_size = fp.tell() - dir_offset
write_end_of_directory(fp, dir_size, dir_offset, len(dir_data_list))
fp.truncate()
|
[
"def",
"write_zip_fp",
"(",
"fp",
",",
"data",
",",
"properties",
",",
"dir_data_list",
"=",
"None",
")",
":",
"assert",
"data",
"is",
"not",
"None",
"or",
"properties",
"is",
"not",
"None",
"# dir_data_list has the format: local file record offset, name, data length, crc32",
"dir_data_list",
"=",
"list",
"(",
")",
"if",
"dir_data_list",
"is",
"None",
"else",
"dir_data_list",
"dt",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"if",
"data",
"is",
"not",
"None",
":",
"offset_data",
"=",
"fp",
".",
"tell",
"(",
")",
"def",
"write_data",
"(",
"fp",
")",
":",
"numpy_start_pos",
"=",
"fp",
".",
"tell",
"(",
")",
"numpy",
".",
"save",
"(",
"fp",
",",
"data",
")",
"numpy_end_pos",
"=",
"fp",
".",
"tell",
"(",
")",
"fp",
".",
"seek",
"(",
"numpy_start_pos",
")",
"data_c",
"=",
"numpy",
".",
"require",
"(",
"data",
",",
"dtype",
"=",
"data",
".",
"dtype",
",",
"requirements",
"=",
"[",
"\"C_CONTIGUOUS\"",
"]",
")",
"header_data",
"=",
"fp",
".",
"read",
"(",
"(",
"numpy_end_pos",
"-",
"numpy_start_pos",
")",
"-",
"data_c",
".",
"nbytes",
")",
"# read the header",
"data_crc32",
"=",
"binascii",
".",
"crc32",
"(",
"data_c",
".",
"data",
",",
"binascii",
".",
"crc32",
"(",
"header_data",
")",
")",
"&",
"0xFFFFFFFF",
"fp",
".",
"seek",
"(",
"numpy_end_pos",
")",
"return",
"data_crc32",
"data_len",
",",
"crc32",
"=",
"write_local_file",
"(",
"fp",
",",
"b\"data.npy\"",
",",
"write_data",
",",
"dt",
")",
"dir_data_list",
".",
"append",
"(",
"(",
"offset_data",
",",
"b\"data.npy\"",
",",
"data_len",
",",
"crc32",
")",
")",
"if",
"properties",
"is",
"not",
"None",
":",
"json_str",
"=",
"str",
"(",
")",
"try",
":",
"class",
"JSONEncoder",
"(",
"json",
".",
"JSONEncoder",
")",
":",
"def",
"default",
"(",
"self",
",",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"Geometry",
".",
"IntPoint",
")",
"or",
"isinstance",
"(",
"obj",
",",
"Geometry",
".",
"IntSize",
")",
"or",
"isinstance",
"(",
"obj",
",",
"Geometry",
".",
"IntRect",
")",
"or",
"isinstance",
"(",
"obj",
",",
"Geometry",
".",
"FloatPoint",
")",
"or",
"isinstance",
"(",
"obj",
",",
"Geometry",
".",
"FloatSize",
")",
"or",
"isinstance",
"(",
"obj",
",",
"Geometry",
".",
"FloatRect",
")",
":",
"return",
"tuple",
"(",
"obj",
")",
"else",
":",
"return",
"json",
".",
"JSONEncoder",
".",
"default",
"(",
"self",
",",
"obj",
")",
"json_io",
"=",
"io",
".",
"StringIO",
"(",
")",
"json",
".",
"dump",
"(",
"properties",
",",
"json_io",
",",
"cls",
"=",
"JSONEncoder",
")",
"json_str",
"=",
"json_io",
".",
"getvalue",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"# catch exceptions to avoid corrupt zip files",
"import",
"traceback",
"logging",
".",
"error",
"(",
"\"Exception writing zip file %s\"",
"+",
"str",
"(",
"e",
")",
")",
"traceback",
".",
"print_exc",
"(",
")",
"traceback",
".",
"print_stack",
"(",
")",
"def",
"write_json",
"(",
"fp",
")",
":",
"json_bytes",
"=",
"bytes",
"(",
"json_str",
",",
"'ISO-8859-1'",
")",
"fp",
".",
"write",
"(",
"json_bytes",
")",
"return",
"binascii",
".",
"crc32",
"(",
"json_bytes",
")",
"&",
"0xFFFFFFFF",
"offset_json",
"=",
"fp",
".",
"tell",
"(",
")",
"json_len",
",",
"json_crc32",
"=",
"write_local_file",
"(",
"fp",
",",
"b\"metadata.json\"",
",",
"write_json",
",",
"dt",
")",
"dir_data_list",
".",
"append",
"(",
"(",
"offset_json",
",",
"b\"metadata.json\"",
",",
"json_len",
",",
"json_crc32",
")",
")",
"dir_offset",
"=",
"fp",
".",
"tell",
"(",
")",
"for",
"offset",
",",
"name_bytes",
",",
"data_len",
",",
"crc32",
"in",
"dir_data_list",
":",
"write_directory_data",
"(",
"fp",
",",
"offset",
",",
"name_bytes",
",",
"data_len",
",",
"crc32",
",",
"dt",
")",
"dir_size",
"=",
"fp",
".",
"tell",
"(",
")",
"-",
"dir_offset",
"write_end_of_directory",
"(",
"fp",
",",
"dir_size",
",",
"dir_offset",
",",
"len",
"(",
"dir_data_list",
")",
")",
"fp",
".",
"truncate",
"(",
")"
] |
Write custom zip file of data and properties to fp
:param fp: the file point to which to write the header
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
:param dir_data_list: optional list of directory header information structures
If dir_data_list is specified, data should be None and properties should
be specified. Then the existing data structure will be left alone and only
the directory headers and end of directory header will be written.
Otherwise, if both data and properties are specified, both are written
out in full.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
|
[
"Write",
"custom",
"zip",
"file",
"of",
"data",
"and",
"properties",
"to",
"fp"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L132-L199
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
write_zip
|
def write_zip(file_path, data, properties):
"""
Write custom zip file to the file path
:param file_path: the file to which to write the zip file
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
See write_zip_fp.
"""
with open(file_path, "w+b") as fp:
write_zip_fp(fp, data, properties)
|
python
|
def write_zip(file_path, data, properties):
"""
Write custom zip file to the file path
:param file_path: the file to which to write the zip file
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
See write_zip_fp.
"""
with open(file_path, "w+b") as fp:
write_zip_fp(fp, data, properties)
|
[
"def",
"write_zip",
"(",
"file_path",
",",
"data",
",",
"properties",
")",
":",
"with",
"open",
"(",
"file_path",
",",
"\"w+b\"",
")",
"as",
"fp",
":",
"write_zip_fp",
"(",
"fp",
",",
"data",
",",
"properties",
")"
] |
Write custom zip file to the file path
:param file_path: the file to which to write the zip file
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
See write_zip_fp.
|
[
"Write",
"custom",
"zip",
"file",
"to",
"the",
"file",
"path"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L202-L216
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
parse_zip
|
def parse_zip(fp):
"""
Parse the zip file headers at fp
:param fp: the file pointer from which to parse the zip file
:return: A tuple of local files, directory headers, and end of central directory
The local files are dictionary where the keys are the local file offset and the
values are each a tuple consisting of the name, data position, data length, and crc32.
The directory headers are a dictionary where the keys are the names of the files
and the values are a tuple consisting of the directory header position, and the
associated local file position.
The end of central directory is a tuple consisting of the location of the end of
central directory header and the location of the first directory header.
This method will seek to location 0 of fp and leave fp at end of file.
"""
local_files = {}
dir_files = {}
eocd = None
fp.seek(0)
while True:
pos = fp.tell()
signature = struct.unpack('I', fp.read(4))[0]
if signature == 0x04034b50:
fp.seek(pos + 14)
crc32 = struct.unpack('I', fp.read(4))[0]
fp.seek(pos + 18)
data_len = struct.unpack('I', fp.read(4))[0]
fp.seek(pos + 26)
name_len = struct.unpack('H', fp.read(2))[0]
extra_len = struct.unpack('H', fp.read(2))[0]
name_bytes = fp.read(name_len)
fp.seek(extra_len, os.SEEK_CUR)
data_pos = fp.tell()
fp.seek(data_len, os.SEEK_CUR)
local_files[pos] = (name_bytes, data_pos, data_len, crc32)
elif signature == 0x02014b50:
fp.seek(pos + 28)
name_len = struct.unpack('H', fp.read(2))[0]
extra_len = struct.unpack('H', fp.read(2))[0]
comment_len = struct.unpack('H', fp.read(2))[0]
fp.seek(pos + 42)
pos2 = struct.unpack('I', fp.read(4))[0]
name_bytes = fp.read(name_len)
fp.seek(pos + 46 + name_len + extra_len + comment_len)
dir_files[name_bytes] = (pos, pos2)
elif signature == 0x06054b50:
fp.seek(pos + 16)
pos2 = struct.unpack('I', fp.read(4))[0]
eocd = (pos, pos2)
break
else:
raise IOError()
return local_files, dir_files, eocd
|
python
|
def parse_zip(fp):
"""
Parse the zip file headers at fp
:param fp: the file pointer from which to parse the zip file
:return: A tuple of local files, directory headers, and end of central directory
The local files are dictionary where the keys are the local file offset and the
values are each a tuple consisting of the name, data position, data length, and crc32.
The directory headers are a dictionary where the keys are the names of the files
and the values are a tuple consisting of the directory header position, and the
associated local file position.
The end of central directory is a tuple consisting of the location of the end of
central directory header and the location of the first directory header.
This method will seek to location 0 of fp and leave fp at end of file.
"""
local_files = {}
dir_files = {}
eocd = None
fp.seek(0)
while True:
pos = fp.tell()
signature = struct.unpack('I', fp.read(4))[0]
if signature == 0x04034b50:
fp.seek(pos + 14)
crc32 = struct.unpack('I', fp.read(4))[0]
fp.seek(pos + 18)
data_len = struct.unpack('I', fp.read(4))[0]
fp.seek(pos + 26)
name_len = struct.unpack('H', fp.read(2))[0]
extra_len = struct.unpack('H', fp.read(2))[0]
name_bytes = fp.read(name_len)
fp.seek(extra_len, os.SEEK_CUR)
data_pos = fp.tell()
fp.seek(data_len, os.SEEK_CUR)
local_files[pos] = (name_bytes, data_pos, data_len, crc32)
elif signature == 0x02014b50:
fp.seek(pos + 28)
name_len = struct.unpack('H', fp.read(2))[0]
extra_len = struct.unpack('H', fp.read(2))[0]
comment_len = struct.unpack('H', fp.read(2))[0]
fp.seek(pos + 42)
pos2 = struct.unpack('I', fp.read(4))[0]
name_bytes = fp.read(name_len)
fp.seek(pos + 46 + name_len + extra_len + comment_len)
dir_files[name_bytes] = (pos, pos2)
elif signature == 0x06054b50:
fp.seek(pos + 16)
pos2 = struct.unpack('I', fp.read(4))[0]
eocd = (pos, pos2)
break
else:
raise IOError()
return local_files, dir_files, eocd
|
[
"def",
"parse_zip",
"(",
"fp",
")",
":",
"local_files",
"=",
"{",
"}",
"dir_files",
"=",
"{",
"}",
"eocd",
"=",
"None",
"fp",
".",
"seek",
"(",
"0",
")",
"while",
"True",
":",
"pos",
"=",
"fp",
".",
"tell",
"(",
")",
"signature",
"=",
"struct",
".",
"unpack",
"(",
"'I'",
",",
"fp",
".",
"read",
"(",
"4",
")",
")",
"[",
"0",
"]",
"if",
"signature",
"==",
"0x04034b50",
":",
"fp",
".",
"seek",
"(",
"pos",
"+",
"14",
")",
"crc32",
"=",
"struct",
".",
"unpack",
"(",
"'I'",
",",
"fp",
".",
"read",
"(",
"4",
")",
")",
"[",
"0",
"]",
"fp",
".",
"seek",
"(",
"pos",
"+",
"18",
")",
"data_len",
"=",
"struct",
".",
"unpack",
"(",
"'I'",
",",
"fp",
".",
"read",
"(",
"4",
")",
")",
"[",
"0",
"]",
"fp",
".",
"seek",
"(",
"pos",
"+",
"26",
")",
"name_len",
"=",
"struct",
".",
"unpack",
"(",
"'H'",
",",
"fp",
".",
"read",
"(",
"2",
")",
")",
"[",
"0",
"]",
"extra_len",
"=",
"struct",
".",
"unpack",
"(",
"'H'",
",",
"fp",
".",
"read",
"(",
"2",
")",
")",
"[",
"0",
"]",
"name_bytes",
"=",
"fp",
".",
"read",
"(",
"name_len",
")",
"fp",
".",
"seek",
"(",
"extra_len",
",",
"os",
".",
"SEEK_CUR",
")",
"data_pos",
"=",
"fp",
".",
"tell",
"(",
")",
"fp",
".",
"seek",
"(",
"data_len",
",",
"os",
".",
"SEEK_CUR",
")",
"local_files",
"[",
"pos",
"]",
"=",
"(",
"name_bytes",
",",
"data_pos",
",",
"data_len",
",",
"crc32",
")",
"elif",
"signature",
"==",
"0x02014b50",
":",
"fp",
".",
"seek",
"(",
"pos",
"+",
"28",
")",
"name_len",
"=",
"struct",
".",
"unpack",
"(",
"'H'",
",",
"fp",
".",
"read",
"(",
"2",
")",
")",
"[",
"0",
"]",
"extra_len",
"=",
"struct",
".",
"unpack",
"(",
"'H'",
",",
"fp",
".",
"read",
"(",
"2",
")",
")",
"[",
"0",
"]",
"comment_len",
"=",
"struct",
".",
"unpack",
"(",
"'H'",
",",
"fp",
".",
"read",
"(",
"2",
")",
")",
"[",
"0",
"]",
"fp",
".",
"seek",
"(",
"pos",
"+",
"42",
")",
"pos2",
"=",
"struct",
".",
"unpack",
"(",
"'I'",
",",
"fp",
".",
"read",
"(",
"4",
")",
")",
"[",
"0",
"]",
"name_bytes",
"=",
"fp",
".",
"read",
"(",
"name_len",
")",
"fp",
".",
"seek",
"(",
"pos",
"+",
"46",
"+",
"name_len",
"+",
"extra_len",
"+",
"comment_len",
")",
"dir_files",
"[",
"name_bytes",
"]",
"=",
"(",
"pos",
",",
"pos2",
")",
"elif",
"signature",
"==",
"0x06054b50",
":",
"fp",
".",
"seek",
"(",
"pos",
"+",
"16",
")",
"pos2",
"=",
"struct",
".",
"unpack",
"(",
"'I'",
",",
"fp",
".",
"read",
"(",
"4",
")",
")",
"[",
"0",
"]",
"eocd",
"=",
"(",
"pos",
",",
"pos2",
")",
"break",
"else",
":",
"raise",
"IOError",
"(",
")",
"return",
"local_files",
",",
"dir_files",
",",
"eocd"
] |
Parse the zip file headers at fp
:param fp: the file pointer from which to parse the zip file
:return: A tuple of local files, directory headers, and end of central directory
The local files are dictionary where the keys are the local file offset and the
values are each a tuple consisting of the name, data position, data length, and crc32.
The directory headers are a dictionary where the keys are the names of the files
and the values are a tuple consisting of the directory header position, and the
associated local file position.
The end of central directory is a tuple consisting of the location of the end of
central directory header and the location of the first directory header.
This method will seek to location 0 of fp and leave fp at end of file.
|
[
"Parse",
"the",
"zip",
"file",
"headers",
"at",
"fp"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L219-L275
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
read_data
|
def read_data(fp, local_files, dir_files, name_bytes):
"""
Read a numpy data array from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name: the name of the data file to read
:return: the numpy data array, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
"""
if name_bytes in dir_files:
fp.seek(local_files[dir_files[name_bytes][1]][1])
return numpy.load(fp)
return None
|
python
|
def read_data(fp, local_files, dir_files, name_bytes):
"""
Read a numpy data array from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name: the name of the data file to read
:return: the numpy data array, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
"""
if name_bytes in dir_files:
fp.seek(local_files[dir_files[name_bytes][1]][1])
return numpy.load(fp)
return None
|
[
"def",
"read_data",
"(",
"fp",
",",
"local_files",
",",
"dir_files",
",",
"name_bytes",
")",
":",
"if",
"name_bytes",
"in",
"dir_files",
":",
"fp",
".",
"seek",
"(",
"local_files",
"[",
"dir_files",
"[",
"name_bytes",
"]",
"[",
"1",
"]",
"]",
"[",
"1",
"]",
")",
"return",
"numpy",
".",
"load",
"(",
"fp",
")",
"return",
"None"
] |
Read a numpy data array from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name: the name of the data file to read
:return: the numpy data array, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
|
[
"Read",
"a",
"numpy",
"data",
"array",
"from",
"the",
"zip",
"file"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L278-L297
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
read_json
|
def read_json(fp, local_files, dir_files, name_bytes):
"""
Read json properties from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name: the name of the json file to read
:return: the json properites as a dictionary, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
"""
if name_bytes in dir_files:
json_pos = local_files[dir_files[name_bytes][1]][1]
json_len = local_files[dir_files[name_bytes][1]][2]
fp.seek(json_pos)
json_properties = fp.read(json_len)
return json.loads(json_properties.decode("utf-8"))
return None
|
python
|
def read_json(fp, local_files, dir_files, name_bytes):
"""
Read json properties from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name: the name of the json file to read
:return: the json properites as a dictionary, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
"""
if name_bytes in dir_files:
json_pos = local_files[dir_files[name_bytes][1]][1]
json_len = local_files[dir_files[name_bytes][1]][2]
fp.seek(json_pos)
json_properties = fp.read(json_len)
return json.loads(json_properties.decode("utf-8"))
return None
|
[
"def",
"read_json",
"(",
"fp",
",",
"local_files",
",",
"dir_files",
",",
"name_bytes",
")",
":",
"if",
"name_bytes",
"in",
"dir_files",
":",
"json_pos",
"=",
"local_files",
"[",
"dir_files",
"[",
"name_bytes",
"]",
"[",
"1",
"]",
"]",
"[",
"1",
"]",
"json_len",
"=",
"local_files",
"[",
"dir_files",
"[",
"name_bytes",
"]",
"[",
"1",
"]",
"]",
"[",
"2",
"]",
"fp",
".",
"seek",
"(",
"json_pos",
")",
"json_properties",
"=",
"fp",
".",
"read",
"(",
"json_len",
")",
"return",
"json",
".",
"loads",
"(",
"json_properties",
".",
"decode",
"(",
"\"utf-8\"",
")",
")",
"return",
"None"
] |
Read json properties from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name: the name of the json file to read
:return: the json properites as a dictionary, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
|
[
"Read",
"json",
"properties",
"from",
"the",
"zip",
"file"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L300-L322
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
rewrite_zip
|
def rewrite_zip(file_path, properties):
"""
Rewrite the json properties in the zip file
:param file_path: the file path to the zip file
:param properties: the updated properties to write to the zip file
This method will attempt to keep the data file within the zip
file intact without rewriting it. However, if the data file is not the
first item in the zip file, this method will rewrite it.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
"""
with open(file_path, "r+b") as fp:
local_files, dir_files, eocd = parse_zip(fp)
# check to make sure directory has two files, named data.npy and metadata.json, and that data.npy is first
# TODO: check compression, etc.
if len(dir_files) == 2 and b"data.npy" in dir_files and b"metadata.json" in dir_files and dir_files[b"data.npy"][1] == 0:
fp.seek(dir_files[b"metadata.json"][1])
dir_data_list = list()
local_file_pos = dir_files[b"data.npy"][1]
local_file = local_files[local_file_pos]
dir_data_list.append((local_file_pos, b"data.npy", local_file[2], local_file[3]))
write_zip_fp(fp, None, properties, dir_data_list)
else:
data = None
if b"data.npy" in dir_files:
fp.seek(local_files[dir_files[b"data.npy"][1]][1])
data = numpy.load(fp)
fp.seek(0)
write_zip_fp(fp, data, properties)
|
python
|
def rewrite_zip(file_path, properties):
"""
Rewrite the json properties in the zip file
:param file_path: the file path to the zip file
:param properties: the updated properties to write to the zip file
This method will attempt to keep the data file within the zip
file intact without rewriting it. However, if the data file is not the
first item in the zip file, this method will rewrite it.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
"""
with open(file_path, "r+b") as fp:
local_files, dir_files, eocd = parse_zip(fp)
# check to make sure directory has two files, named data.npy and metadata.json, and that data.npy is first
# TODO: check compression, etc.
if len(dir_files) == 2 and b"data.npy" in dir_files and b"metadata.json" in dir_files and dir_files[b"data.npy"][1] == 0:
fp.seek(dir_files[b"metadata.json"][1])
dir_data_list = list()
local_file_pos = dir_files[b"data.npy"][1]
local_file = local_files[local_file_pos]
dir_data_list.append((local_file_pos, b"data.npy", local_file[2], local_file[3]))
write_zip_fp(fp, None, properties, dir_data_list)
else:
data = None
if b"data.npy" in dir_files:
fp.seek(local_files[dir_files[b"data.npy"][1]][1])
data = numpy.load(fp)
fp.seek(0)
write_zip_fp(fp, data, properties)
|
[
"def",
"rewrite_zip",
"(",
"file_path",
",",
"properties",
")",
":",
"with",
"open",
"(",
"file_path",
",",
"\"r+b\"",
")",
"as",
"fp",
":",
"local_files",
",",
"dir_files",
",",
"eocd",
"=",
"parse_zip",
"(",
"fp",
")",
"# check to make sure directory has two files, named data.npy and metadata.json, and that data.npy is first",
"# TODO: check compression, etc.",
"if",
"len",
"(",
"dir_files",
")",
"==",
"2",
"and",
"b\"data.npy\"",
"in",
"dir_files",
"and",
"b\"metadata.json\"",
"in",
"dir_files",
"and",
"dir_files",
"[",
"b\"data.npy\"",
"]",
"[",
"1",
"]",
"==",
"0",
":",
"fp",
".",
"seek",
"(",
"dir_files",
"[",
"b\"metadata.json\"",
"]",
"[",
"1",
"]",
")",
"dir_data_list",
"=",
"list",
"(",
")",
"local_file_pos",
"=",
"dir_files",
"[",
"b\"data.npy\"",
"]",
"[",
"1",
"]",
"local_file",
"=",
"local_files",
"[",
"local_file_pos",
"]",
"dir_data_list",
".",
"append",
"(",
"(",
"local_file_pos",
",",
"b\"data.npy\"",
",",
"local_file",
"[",
"2",
"]",
",",
"local_file",
"[",
"3",
"]",
")",
")",
"write_zip_fp",
"(",
"fp",
",",
"None",
",",
"properties",
",",
"dir_data_list",
")",
"else",
":",
"data",
"=",
"None",
"if",
"b\"data.npy\"",
"in",
"dir_files",
":",
"fp",
".",
"seek",
"(",
"local_files",
"[",
"dir_files",
"[",
"b\"data.npy\"",
"]",
"[",
"1",
"]",
"]",
"[",
"1",
"]",
")",
"data",
"=",
"numpy",
".",
"load",
"(",
"fp",
")",
"fp",
".",
"seek",
"(",
"0",
")",
"write_zip_fp",
"(",
"fp",
",",
"data",
",",
"properties",
")"
] |
Rewrite the json properties in the zip file
:param file_path: the file path to the zip file
:param properties: the updated properties to write to the zip file
This method will attempt to keep the data file within the zip
file intact without rewriting it. However, if the data file is not the
first item in the zip file, this method will rewrite it.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
|
[
"Rewrite",
"the",
"json",
"properties",
"in",
"the",
"zip",
"file"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L325-L356
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
NDataHandler.is_matching
|
def is_matching(cls, file_path):
"""
Return whether the given absolute file path is an ndata file.
"""
if file_path.endswith(".ndata") and os.path.exists(file_path):
try:
with open(file_path, "r+b") as fp:
local_files, dir_files, eocd = parse_zip(fp)
contains_data = b"data.npy" in dir_files
contains_metadata = b"metadata.json" in dir_files
file_count = contains_data + contains_metadata # use fact that True is 1, False is 0
# TODO: make sure ndata isn't compressed, or handle it
if len(dir_files) != file_count or file_count == 0:
return False
return True
except Exception as e:
logging.error("Exception parsing ndata file: %s", file_path)
logging.error(str(e))
return False
|
python
|
def is_matching(cls, file_path):
"""
Return whether the given absolute file path is an ndata file.
"""
if file_path.endswith(".ndata") and os.path.exists(file_path):
try:
with open(file_path, "r+b") as fp:
local_files, dir_files, eocd = parse_zip(fp)
contains_data = b"data.npy" in dir_files
contains_metadata = b"metadata.json" in dir_files
file_count = contains_data + contains_metadata # use fact that True is 1, False is 0
# TODO: make sure ndata isn't compressed, or handle it
if len(dir_files) != file_count or file_count == 0:
return False
return True
except Exception as e:
logging.error("Exception parsing ndata file: %s", file_path)
logging.error(str(e))
return False
|
[
"def",
"is_matching",
"(",
"cls",
",",
"file_path",
")",
":",
"if",
"file_path",
".",
"endswith",
"(",
"\".ndata\"",
")",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"file_path",
")",
":",
"try",
":",
"with",
"open",
"(",
"file_path",
",",
"\"r+b\"",
")",
"as",
"fp",
":",
"local_files",
",",
"dir_files",
",",
"eocd",
"=",
"parse_zip",
"(",
"fp",
")",
"contains_data",
"=",
"b\"data.npy\"",
"in",
"dir_files",
"contains_metadata",
"=",
"b\"metadata.json\"",
"in",
"dir_files",
"file_count",
"=",
"contains_data",
"+",
"contains_metadata",
"# use fact that True is 1, False is 0",
"# TODO: make sure ndata isn't compressed, or handle it",
"if",
"len",
"(",
"dir_files",
")",
"!=",
"file_count",
"or",
"file_count",
"==",
"0",
":",
"return",
"False",
"return",
"True",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"error",
"(",
"\"Exception parsing ndata file: %s\"",
",",
"file_path",
")",
"logging",
".",
"error",
"(",
"str",
"(",
"e",
")",
")",
"return",
"False"
] |
Return whether the given absolute file path is an ndata file.
|
[
"Return",
"whether",
"the",
"given",
"absolute",
"file",
"path",
"is",
"an",
"ndata",
"file",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L393-L411
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
NDataHandler.write_data
|
def write_data(self, data, file_datetime):
"""
Write data to the ndata file specified by reference.
:param data: the numpy array data to write
:param file_datetime: the datetime for the file
"""
with self.__lock:
assert data is not None
absolute_file_path = self.__file_path
#logging.debug("WRITE data file %s for %s", absolute_file_path, key)
make_directory_if_needed(os.path.dirname(absolute_file_path))
properties = self.read_properties() if os.path.exists(absolute_file_path) else dict()
write_zip(absolute_file_path, data, properties)
# convert to utc time.
tz_minutes = Utility.local_utcoffset_minutes(file_datetime)
timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60
os.utime(absolute_file_path, (time.time(), timestamp))
|
python
|
def write_data(self, data, file_datetime):
"""
Write data to the ndata file specified by reference.
:param data: the numpy array data to write
:param file_datetime: the datetime for the file
"""
with self.__lock:
assert data is not None
absolute_file_path = self.__file_path
#logging.debug("WRITE data file %s for %s", absolute_file_path, key)
make_directory_if_needed(os.path.dirname(absolute_file_path))
properties = self.read_properties() if os.path.exists(absolute_file_path) else dict()
write_zip(absolute_file_path, data, properties)
# convert to utc time.
tz_minutes = Utility.local_utcoffset_minutes(file_datetime)
timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60
os.utime(absolute_file_path, (time.time(), timestamp))
|
[
"def",
"write_data",
"(",
"self",
",",
"data",
",",
"file_datetime",
")",
":",
"with",
"self",
".",
"__lock",
":",
"assert",
"data",
"is",
"not",
"None",
"absolute_file_path",
"=",
"self",
".",
"__file_path",
"#logging.debug(\"WRITE data file %s for %s\", absolute_file_path, key)",
"make_directory_if_needed",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"absolute_file_path",
")",
")",
"properties",
"=",
"self",
".",
"read_properties",
"(",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"absolute_file_path",
")",
"else",
"dict",
"(",
")",
"write_zip",
"(",
"absolute_file_path",
",",
"data",
",",
"properties",
")",
"# convert to utc time.",
"tz_minutes",
"=",
"Utility",
".",
"local_utcoffset_minutes",
"(",
"file_datetime",
")",
"timestamp",
"=",
"calendar",
".",
"timegm",
"(",
"file_datetime",
".",
"timetuple",
"(",
")",
")",
"-",
"tz_minutes",
"*",
"60",
"os",
".",
"utime",
"(",
"absolute_file_path",
",",
"(",
"time",
".",
"time",
"(",
")",
",",
"timestamp",
")",
")"
] |
Write data to the ndata file specified by reference.
:param data: the numpy array data to write
:param file_datetime: the datetime for the file
|
[
"Write",
"data",
"to",
"the",
"ndata",
"file",
"specified",
"by",
"reference",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L425-L442
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
NDataHandler.write_properties
|
def write_properties(self, properties, file_datetime):
"""
Write properties to the ndata file specified by reference.
:param reference: the reference to which to write
:param properties: the dict to write to the file
:param file_datetime: the datetime for the file
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
"""
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("WRITE properties %s for %s", absolute_file_path, key)
make_directory_if_needed(os.path.dirname(absolute_file_path))
exists = os.path.exists(absolute_file_path)
if exists:
rewrite_zip(absolute_file_path, Utility.clean_dict(properties))
else:
write_zip(absolute_file_path, None, Utility.clean_dict(properties))
# convert to utc time.
tz_minutes = Utility.local_utcoffset_minutes(file_datetime)
timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60
os.utime(absolute_file_path, (time.time(), timestamp))
|
python
|
def write_properties(self, properties, file_datetime):
"""
Write properties to the ndata file specified by reference.
:param reference: the reference to which to write
:param properties: the dict to write to the file
:param file_datetime: the datetime for the file
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
"""
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("WRITE properties %s for %s", absolute_file_path, key)
make_directory_if_needed(os.path.dirname(absolute_file_path))
exists = os.path.exists(absolute_file_path)
if exists:
rewrite_zip(absolute_file_path, Utility.clean_dict(properties))
else:
write_zip(absolute_file_path, None, Utility.clean_dict(properties))
# convert to utc time.
tz_minutes = Utility.local_utcoffset_minutes(file_datetime)
timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60
os.utime(absolute_file_path, (time.time(), timestamp))
|
[
"def",
"write_properties",
"(",
"self",
",",
"properties",
",",
"file_datetime",
")",
":",
"with",
"self",
".",
"__lock",
":",
"absolute_file_path",
"=",
"self",
".",
"__file_path",
"#logging.debug(\"WRITE properties %s for %s\", absolute_file_path, key)",
"make_directory_if_needed",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"absolute_file_path",
")",
")",
"exists",
"=",
"os",
".",
"path",
".",
"exists",
"(",
"absolute_file_path",
")",
"if",
"exists",
":",
"rewrite_zip",
"(",
"absolute_file_path",
",",
"Utility",
".",
"clean_dict",
"(",
"properties",
")",
")",
"else",
":",
"write_zip",
"(",
"absolute_file_path",
",",
"None",
",",
"Utility",
".",
"clean_dict",
"(",
"properties",
")",
")",
"# convert to utc time.",
"tz_minutes",
"=",
"Utility",
".",
"local_utcoffset_minutes",
"(",
"file_datetime",
")",
"timestamp",
"=",
"calendar",
".",
"timegm",
"(",
"file_datetime",
".",
"timetuple",
"(",
")",
")",
"-",
"tz_minutes",
"*",
"60",
"os",
".",
"utime",
"(",
"absolute_file_path",
",",
"(",
"time",
".",
"time",
"(",
")",
",",
"timestamp",
")",
")"
] |
Write properties to the ndata file specified by reference.
:param reference: the reference to which to write
:param properties: the dict to write to the file
:param file_datetime: the datetime for the file
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
|
[
"Write",
"properties",
"to",
"the",
"ndata",
"file",
"specified",
"by",
"reference",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L444-L467
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
NDataHandler.read_properties
|
def read_properties(self):
"""
Read properties from the ndata file reference
:param reference: the reference from which to read
:return: a tuple of the item_uuid and a dict of the properties
"""
with self.__lock:
absolute_file_path = self.__file_path
with open(absolute_file_path, "rb") as fp:
local_files, dir_files, eocd = parse_zip(fp)
properties = read_json(fp, local_files, dir_files, b"metadata.json")
return properties
|
python
|
def read_properties(self):
"""
Read properties from the ndata file reference
:param reference: the reference from which to read
:return: a tuple of the item_uuid and a dict of the properties
"""
with self.__lock:
absolute_file_path = self.__file_path
with open(absolute_file_path, "rb") as fp:
local_files, dir_files, eocd = parse_zip(fp)
properties = read_json(fp, local_files, dir_files, b"metadata.json")
return properties
|
[
"def",
"read_properties",
"(",
"self",
")",
":",
"with",
"self",
".",
"__lock",
":",
"absolute_file_path",
"=",
"self",
".",
"__file_path",
"with",
"open",
"(",
"absolute_file_path",
",",
"\"rb\"",
")",
"as",
"fp",
":",
"local_files",
",",
"dir_files",
",",
"eocd",
"=",
"parse_zip",
"(",
"fp",
")",
"properties",
"=",
"read_json",
"(",
"fp",
",",
"local_files",
",",
"dir_files",
",",
"b\"metadata.json\"",
")",
"return",
"properties"
] |
Read properties from the ndata file reference
:param reference: the reference from which to read
:return: a tuple of the item_uuid and a dict of the properties
|
[
"Read",
"properties",
"from",
"the",
"ndata",
"file",
"reference"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L469-L481
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
NDataHandler.read_data
|
def read_data(self):
"""
Read data from the ndata file reference
:param reference: the reference from which to read
:return: a numpy array of the data; maybe None
"""
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("READ data file %s", absolute_file_path)
with open(absolute_file_path, "rb") as fp:
local_files, dir_files, eocd = parse_zip(fp)
return read_data(fp, local_files, dir_files, b"data.npy")
return None
|
python
|
def read_data(self):
"""
Read data from the ndata file reference
:param reference: the reference from which to read
:return: a numpy array of the data; maybe None
"""
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("READ data file %s", absolute_file_path)
with open(absolute_file_path, "rb") as fp:
local_files, dir_files, eocd = parse_zip(fp)
return read_data(fp, local_files, dir_files, b"data.npy")
return None
|
[
"def",
"read_data",
"(",
"self",
")",
":",
"with",
"self",
".",
"__lock",
":",
"absolute_file_path",
"=",
"self",
".",
"__file_path",
"#logging.debug(\"READ data file %s\", absolute_file_path)",
"with",
"open",
"(",
"absolute_file_path",
",",
"\"rb\"",
")",
"as",
"fp",
":",
"local_files",
",",
"dir_files",
",",
"eocd",
"=",
"parse_zip",
"(",
"fp",
")",
"return",
"read_data",
"(",
"fp",
",",
"local_files",
",",
"dir_files",
",",
"b\"data.npy\"",
")",
"return",
"None"
] |
Read data from the ndata file reference
:param reference: the reference from which to read
:return: a numpy array of the data; maybe None
|
[
"Read",
"data",
"from",
"the",
"ndata",
"file",
"reference"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L483-L496
|
train
|
nion-software/nionswift
|
nion/swift/model/NDataHandler.py
|
NDataHandler.remove
|
def remove(self):
"""
Remove the ndata file reference
:param reference: the reference to remove
"""
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("DELETE data file %s", absolute_file_path)
if os.path.isfile(absolute_file_path):
os.remove(absolute_file_path)
|
python
|
def remove(self):
"""
Remove the ndata file reference
:param reference: the reference to remove
"""
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("DELETE data file %s", absolute_file_path)
if os.path.isfile(absolute_file_path):
os.remove(absolute_file_path)
|
[
"def",
"remove",
"(",
"self",
")",
":",
"with",
"self",
".",
"__lock",
":",
"absolute_file_path",
"=",
"self",
".",
"__file_path",
"#logging.debug(\"DELETE data file %s\", absolute_file_path)",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"absolute_file_path",
")",
":",
"os",
".",
"remove",
"(",
"absolute_file_path",
")"
] |
Remove the ndata file reference
:param reference: the reference to remove
|
[
"Remove",
"the",
"ndata",
"file",
"reference"
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/NDataHandler.py#L498-L508
|
train
|
nion-software/nionswift
|
nion/swift/DisplayPanel.py
|
DisplayPanelManager.build_menu
|
def build_menu(self, display_type_menu, document_controller, display_panel):
"""Build the dynamic menu for the selected display panel.
The user accesses this menu by right-clicking on the display panel.
The basic menu items are to an empty display panel or a browser display panel.
After that, each display controller factory is given a chance to add to the menu. The display
controllers (for instance, a scan acquisition controller), may add its own menu items.
"""
dynamic_live_actions = list()
def switch_to_display_content(display_panel_type):
self.switch_to_display_content(document_controller, display_panel, display_panel_type, display_panel.display_item)
empty_action = display_type_menu.add_menu_item(_("Clear Display Panel"), functools.partial(switch_to_display_content, "empty-display-panel"))
display_type_menu.add_separator()
data_item_display_action = display_type_menu.add_menu_item(_("Display Item"), functools.partial(switch_to_display_content, "data-display-panel"))
thumbnail_browser_action = display_type_menu.add_menu_item(_("Thumbnail Browser"), functools.partial(switch_to_display_content, "thumbnail-browser-display-panel"))
grid_browser_action = display_type_menu.add_menu_item(_("Grid Browser"), functools.partial(switch_to_display_content, "browser-display-panel"))
display_type_menu.add_separator()
display_panel_type = display_panel.display_panel_type
empty_action.checked = display_panel_type == "empty" and display_panel.display_panel_controller is None
data_item_display_action.checked = display_panel_type == "data_item"
thumbnail_browser_action.checked = display_panel_type == "horizontal"
grid_browser_action.checked = display_panel_type == "grid"
dynamic_live_actions.append(empty_action)
dynamic_live_actions.append(data_item_display_action)
dynamic_live_actions.append(thumbnail_browser_action)
dynamic_live_actions.append(grid_browser_action)
for factory in self.__display_controller_factories.values():
dynamic_live_actions.extend(factory.build_menu(display_type_menu, display_panel))
return dynamic_live_actions
|
python
|
def build_menu(self, display_type_menu, document_controller, display_panel):
"""Build the dynamic menu for the selected display panel.
The user accesses this menu by right-clicking on the display panel.
The basic menu items are to an empty display panel or a browser display panel.
After that, each display controller factory is given a chance to add to the menu. The display
controllers (for instance, a scan acquisition controller), may add its own menu items.
"""
dynamic_live_actions = list()
def switch_to_display_content(display_panel_type):
self.switch_to_display_content(document_controller, display_panel, display_panel_type, display_panel.display_item)
empty_action = display_type_menu.add_menu_item(_("Clear Display Panel"), functools.partial(switch_to_display_content, "empty-display-panel"))
display_type_menu.add_separator()
data_item_display_action = display_type_menu.add_menu_item(_("Display Item"), functools.partial(switch_to_display_content, "data-display-panel"))
thumbnail_browser_action = display_type_menu.add_menu_item(_("Thumbnail Browser"), functools.partial(switch_to_display_content, "thumbnail-browser-display-panel"))
grid_browser_action = display_type_menu.add_menu_item(_("Grid Browser"), functools.partial(switch_to_display_content, "browser-display-panel"))
display_type_menu.add_separator()
display_panel_type = display_panel.display_panel_type
empty_action.checked = display_panel_type == "empty" and display_panel.display_panel_controller is None
data_item_display_action.checked = display_panel_type == "data_item"
thumbnail_browser_action.checked = display_panel_type == "horizontal"
grid_browser_action.checked = display_panel_type == "grid"
dynamic_live_actions.append(empty_action)
dynamic_live_actions.append(data_item_display_action)
dynamic_live_actions.append(thumbnail_browser_action)
dynamic_live_actions.append(grid_browser_action)
for factory in self.__display_controller_factories.values():
dynamic_live_actions.extend(factory.build_menu(display_type_menu, display_panel))
return dynamic_live_actions
|
[
"def",
"build_menu",
"(",
"self",
",",
"display_type_menu",
",",
"document_controller",
",",
"display_panel",
")",
":",
"dynamic_live_actions",
"=",
"list",
"(",
")",
"def",
"switch_to_display_content",
"(",
"display_panel_type",
")",
":",
"self",
".",
"switch_to_display_content",
"(",
"document_controller",
",",
"display_panel",
",",
"display_panel_type",
",",
"display_panel",
".",
"display_item",
")",
"empty_action",
"=",
"display_type_menu",
".",
"add_menu_item",
"(",
"_",
"(",
"\"Clear Display Panel\"",
")",
",",
"functools",
".",
"partial",
"(",
"switch_to_display_content",
",",
"\"empty-display-panel\"",
")",
")",
"display_type_menu",
".",
"add_separator",
"(",
")",
"data_item_display_action",
"=",
"display_type_menu",
".",
"add_menu_item",
"(",
"_",
"(",
"\"Display Item\"",
")",
",",
"functools",
".",
"partial",
"(",
"switch_to_display_content",
",",
"\"data-display-panel\"",
")",
")",
"thumbnail_browser_action",
"=",
"display_type_menu",
".",
"add_menu_item",
"(",
"_",
"(",
"\"Thumbnail Browser\"",
")",
",",
"functools",
".",
"partial",
"(",
"switch_to_display_content",
",",
"\"thumbnail-browser-display-panel\"",
")",
")",
"grid_browser_action",
"=",
"display_type_menu",
".",
"add_menu_item",
"(",
"_",
"(",
"\"Grid Browser\"",
")",
",",
"functools",
".",
"partial",
"(",
"switch_to_display_content",
",",
"\"browser-display-panel\"",
")",
")",
"display_type_menu",
".",
"add_separator",
"(",
")",
"display_panel_type",
"=",
"display_panel",
".",
"display_panel_type",
"empty_action",
".",
"checked",
"=",
"display_panel_type",
"==",
"\"empty\"",
"and",
"display_panel",
".",
"display_panel_controller",
"is",
"None",
"data_item_display_action",
".",
"checked",
"=",
"display_panel_type",
"==",
"\"data_item\"",
"thumbnail_browser_action",
".",
"checked",
"=",
"display_panel_type",
"==",
"\"horizontal\"",
"grid_browser_action",
".",
"checked",
"=",
"display_panel_type",
"==",
"\"grid\"",
"dynamic_live_actions",
".",
"append",
"(",
"empty_action",
")",
"dynamic_live_actions",
".",
"append",
"(",
"data_item_display_action",
")",
"dynamic_live_actions",
".",
"append",
"(",
"thumbnail_browser_action",
")",
"dynamic_live_actions",
".",
"append",
"(",
"grid_browser_action",
")",
"for",
"factory",
"in",
"self",
".",
"__display_controller_factories",
".",
"values",
"(",
")",
":",
"dynamic_live_actions",
".",
"extend",
"(",
"factory",
".",
"build_menu",
"(",
"display_type_menu",
",",
"display_panel",
")",
")",
"return",
"dynamic_live_actions"
] |
Build the dynamic menu for the selected display panel.
The user accesses this menu by right-clicking on the display panel.
The basic menu items are to an empty display panel or a browser display panel.
After that, each display controller factory is given a chance to add to the menu. The display
controllers (for instance, a scan acquisition controller), may add its own menu items.
|
[
"Build",
"the",
"dynamic",
"menu",
"for",
"the",
"selected",
"display",
"panel",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/DisplayPanel.py#L1898-L1936
|
train
|
nion-software/nionswift
|
nion/swift/model/DataStructure.py
|
DataStructure.persistent_object_context_changed
|
def persistent_object_context_changed(self):
""" Override from PersistentObject. """
super().persistent_object_context_changed()
def source_registered(source):
self.__source = source
def source_unregistered(source=None):
pass
def reference_registered(property_name, reference):
self.__referenced_objects[property_name] = reference
def reference_unregistered(property_name, reference=None):
pass
if self.persistent_object_context:
self.persistent_object_context.subscribe(self.source_uuid, source_registered, source_unregistered)
for property_name, value in self.__properties.items():
if isinstance(value, dict) and value.get("type") in {"data_item", "display_item", "data_source", "graphic", "structure"} and "uuid" in value:
self.persistent_object_context.subscribe(uuid.UUID(value["uuid"]), functools.partial(reference_registered, property_name), functools.partial(reference_unregistered, property_name))
else:
source_unregistered()
for property_name, value in self.__properties.items():
if isinstance(value, dict) and value.get("type") in {"data_item", "display_item", "data_source", "graphic", "structure"} and "uuid" in value:
reference_unregistered(property_name)
|
python
|
def persistent_object_context_changed(self):
""" Override from PersistentObject. """
super().persistent_object_context_changed()
def source_registered(source):
self.__source = source
def source_unregistered(source=None):
pass
def reference_registered(property_name, reference):
self.__referenced_objects[property_name] = reference
def reference_unregistered(property_name, reference=None):
pass
if self.persistent_object_context:
self.persistent_object_context.subscribe(self.source_uuid, source_registered, source_unregistered)
for property_name, value in self.__properties.items():
if isinstance(value, dict) and value.get("type") in {"data_item", "display_item", "data_source", "graphic", "structure"} and "uuid" in value:
self.persistent_object_context.subscribe(uuid.UUID(value["uuid"]), functools.partial(reference_registered, property_name), functools.partial(reference_unregistered, property_name))
else:
source_unregistered()
for property_name, value in self.__properties.items():
if isinstance(value, dict) and value.get("type") in {"data_item", "display_item", "data_source", "graphic", "structure"} and "uuid" in value:
reference_unregistered(property_name)
|
[
"def",
"persistent_object_context_changed",
"(",
"self",
")",
":",
"super",
"(",
")",
".",
"persistent_object_context_changed",
"(",
")",
"def",
"source_registered",
"(",
"source",
")",
":",
"self",
".",
"__source",
"=",
"source",
"def",
"source_unregistered",
"(",
"source",
"=",
"None",
")",
":",
"pass",
"def",
"reference_registered",
"(",
"property_name",
",",
"reference",
")",
":",
"self",
".",
"__referenced_objects",
"[",
"property_name",
"]",
"=",
"reference",
"def",
"reference_unregistered",
"(",
"property_name",
",",
"reference",
"=",
"None",
")",
":",
"pass",
"if",
"self",
".",
"persistent_object_context",
":",
"self",
".",
"persistent_object_context",
".",
"subscribe",
"(",
"self",
".",
"source_uuid",
",",
"source_registered",
",",
"source_unregistered",
")",
"for",
"property_name",
",",
"value",
"in",
"self",
".",
"__properties",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
"and",
"value",
".",
"get",
"(",
"\"type\"",
")",
"in",
"{",
"\"data_item\"",
",",
"\"display_item\"",
",",
"\"data_source\"",
",",
"\"graphic\"",
",",
"\"structure\"",
"}",
"and",
"\"uuid\"",
"in",
"value",
":",
"self",
".",
"persistent_object_context",
".",
"subscribe",
"(",
"uuid",
".",
"UUID",
"(",
"value",
"[",
"\"uuid\"",
"]",
")",
",",
"functools",
".",
"partial",
"(",
"reference_registered",
",",
"property_name",
")",
",",
"functools",
".",
"partial",
"(",
"reference_unregistered",
",",
"property_name",
")",
")",
"else",
":",
"source_unregistered",
"(",
")",
"for",
"property_name",
",",
"value",
"in",
"self",
".",
"__properties",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
"and",
"value",
".",
"get",
"(",
"\"type\"",
")",
"in",
"{",
"\"data_item\"",
",",
"\"display_item\"",
",",
"\"data_source\"",
",",
"\"graphic\"",
",",
"\"structure\"",
"}",
"and",
"\"uuid\"",
"in",
"value",
":",
"reference_unregistered",
"(",
"property_name",
")"
] |
Override from PersistentObject.
|
[
"Override",
"from",
"PersistentObject",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/model/DataStructure.py#L114-L141
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.ativar_sat
|
def ativar_sat(self, tipo_certificado, cnpj, codigo_uf):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: Uma resposta SAT especializada em ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
"""
resp = self._http_post('ativarsat',
tipo_certificado=tipo_certificado,
cnpj=cnpj,
codigo_uf=codigo_uf)
conteudo = resp.json()
return RespostaAtivarSAT.analisar(conteudo.get('retorno'))
|
python
|
def ativar_sat(self, tipo_certificado, cnpj, codigo_uf):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: Uma resposta SAT especializada em ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
"""
resp = self._http_post('ativarsat',
tipo_certificado=tipo_certificado,
cnpj=cnpj,
codigo_uf=codigo_uf)
conteudo = resp.json()
return RespostaAtivarSAT.analisar(conteudo.get('retorno'))
|
[
"def",
"ativar_sat",
"(",
"self",
",",
"tipo_certificado",
",",
"cnpj",
",",
"codigo_uf",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'ativarsat'",
",",
"tipo_certificado",
"=",
"tipo_certificado",
",",
"cnpj",
"=",
"cnpj",
",",
"codigo_uf",
"=",
"codigo_uf",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaAtivarSAT",
".",
"analisar",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: Uma resposta SAT especializada em ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"ativar_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L103-L114
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.comunicar_certificado_icpbrasil
|
def comunicar_certificado_icpbrasil(self, certificado):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('comunicarcertificadoicpbrasil',
certificado=certificado)
conteudo = resp.json()
return RespostaSAT.comunicar_certificado_icpbrasil(
conteudo.get('retorno'))
|
python
|
def comunicar_certificado_icpbrasil(self, certificado):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('comunicarcertificadoicpbrasil',
certificado=certificado)
conteudo = resp.json()
return RespostaSAT.comunicar_certificado_icpbrasil(
conteudo.get('retorno'))
|
[
"def",
"comunicar_certificado_icpbrasil",
"(",
"self",
",",
"certificado",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'comunicarcertificadoicpbrasil'",
",",
"certificado",
"=",
"certificado",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaSAT",
".",
"comunicar_certificado_icpbrasil",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"comunicar_certificado_icpbrasil",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L117-L127
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.enviar_dados_venda
|
def enviar_dados_venda(self, dados_venda):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: Uma resposta SAT especializada em ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
"""
resp = self._http_post('enviardadosvenda',
dados_venda=dados_venda.documento())
conteudo = resp.json()
return RespostaEnviarDadosVenda.analisar(conteudo.get('retorno'))
|
python
|
def enviar_dados_venda(self, dados_venda):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: Uma resposta SAT especializada em ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
"""
resp = self._http_post('enviardadosvenda',
dados_venda=dados_venda.documento())
conteudo = resp.json()
return RespostaEnviarDadosVenda.analisar(conteudo.get('retorno'))
|
[
"def",
"enviar_dados_venda",
"(",
"self",
",",
"dados_venda",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'enviardadosvenda'",
",",
"dados_venda",
"=",
"dados_venda",
".",
"documento",
"(",
")",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaEnviarDadosVenda",
".",
"analisar",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: Uma resposta SAT especializada em ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"enviar_dados_venda",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L130-L139
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.cancelar_ultima_venda
|
def cancelar_ultima_venda(self, chave_cfe, dados_cancelamento):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: Uma resposta SAT especializada em ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
"""
resp = self._http_post('cancelarultimavenda',
chave_cfe=chave_cfe,
dados_cancelamento=dados_cancelamento.documento())
conteudo = resp.json()
return RespostaCancelarUltimaVenda.analisar(conteudo.get('retorno'))
|
python
|
def cancelar_ultima_venda(self, chave_cfe, dados_cancelamento):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: Uma resposta SAT especializada em ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
"""
resp = self._http_post('cancelarultimavenda',
chave_cfe=chave_cfe,
dados_cancelamento=dados_cancelamento.documento())
conteudo = resp.json()
return RespostaCancelarUltimaVenda.analisar(conteudo.get('retorno'))
|
[
"def",
"cancelar_ultima_venda",
"(",
"self",
",",
"chave_cfe",
",",
"dados_cancelamento",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'cancelarultimavenda'",
",",
"chave_cfe",
"=",
"chave_cfe",
",",
"dados_cancelamento",
"=",
"dados_cancelamento",
".",
"documento",
"(",
")",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaCancelarUltimaVenda",
".",
"analisar",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: Uma resposta SAT especializada em ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"cancelar_ultima_venda",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L142-L152
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.consultar_sat
|
def consultar_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('consultarsat')
conteudo = resp.json()
return RespostaSAT.consultar_sat(conteudo.get('retorno'))
|
python
|
def consultar_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('consultarsat')
conteudo = resp.json()
return RespostaSAT.consultar_sat(conteudo.get('retorno'))
|
[
"def",
"consultar_sat",
"(",
"self",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'consultarsat'",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaSAT",
".",
"consultar_sat",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"consultar_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L155-L163
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.consultar_status_operacional
|
def consultar_status_operacional(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: Uma resposta SAT especializada em ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
"""
resp = self._http_post('consultarstatusoperacional')
conteudo = resp.json()
return RespostaConsultarStatusOperacional.analisar(
conteudo.get('retorno'))
|
python
|
def consultar_status_operacional(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: Uma resposta SAT especializada em ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
"""
resp = self._http_post('consultarstatusoperacional')
conteudo = resp.json()
return RespostaConsultarStatusOperacional.analisar(
conteudo.get('retorno'))
|
[
"def",
"consultar_status_operacional",
"(",
"self",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'consultarstatusoperacional'",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaConsultarStatusOperacional",
".",
"analisar",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: Uma resposta SAT especializada em ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"consultar_status_operacional",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L178-L187
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.consultar_numero_sessao
|
def consultar_numero_sessao(self, numero_sessao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: Uma resposta SAT que irá depender da sessão consultada.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('consultarnumerosessao',
numero_sessao=numero_sessao)
conteudo = resp.json()
return RespostaConsultarNumeroSessao.analisar(conteudo.get('retorno'))
|
python
|
def consultar_numero_sessao(self, numero_sessao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: Uma resposta SAT que irá depender da sessão consultada.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('consultarnumerosessao',
numero_sessao=numero_sessao)
conteudo = resp.json()
return RespostaConsultarNumeroSessao.analisar(conteudo.get('retorno'))
|
[
"def",
"consultar_numero_sessao",
"(",
"self",
",",
"numero_sessao",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'consultarnumerosessao'",
",",
"numero_sessao",
"=",
"numero_sessao",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaConsultarNumeroSessao",
".",
"analisar",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: Uma resposta SAT que irá depender da sessão consultada.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"consultar_numero_sessao",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L190-L199
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.configurar_interface_de_rede
|
def configurar_interface_de_rede(self, configuracao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('configurarinterfacederede',
configuracao=configuracao.documento())
conteudo = resp.json()
return RespostaSAT.configurar_interface_de_rede(conteudo.get('retorno'))
|
python
|
def configurar_interface_de_rede(self, configuracao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('configurarinterfacederede',
configuracao=configuracao.documento())
conteudo = resp.json()
return RespostaSAT.configurar_interface_de_rede(conteudo.get('retorno'))
|
[
"def",
"configurar_interface_de_rede",
"(",
"self",
",",
"configuracao",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'configurarinterfacederede'",
",",
"configuracao",
"=",
"configuracao",
".",
"documento",
"(",
")",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaSAT",
".",
"configurar_interface_de_rede",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"configurar_interface_de_rede",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L202-L211
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.associar_assinatura
|
def associar_assinatura(self, sequencia_cnpj, assinatura_ac):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('associarassinatura',
sequencia_cnpj=sequencia_cnpj, assinatura_ac=assinatura_ac)
# (!) resposta baseada na redação com efeitos até 31-12-2016
conteudo = resp.json()
return RespostaSAT.associar_assinatura(conteudo.get('retorno'))
|
python
|
def associar_assinatura(self, sequencia_cnpj, assinatura_ac):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('associarassinatura',
sequencia_cnpj=sequencia_cnpj, assinatura_ac=assinatura_ac)
# (!) resposta baseada na redação com efeitos até 31-12-2016
conteudo = resp.json()
return RespostaSAT.associar_assinatura(conteudo.get('retorno'))
|
[
"def",
"associar_assinatura",
"(",
"self",
",",
"sequencia_cnpj",
",",
"assinatura_ac",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'associarassinatura'",
",",
"sequencia_cnpj",
"=",
"sequencia_cnpj",
",",
"assinatura_ac",
"=",
"assinatura_ac",
")",
"# (!) resposta baseada na redação com efeitos até 31-12-2016",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaSAT",
".",
"associar_assinatura",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"associar_assinatura",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L214-L224
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.atualizar_software_sat
|
def atualizar_software_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.atualizar_software_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('atualizarsoftwaresat')
conteudo = resp.json()
return RespostaSAT.atualizar_software_sat(conteudo.get('retorno'))
|
python
|
def atualizar_software_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.atualizar_software_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('atualizarsoftwaresat')
conteudo = resp.json()
return RespostaSAT.atualizar_software_sat(conteudo.get('retorno'))
|
[
"def",
"atualizar_software_sat",
"(",
"self",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'atualizarsoftwaresat'",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaSAT",
".",
"atualizar_software_sat",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.atualizar_software_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"atualizar_software_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L227-L235
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.extrair_logs
|
def extrair_logs(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.extrair_logs`.
:return: Uma resposta SAT especializada em ``ExtrairLogs``.
:rtype: satcfe.resposta.extrairlogs.RespostaExtrairLogs
"""
resp = self._http_post('extrairlogs')
conteudo = resp.json()
return RespostaExtrairLogs.analisar(conteudo.get('retorno'))
|
python
|
def extrair_logs(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.extrair_logs`.
:return: Uma resposta SAT especializada em ``ExtrairLogs``.
:rtype: satcfe.resposta.extrairlogs.RespostaExtrairLogs
"""
resp = self._http_post('extrairlogs')
conteudo = resp.json()
return RespostaExtrairLogs.analisar(conteudo.get('retorno'))
|
[
"def",
"extrair_logs",
"(",
"self",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'extrairlogs'",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaExtrairLogs",
".",
"analisar",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.extrair_logs`.
:return: Uma resposta SAT especializada em ``ExtrairLogs``.
:rtype: satcfe.resposta.extrairlogs.RespostaExtrairLogs
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"extrair_logs",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L238-L246
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.bloquear_sat
|
def bloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('bloquearsat')
conteudo = resp.json()
return RespostaSAT.bloquear_sat(conteudo.get('retorno'))
|
python
|
def bloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('bloquearsat')
conteudo = resp.json()
return RespostaSAT.bloquear_sat(conteudo.get('retorno'))
|
[
"def",
"bloquear_sat",
"(",
"self",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'bloquearsat'",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaSAT",
".",
"bloquear_sat",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"bloquear_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L249-L257
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.desbloquear_sat
|
def desbloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.desbloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('desbloquearsat')
conteudo = resp.json()
return RespostaSAT.desbloquear_sat(conteudo.get('retorno'))
|
python
|
def desbloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.desbloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('desbloquearsat')
conteudo = resp.json()
return RespostaSAT.desbloquear_sat(conteudo.get('retorno'))
|
[
"def",
"desbloquear_sat",
"(",
"self",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'desbloquearsat'",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaSAT",
".",
"desbloquear_sat",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.desbloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"desbloquear_sat",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L260-L268
|
train
|
base4sistemas/satcfe
|
satcfe/clientesathub.py
|
ClienteSATHub.trocar_codigo_de_ativacao
|
def trocar_codigo_de_ativacao(self, novo_codigo_ativacao,
opcao=constantes.CODIGO_ATIVACAO_REGULAR,
codigo_emergencia=None):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.trocar_codigo_de_ativacao`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('trocarcodigodeativacao',
novo_codigo_ativacao=novo_codigo_ativacao,
opcao=opcao,
codigo_emergencia=codigo_emergencia)
conteudo = resp.json()
return RespostaSAT.trocar_codigo_de_ativacao(conteudo.get('retorno'))
|
python
|
def trocar_codigo_de_ativacao(self, novo_codigo_ativacao,
opcao=constantes.CODIGO_ATIVACAO_REGULAR,
codigo_emergencia=None):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.trocar_codigo_de_ativacao`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('trocarcodigodeativacao',
novo_codigo_ativacao=novo_codigo_ativacao,
opcao=opcao,
codigo_emergencia=codigo_emergencia)
conteudo = resp.json()
return RespostaSAT.trocar_codigo_de_ativacao(conteudo.get('retorno'))
|
[
"def",
"trocar_codigo_de_ativacao",
"(",
"self",
",",
"novo_codigo_ativacao",
",",
"opcao",
"=",
"constantes",
".",
"CODIGO_ATIVACAO_REGULAR",
",",
"codigo_emergencia",
"=",
"None",
")",
":",
"resp",
"=",
"self",
".",
"_http_post",
"(",
"'trocarcodigodeativacao'",
",",
"novo_codigo_ativacao",
"=",
"novo_codigo_ativacao",
",",
"opcao",
"=",
"opcao",
",",
"codigo_emergencia",
"=",
"codigo_emergencia",
")",
"conteudo",
"=",
"resp",
".",
"json",
"(",
")",
"return",
"RespostaSAT",
".",
"trocar_codigo_de_ativacao",
"(",
"conteudo",
".",
"get",
"(",
"'retorno'",
")",
")"
] |
Sobrepõe :meth:`~satcfe.base.FuncoesSAT.trocar_codigo_de_ativacao`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
[
"Sobrepõe",
":",
"meth",
":",
"~satcfe",
".",
"base",
".",
"FuncoesSAT",
".",
"trocar_codigo_de_ativacao",
"."
] |
cb8e8815f4133d3e3d94cf526fa86767b4521ed9
|
https://github.com/base4sistemas/satcfe/blob/cb8e8815f4133d3e3d94cf526fa86767b4521ed9/satcfe/clientesathub.py#L271-L284
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
Graphic.bounds
|
def bounds(self) -> typing.Tuple[typing.Tuple[float, float], typing.Tuple[float, float]]:
"""Return the bounds property in relative coordinates.
Bounds is a tuple ((top, left), (height, width))"""
...
|
python
|
def bounds(self) -> typing.Tuple[typing.Tuple[float, float], typing.Tuple[float, float]]:
"""Return the bounds property in relative coordinates.
Bounds is a tuple ((top, left), (height, width))"""
...
|
[
"def",
"bounds",
"(",
"self",
")",
"->",
"typing",
".",
"Tuple",
"[",
"typing",
".",
"Tuple",
"[",
"float",
",",
"float",
"]",
",",
"typing",
".",
"Tuple",
"[",
"float",
",",
"float",
"]",
"]",
":",
"..."
] |
Return the bounds property in relative coordinates.
Bounds is a tuple ((top, left), (height, width))
|
[
"Return",
"the",
"bounds",
"property",
"in",
"relative",
"coordinates",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L38-L42
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
Graphic.end
|
def end(self, value: typing.Union[float, typing.Tuple[float, float]]) -> None:
"""Set the end property in relative coordinates.
End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line."""
...
|
python
|
def end(self, value: typing.Union[float, typing.Tuple[float, float]]) -> None:
"""Set the end property in relative coordinates.
End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line."""
...
|
[
"def",
"end",
"(",
"self",
",",
"value",
":",
"typing",
".",
"Union",
"[",
"float",
",",
"typing",
".",
"Tuple",
"[",
"float",
",",
"float",
"]",
"]",
")",
"->",
"None",
":",
"..."
] |
Set the end property in relative coordinates.
End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line.
|
[
"Set",
"the",
"end",
"property",
"in",
"relative",
"coordinates",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L73-L77
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
Graphic.start
|
def start(self, value: typing.Union[float, typing.Tuple[float, float]]) -> None:
"""Set the end property in relative coordinates.
End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line."""
...
|
python
|
def start(self, value: typing.Union[float, typing.Tuple[float, float]]) -> None:
"""Set the end property in relative coordinates.
End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line."""
...
|
[
"def",
"start",
"(",
"self",
",",
"value",
":",
"typing",
".",
"Union",
"[",
"float",
",",
"typing",
".",
"Tuple",
"[",
"float",
",",
"float",
"]",
"]",
")",
"->",
"None",
":",
"..."
] |
Set the end property in relative coordinates.
End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line.
|
[
"Set",
"the",
"end",
"property",
"in",
"relative",
"coordinates",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L183-L187
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
Graphic.vector
|
def vector(self) -> typing.Tuple[typing.Tuple[float, float], typing.Tuple[float, float]]:
"""Return the vector property in relative coordinates.
Vector will be a tuple of tuples ((y_start, x_start), (y_end, x_end))."""
...
|
python
|
def vector(self) -> typing.Tuple[typing.Tuple[float, float], typing.Tuple[float, float]]:
"""Return the vector property in relative coordinates.
Vector will be a tuple of tuples ((y_start, x_start), (y_end, x_end))."""
...
|
[
"def",
"vector",
"(",
"self",
")",
"->",
"typing",
".",
"Tuple",
"[",
"typing",
".",
"Tuple",
"[",
"float",
",",
"float",
"]",
",",
"typing",
".",
"Tuple",
"[",
"float",
",",
"float",
"]",
"]",
":",
"..."
] |
Return the vector property in relative coordinates.
Vector will be a tuple of tuples ((y_start, x_start), (y_end, x_end)).
|
[
"Return",
"the",
"vector",
"property",
"in",
"relative",
"coordinates",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L212-L216
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
Library.get_data_item_for_hardware_source
|
def get_data_item_for_hardware_source(self, hardware_source, channel_id: str=None, processor_id: str=None, create_if_needed: bool=False, large_format: bool=False) -> DataItem:
"""Get the data item associated with hardware source and (optional) channel id and processor_id. Optionally create if missing.
:param hardware_source: The hardware_source.
:param channel_id: The (optional) channel id.
:param processor_id: The (optional) processor id for the channel.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
"""
...
|
python
|
def get_data_item_for_hardware_source(self, hardware_source, channel_id: str=None, processor_id: str=None, create_if_needed: bool=False, large_format: bool=False) -> DataItem:
"""Get the data item associated with hardware source and (optional) channel id and processor_id. Optionally create if missing.
:param hardware_source: The hardware_source.
:param channel_id: The (optional) channel id.
:param processor_id: The (optional) processor id for the channel.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
"""
...
|
[
"def",
"get_data_item_for_hardware_source",
"(",
"self",
",",
"hardware_source",
",",
"channel_id",
":",
"str",
"=",
"None",
",",
"processor_id",
":",
"str",
"=",
"None",
",",
"create_if_needed",
":",
"bool",
"=",
"False",
",",
"large_format",
":",
"bool",
"=",
"False",
")",
"->",
"DataItem",
":",
"..."
] |
Get the data item associated with hardware source and (optional) channel id and processor_id. Optionally create if missing.
:param hardware_source: The hardware_source.
:param channel_id: The (optional) channel id.
:param processor_id: The (optional) processor id for the channel.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
|
[
"Get",
"the",
"data",
"item",
"associated",
"with",
"hardware",
"source",
"and",
"(",
"optional",
")",
"channel",
"id",
"and",
"processor_id",
".",
"Optionally",
"create",
"if",
"missing",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L751-L765
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
Library.get_data_item_for_reference_key
|
def get_data_item_for_reference_key(self, data_item_reference_key: str=None, create_if_needed: bool=False, large_format: bool=False) -> DataItem:
"""Get the data item associated with data item reference key. Optionally create if missing.
:param data_item_reference_key: The data item reference key.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
"""
...
|
python
|
def get_data_item_for_reference_key(self, data_item_reference_key: str=None, create_if_needed: bool=False, large_format: bool=False) -> DataItem:
"""Get the data item associated with data item reference key. Optionally create if missing.
:param data_item_reference_key: The data item reference key.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
"""
...
|
[
"def",
"get_data_item_for_reference_key",
"(",
"self",
",",
"data_item_reference_key",
":",
"str",
"=",
"None",
",",
"create_if_needed",
":",
"bool",
"=",
"False",
",",
"large_format",
":",
"bool",
"=",
"False",
")",
"->",
"DataItem",
":",
"..."
] |
Get the data item associated with data item reference key. Optionally create if missing.
:param data_item_reference_key: The data item reference key.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
|
[
"Get",
"the",
"data",
"item",
"associated",
"with",
"data",
"item",
"reference",
"key",
".",
"Optionally",
"create",
"if",
"missing",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L767-L779
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
DocumentWindow.show_get_string_message_box
|
def show_get_string_message_box(self, caption: str, text: str, accepted_fn, rejected_fn=None, accepted_text: str=None, rejected_text: str=None) -> None:
"""Show a dialog box and ask for a string.
Caption describes the user prompt. Text is the initial/default string.
Accepted function must be a function taking one argument which is the resulting text if the user accepts the
message dialog. It will only be called if the user clicks OK.
Rejected function can be a function taking no arguments, called if the user clicks Cancel.
.. versionadded:: 1.0
Scriptable: No
"""
...
|
python
|
def show_get_string_message_box(self, caption: str, text: str, accepted_fn, rejected_fn=None, accepted_text: str=None, rejected_text: str=None) -> None:
"""Show a dialog box and ask for a string.
Caption describes the user prompt. Text is the initial/default string.
Accepted function must be a function taking one argument which is the resulting text if the user accepts the
message dialog. It will only be called if the user clicks OK.
Rejected function can be a function taking no arguments, called if the user clicks Cancel.
.. versionadded:: 1.0
Scriptable: No
"""
...
|
[
"def",
"show_get_string_message_box",
"(",
"self",
",",
"caption",
":",
"str",
",",
"text",
":",
"str",
",",
"accepted_fn",
",",
"rejected_fn",
"=",
"None",
",",
"accepted_text",
":",
"str",
"=",
"None",
",",
"rejected_text",
":",
"str",
"=",
"None",
")",
"->",
"None",
":",
"..."
] |
Show a dialog box and ask for a string.
Caption describes the user prompt. Text is the initial/default string.
Accepted function must be a function taking one argument which is the resulting text if the user accepts the
message dialog. It will only be called if the user clicks OK.
Rejected function can be a function taking no arguments, called if the user clicks Cancel.
.. versionadded:: 1.0
Scriptable: No
|
[
"Show",
"a",
"dialog",
"box",
"and",
"ask",
"for",
"a",
"string",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L987-L1001
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
API.create_calibration
|
def create_calibration(self, offset: float=None, scale: float=None, units: str=None) -> Calibration.Calibration:
"""Create a calibration object with offset, scale, and units.
:param offset: The offset of the calibration.
:param scale: The scale of the calibration.
:param units: The units of the calibration as a string.
:return: The calibration object.
.. versionadded:: 1.0
Scriptable: Yes
Calibrated units and uncalibrated units have the following relationship:
:samp:`calibrated_value = offset + value * scale`
"""
...
|
python
|
def create_calibration(self, offset: float=None, scale: float=None, units: str=None) -> Calibration.Calibration:
"""Create a calibration object with offset, scale, and units.
:param offset: The offset of the calibration.
:param scale: The scale of the calibration.
:param units: The units of the calibration as a string.
:return: The calibration object.
.. versionadded:: 1.0
Scriptable: Yes
Calibrated units and uncalibrated units have the following relationship:
:samp:`calibrated_value = offset + value * scale`
"""
...
|
[
"def",
"create_calibration",
"(",
"self",
",",
"offset",
":",
"float",
"=",
"None",
",",
"scale",
":",
"float",
"=",
"None",
",",
"units",
":",
"str",
"=",
"None",
")",
"->",
"Calibration",
".",
"Calibration",
":",
"..."
] |
Create a calibration object with offset, scale, and units.
:param offset: The offset of the calibration.
:param scale: The scale of the calibration.
:param units: The units of the calibration as a string.
:return: The calibration object.
.. versionadded:: 1.0
Scriptable: Yes
Calibrated units and uncalibrated units have the following relationship:
:samp:`calibrated_value = offset + value * scale`
|
[
"Create",
"a",
"calibration",
"object",
"with",
"offset",
"scale",
"and",
"units",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L1077-L1092
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
API.create_data_and_metadata
|
def create_data_and_metadata(self, data: numpy.ndarray, intensity_calibration: Calibration.Calibration=None, dimensional_calibrations: typing.List[Calibration.Calibration]=None, metadata: dict=None, timestamp: str=None, data_descriptor: DataAndMetadata.DataDescriptor=None) -> DataAndMetadata.DataAndMetadata:
"""Create a data_and_metadata object from data.
:param data: an ndarray of data.
:param intensity_calibration: An optional calibration object.
:param dimensional_calibrations: An optional list of calibration objects.
:param metadata: A dict of metadata.
:param timestamp: A datetime object.
:param data_descriptor: A data descriptor describing the dimensions.
.. versionadded:: 1.0
Scriptable: Yes
"""
...
|
python
|
def create_data_and_metadata(self, data: numpy.ndarray, intensity_calibration: Calibration.Calibration=None, dimensional_calibrations: typing.List[Calibration.Calibration]=None, metadata: dict=None, timestamp: str=None, data_descriptor: DataAndMetadata.DataDescriptor=None) -> DataAndMetadata.DataAndMetadata:
"""Create a data_and_metadata object from data.
:param data: an ndarray of data.
:param intensity_calibration: An optional calibration object.
:param dimensional_calibrations: An optional list of calibration objects.
:param metadata: A dict of metadata.
:param timestamp: A datetime object.
:param data_descriptor: A data descriptor describing the dimensions.
.. versionadded:: 1.0
Scriptable: Yes
"""
...
|
[
"def",
"create_data_and_metadata",
"(",
"self",
",",
"data",
":",
"numpy",
".",
"ndarray",
",",
"intensity_calibration",
":",
"Calibration",
".",
"Calibration",
"=",
"None",
",",
"dimensional_calibrations",
":",
"typing",
".",
"List",
"[",
"Calibration",
".",
"Calibration",
"]",
"=",
"None",
",",
"metadata",
":",
"dict",
"=",
"None",
",",
"timestamp",
":",
"str",
"=",
"None",
",",
"data_descriptor",
":",
"DataAndMetadata",
".",
"DataDescriptor",
"=",
"None",
")",
"->",
"DataAndMetadata",
".",
"DataAndMetadata",
":",
"..."
] |
Create a data_and_metadata object from data.
:param data: an ndarray of data.
:param intensity_calibration: An optional calibration object.
:param dimensional_calibrations: An optional list of calibration objects.
:param metadata: A dict of metadata.
:param timestamp: A datetime object.
:param data_descriptor: A data descriptor describing the dimensions.
.. versionadded:: 1.0
Scriptable: Yes
|
[
"Create",
"a",
"data_and_metadata",
"object",
"from",
"data",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L1094-L1108
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
API.create_data_and_metadata_from_data
|
def create_data_and_metadata_from_data(self, data: numpy.ndarray, intensity_calibration: Calibration.Calibration=None, dimensional_calibrations: typing.List[Calibration.Calibration]=None, metadata: dict=None, timestamp: str=None) -> DataAndMetadata.DataAndMetadata:
"""Create a data_and_metadata object from data.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.DataItem.create_data_and_metadata` instead.
Scriptable: No
"""
...
|
python
|
def create_data_and_metadata_from_data(self, data: numpy.ndarray, intensity_calibration: Calibration.Calibration=None, dimensional_calibrations: typing.List[Calibration.Calibration]=None, metadata: dict=None, timestamp: str=None) -> DataAndMetadata.DataAndMetadata:
"""Create a data_and_metadata object from data.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.DataItem.create_data_and_metadata` instead.
Scriptable: No
"""
...
|
[
"def",
"create_data_and_metadata_from_data",
"(",
"self",
",",
"data",
":",
"numpy",
".",
"ndarray",
",",
"intensity_calibration",
":",
"Calibration",
".",
"Calibration",
"=",
"None",
",",
"dimensional_calibrations",
":",
"typing",
".",
"List",
"[",
"Calibration",
".",
"Calibration",
"]",
"=",
"None",
",",
"metadata",
":",
"dict",
"=",
"None",
",",
"timestamp",
":",
"str",
"=",
"None",
")",
"->",
"DataAndMetadata",
".",
"DataAndMetadata",
":",
"..."
] |
Create a data_and_metadata object from data.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.DataItem.create_data_and_metadata` instead.
Scriptable: No
|
[
"Create",
"a",
"data_and_metadata",
"object",
"from",
"data",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L1110-L1119
|
train
|
nion-software/nionswift
|
nion/typeshed/API_1_0.py
|
API.create_data_descriptor
|
def create_data_descriptor(self, is_sequence: bool, collection_dimension_count: int, datum_dimension_count: int) -> DataAndMetadata.DataDescriptor:
"""Create a data descriptor.
:param is_sequence: whether the descriptor describes a sequence of data.
:param collection_dimension_count: the number of collection dimensions represented by the descriptor.
:param datum_dimension_count: the number of datum dimensions represented by the descriptor.
.. versionadded:: 1.0
Scriptable: Yes
"""
...
|
python
|
def create_data_descriptor(self, is_sequence: bool, collection_dimension_count: int, datum_dimension_count: int) -> DataAndMetadata.DataDescriptor:
"""Create a data descriptor.
:param is_sequence: whether the descriptor describes a sequence of data.
:param collection_dimension_count: the number of collection dimensions represented by the descriptor.
:param datum_dimension_count: the number of datum dimensions represented by the descriptor.
.. versionadded:: 1.0
Scriptable: Yes
"""
...
|
[
"def",
"create_data_descriptor",
"(",
"self",
",",
"is_sequence",
":",
"bool",
",",
"collection_dimension_count",
":",
"int",
",",
"datum_dimension_count",
":",
"int",
")",
"->",
"DataAndMetadata",
".",
"DataDescriptor",
":",
"..."
] |
Create a data descriptor.
:param is_sequence: whether the descriptor describes a sequence of data.
:param collection_dimension_count: the number of collection dimensions represented by the descriptor.
:param datum_dimension_count: the number of datum dimensions represented by the descriptor.
.. versionadded:: 1.0
Scriptable: Yes
|
[
"Create",
"a",
"data",
"descriptor",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/typeshed/API_1_0.py#L1132-L1143
|
train
|
nion-software/nionswift
|
nion/swift/Inspector.py
|
make_calibration_row_widget
|
def make_calibration_row_widget(ui, calibration_observable, label: str=None):
"""Called when an item (calibration_observable) is inserted into the list widget. Returns a widget."""
calibration_row = ui.create_row_widget()
row_label = ui.create_label_widget(label, properties={"width": 60})
row_label.widget_id = "label"
offset_field = ui.create_line_edit_widget(properties={"width": 60})
offset_field.widget_id = "offset"
scale_field = ui.create_line_edit_widget(properties={"width": 60})
scale_field.widget_id = "scale"
units_field = ui.create_line_edit_widget(properties={"width": 60})
units_field.widget_id = "units"
float_point_4_converter = Converter.FloatToStringConverter(format="{0:.4f}")
offset_field.bind_text(Binding.PropertyBinding(calibration_observable, "offset", converter=float_point_4_converter))
scale_field.bind_text(Binding.PropertyBinding(calibration_observable, "scale", converter=float_point_4_converter))
units_field.bind_text(Binding.PropertyBinding(calibration_observable, "units"))
# notice the binding of calibration_index below.
calibration_row.add(row_label)
calibration_row.add_spacing(12)
calibration_row.add(offset_field)
calibration_row.add_spacing(12)
calibration_row.add(scale_field)
calibration_row.add_spacing(12)
calibration_row.add(units_field)
calibration_row.add_stretch()
return calibration_row
|
python
|
def make_calibration_row_widget(ui, calibration_observable, label: str=None):
"""Called when an item (calibration_observable) is inserted into the list widget. Returns a widget."""
calibration_row = ui.create_row_widget()
row_label = ui.create_label_widget(label, properties={"width": 60})
row_label.widget_id = "label"
offset_field = ui.create_line_edit_widget(properties={"width": 60})
offset_field.widget_id = "offset"
scale_field = ui.create_line_edit_widget(properties={"width": 60})
scale_field.widget_id = "scale"
units_field = ui.create_line_edit_widget(properties={"width": 60})
units_field.widget_id = "units"
float_point_4_converter = Converter.FloatToStringConverter(format="{0:.4f}")
offset_field.bind_text(Binding.PropertyBinding(calibration_observable, "offset", converter=float_point_4_converter))
scale_field.bind_text(Binding.PropertyBinding(calibration_observable, "scale", converter=float_point_4_converter))
units_field.bind_text(Binding.PropertyBinding(calibration_observable, "units"))
# notice the binding of calibration_index below.
calibration_row.add(row_label)
calibration_row.add_spacing(12)
calibration_row.add(offset_field)
calibration_row.add_spacing(12)
calibration_row.add(scale_field)
calibration_row.add_spacing(12)
calibration_row.add(units_field)
calibration_row.add_stretch()
return calibration_row
|
[
"def",
"make_calibration_row_widget",
"(",
"ui",
",",
"calibration_observable",
",",
"label",
":",
"str",
"=",
"None",
")",
":",
"calibration_row",
"=",
"ui",
".",
"create_row_widget",
"(",
")",
"row_label",
"=",
"ui",
".",
"create_label_widget",
"(",
"label",
",",
"properties",
"=",
"{",
"\"width\"",
":",
"60",
"}",
")",
"row_label",
".",
"widget_id",
"=",
"\"label\"",
"offset_field",
"=",
"ui",
".",
"create_line_edit_widget",
"(",
"properties",
"=",
"{",
"\"width\"",
":",
"60",
"}",
")",
"offset_field",
".",
"widget_id",
"=",
"\"offset\"",
"scale_field",
"=",
"ui",
".",
"create_line_edit_widget",
"(",
"properties",
"=",
"{",
"\"width\"",
":",
"60",
"}",
")",
"scale_field",
".",
"widget_id",
"=",
"\"scale\"",
"units_field",
"=",
"ui",
".",
"create_line_edit_widget",
"(",
"properties",
"=",
"{",
"\"width\"",
":",
"60",
"}",
")",
"units_field",
".",
"widget_id",
"=",
"\"units\"",
"float_point_4_converter",
"=",
"Converter",
".",
"FloatToStringConverter",
"(",
"format",
"=",
"\"{0:.4f}\"",
")",
"offset_field",
".",
"bind_text",
"(",
"Binding",
".",
"PropertyBinding",
"(",
"calibration_observable",
",",
"\"offset\"",
",",
"converter",
"=",
"float_point_4_converter",
")",
")",
"scale_field",
".",
"bind_text",
"(",
"Binding",
".",
"PropertyBinding",
"(",
"calibration_observable",
",",
"\"scale\"",
",",
"converter",
"=",
"float_point_4_converter",
")",
")",
"units_field",
".",
"bind_text",
"(",
"Binding",
".",
"PropertyBinding",
"(",
"calibration_observable",
",",
"\"units\"",
")",
")",
"# notice the binding of calibration_index below.",
"calibration_row",
".",
"add",
"(",
"row_label",
")",
"calibration_row",
".",
"add_spacing",
"(",
"12",
")",
"calibration_row",
".",
"add",
"(",
"offset_field",
")",
"calibration_row",
".",
"add_spacing",
"(",
"12",
")",
"calibration_row",
".",
"add",
"(",
"scale_field",
")",
"calibration_row",
".",
"add_spacing",
"(",
"12",
")",
"calibration_row",
".",
"add",
"(",
"units_field",
")",
"calibration_row",
".",
"add_stretch",
"(",
")",
"return",
"calibration_row"
] |
Called when an item (calibration_observable) is inserted into the list widget. Returns a widget.
|
[
"Called",
"when",
"an",
"item",
"(",
"calibration_observable",
")",
"is",
"inserted",
"into",
"the",
"list",
"widget",
".",
"Returns",
"a",
"widget",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Inspector.py#L1067-L1091
|
train
|
nion-software/nionswift
|
nion/swift/Inspector.py
|
InspectorSection.add_widget_to_content
|
def add_widget_to_content(self, widget):
"""Subclasses should call this to add content in the section's top level column."""
self.__section_content_column.add_spacing(4)
self.__section_content_column.add(widget)
|
python
|
def add_widget_to_content(self, widget):
"""Subclasses should call this to add content in the section's top level column."""
self.__section_content_column.add_spacing(4)
self.__section_content_column.add(widget)
|
[
"def",
"add_widget_to_content",
"(",
"self",
",",
"widget",
")",
":",
"self",
".",
"__section_content_column",
".",
"add_spacing",
"(",
"4",
")",
"self",
".",
"__section_content_column",
".",
"add",
"(",
"widget",
")"
] |
Subclasses should call this to add content in the section's top level column.
|
[
"Subclasses",
"should",
"call",
"this",
"to",
"add",
"content",
"in",
"the",
"section",
"s",
"top",
"level",
"column",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Inspector.py#L218-L221
|
train
|
nion-software/nionswift
|
nion/swift/Inspector.py
|
CalibrationsInspectorSection.__create_list_item_widget
|
def __create_list_item_widget(self, ui, calibration_observable):
"""Called when an item (calibration_observable) is inserted into the list widget. Returns a widget."""
calibration_row = make_calibration_row_widget(ui, calibration_observable)
column = ui.create_column_widget()
column.add_spacing(4)
column.add(calibration_row)
return column
|
python
|
def __create_list_item_widget(self, ui, calibration_observable):
"""Called when an item (calibration_observable) is inserted into the list widget. Returns a widget."""
calibration_row = make_calibration_row_widget(ui, calibration_observable)
column = ui.create_column_widget()
column.add_spacing(4)
column.add(calibration_row)
return column
|
[
"def",
"__create_list_item_widget",
"(",
"self",
",",
"ui",
",",
"calibration_observable",
")",
":",
"calibration_row",
"=",
"make_calibration_row_widget",
"(",
"ui",
",",
"calibration_observable",
")",
"column",
"=",
"ui",
".",
"create_column_widget",
"(",
")",
"column",
".",
"add_spacing",
"(",
"4",
")",
"column",
".",
"add",
"(",
"calibration_row",
")",
"return",
"column"
] |
Called when an item (calibration_observable) is inserted into the list widget. Returns a widget.
|
[
"Called",
"when",
"an",
"item",
"(",
"calibration_observable",
")",
"is",
"inserted",
"into",
"the",
"list",
"widget",
".",
"Returns",
"a",
"widget",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Inspector.py#L1211-L1217
|
train
|
nion-software/nionswift
|
nion/swift/HistogramPanel.py
|
AdornmentsCanvasItem._repaint
|
def _repaint(self, drawing_context):
"""Repaint the canvas item. This will occur on a thread."""
# canvas size
canvas_width = self.canvas_size[1]
canvas_height = self.canvas_size[0]
left = self.display_limits[0]
right = self.display_limits[1]
# draw left display limit
if left > 0.0:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.move_to(left * canvas_width, 1)
drawing_context.line_to(left * canvas_width, canvas_height-1)
drawing_context.line_width = 2
drawing_context.stroke_style = "#000"
drawing_context.stroke()
# draw right display limit
if right < 1.0:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.move_to(right * canvas_width, 1)
drawing_context.line_to(right * canvas_width, canvas_height-1)
drawing_context.line_width = 2
drawing_context.stroke_style = "#FFF"
drawing_context.stroke()
# draw border
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.move_to(0,canvas_height)
drawing_context.line_to(canvas_width,canvas_height)
drawing_context.line_width = 1
drawing_context.stroke_style = "#444"
drawing_context.stroke()
|
python
|
def _repaint(self, drawing_context):
"""Repaint the canvas item. This will occur on a thread."""
# canvas size
canvas_width = self.canvas_size[1]
canvas_height = self.canvas_size[0]
left = self.display_limits[0]
right = self.display_limits[1]
# draw left display limit
if left > 0.0:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.move_to(left * canvas_width, 1)
drawing_context.line_to(left * canvas_width, canvas_height-1)
drawing_context.line_width = 2
drawing_context.stroke_style = "#000"
drawing_context.stroke()
# draw right display limit
if right < 1.0:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.move_to(right * canvas_width, 1)
drawing_context.line_to(right * canvas_width, canvas_height-1)
drawing_context.line_width = 2
drawing_context.stroke_style = "#FFF"
drawing_context.stroke()
# draw border
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.move_to(0,canvas_height)
drawing_context.line_to(canvas_width,canvas_height)
drawing_context.line_width = 1
drawing_context.stroke_style = "#444"
drawing_context.stroke()
|
[
"def",
"_repaint",
"(",
"self",
",",
"drawing_context",
")",
":",
"# canvas size",
"canvas_width",
"=",
"self",
".",
"canvas_size",
"[",
"1",
"]",
"canvas_height",
"=",
"self",
".",
"canvas_size",
"[",
"0",
"]",
"left",
"=",
"self",
".",
"display_limits",
"[",
"0",
"]",
"right",
"=",
"self",
".",
"display_limits",
"[",
"1",
"]",
"# draw left display limit",
"if",
"left",
">",
"0.0",
":",
"with",
"drawing_context",
".",
"saver",
"(",
")",
":",
"drawing_context",
".",
"begin_path",
"(",
")",
"drawing_context",
".",
"move_to",
"(",
"left",
"*",
"canvas_width",
",",
"1",
")",
"drawing_context",
".",
"line_to",
"(",
"left",
"*",
"canvas_width",
",",
"canvas_height",
"-",
"1",
")",
"drawing_context",
".",
"line_width",
"=",
"2",
"drawing_context",
".",
"stroke_style",
"=",
"\"#000\"",
"drawing_context",
".",
"stroke",
"(",
")",
"# draw right display limit",
"if",
"right",
"<",
"1.0",
":",
"with",
"drawing_context",
".",
"saver",
"(",
")",
":",
"drawing_context",
".",
"begin_path",
"(",
")",
"drawing_context",
".",
"move_to",
"(",
"right",
"*",
"canvas_width",
",",
"1",
")",
"drawing_context",
".",
"line_to",
"(",
"right",
"*",
"canvas_width",
",",
"canvas_height",
"-",
"1",
")",
"drawing_context",
".",
"line_width",
"=",
"2",
"drawing_context",
".",
"stroke_style",
"=",
"\"#FFF\"",
"drawing_context",
".",
"stroke",
"(",
")",
"# draw border",
"with",
"drawing_context",
".",
"saver",
"(",
")",
":",
"drawing_context",
".",
"begin_path",
"(",
")",
"drawing_context",
".",
"move_to",
"(",
"0",
",",
"canvas_height",
")",
"drawing_context",
".",
"line_to",
"(",
"canvas_width",
",",
"canvas_height",
")",
"drawing_context",
".",
"line_width",
"=",
"1",
"drawing_context",
".",
"stroke_style",
"=",
"\"#444\"",
"drawing_context",
".",
"stroke",
"(",
")"
] |
Repaint the canvas item. This will occur on a thread.
|
[
"Repaint",
"the",
"canvas",
"item",
".",
"This",
"will",
"occur",
"on",
"a",
"thread",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/HistogramPanel.py#L42-L79
|
train
|
nion-software/nionswift
|
nion/swift/HistogramPanel.py
|
SimpleLineGraphCanvasItem._repaint
|
def _repaint(self, drawing_context):
"""Repaint the canvas item. This will occur on a thread."""
# canvas size
canvas_width = self.canvas_size[1]
canvas_height = self.canvas_size[0]
# draw background
if self.background_color:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.move_to(0,0)
drawing_context.line_to(canvas_width,0)
drawing_context.line_to(canvas_width,canvas_height)
drawing_context.line_to(0,canvas_height)
drawing_context.close_path()
drawing_context.fill_style = self.background_color
drawing_context.fill()
# draw the data, if any
if (self.data is not None and len(self.data) > 0):
# draw the histogram itself
with drawing_context.saver():
drawing_context.begin_path()
binned_data = Image.rebin_1d(self.data, int(canvas_width), self.__retained_rebin_1d) if int(canvas_width) != self.data.shape[0] else self.data
for i in range(canvas_width):
drawing_context.move_to(i, canvas_height)
drawing_context.line_to(i, canvas_height * (1 - binned_data[i]))
drawing_context.line_width = 1
drawing_context.stroke_style = "#444"
drawing_context.stroke()
|
python
|
def _repaint(self, drawing_context):
"""Repaint the canvas item. This will occur on a thread."""
# canvas size
canvas_width = self.canvas_size[1]
canvas_height = self.canvas_size[0]
# draw background
if self.background_color:
with drawing_context.saver():
drawing_context.begin_path()
drawing_context.move_to(0,0)
drawing_context.line_to(canvas_width,0)
drawing_context.line_to(canvas_width,canvas_height)
drawing_context.line_to(0,canvas_height)
drawing_context.close_path()
drawing_context.fill_style = self.background_color
drawing_context.fill()
# draw the data, if any
if (self.data is not None and len(self.data) > 0):
# draw the histogram itself
with drawing_context.saver():
drawing_context.begin_path()
binned_data = Image.rebin_1d(self.data, int(canvas_width), self.__retained_rebin_1d) if int(canvas_width) != self.data.shape[0] else self.data
for i in range(canvas_width):
drawing_context.move_to(i, canvas_height)
drawing_context.line_to(i, canvas_height * (1 - binned_data[i]))
drawing_context.line_width = 1
drawing_context.stroke_style = "#444"
drawing_context.stroke()
|
[
"def",
"_repaint",
"(",
"self",
",",
"drawing_context",
")",
":",
"# canvas size",
"canvas_width",
"=",
"self",
".",
"canvas_size",
"[",
"1",
"]",
"canvas_height",
"=",
"self",
".",
"canvas_size",
"[",
"0",
"]",
"# draw background",
"if",
"self",
".",
"background_color",
":",
"with",
"drawing_context",
".",
"saver",
"(",
")",
":",
"drawing_context",
".",
"begin_path",
"(",
")",
"drawing_context",
".",
"move_to",
"(",
"0",
",",
"0",
")",
"drawing_context",
".",
"line_to",
"(",
"canvas_width",
",",
"0",
")",
"drawing_context",
".",
"line_to",
"(",
"canvas_width",
",",
"canvas_height",
")",
"drawing_context",
".",
"line_to",
"(",
"0",
",",
"canvas_height",
")",
"drawing_context",
".",
"close_path",
"(",
")",
"drawing_context",
".",
"fill_style",
"=",
"self",
".",
"background_color",
"drawing_context",
".",
"fill",
"(",
")",
"# draw the data, if any",
"if",
"(",
"self",
".",
"data",
"is",
"not",
"None",
"and",
"len",
"(",
"self",
".",
"data",
")",
">",
"0",
")",
":",
"# draw the histogram itself",
"with",
"drawing_context",
".",
"saver",
"(",
")",
":",
"drawing_context",
".",
"begin_path",
"(",
")",
"binned_data",
"=",
"Image",
".",
"rebin_1d",
"(",
"self",
".",
"data",
",",
"int",
"(",
"canvas_width",
")",
",",
"self",
".",
"__retained_rebin_1d",
")",
"if",
"int",
"(",
"canvas_width",
")",
"!=",
"self",
".",
"data",
".",
"shape",
"[",
"0",
"]",
"else",
"self",
".",
"data",
"for",
"i",
"in",
"range",
"(",
"canvas_width",
")",
":",
"drawing_context",
".",
"move_to",
"(",
"i",
",",
"canvas_height",
")",
"drawing_context",
".",
"line_to",
"(",
"i",
",",
"canvas_height",
"*",
"(",
"1",
"-",
"binned_data",
"[",
"i",
"]",
")",
")",
"drawing_context",
".",
"line_width",
"=",
"1",
"drawing_context",
".",
"stroke_style",
"=",
"\"#444\"",
"drawing_context",
".",
"stroke",
"(",
")"
] |
Repaint the canvas item. This will occur on a thread.
|
[
"Repaint",
"the",
"canvas",
"item",
".",
"This",
"will",
"occur",
"on",
"a",
"thread",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/HistogramPanel.py#L124-L155
|
train
|
nion-software/nionswift
|
nion/swift/HistogramPanel.py
|
ColorMapCanvasItem.color_map_data
|
def color_map_data(self, data: numpy.ndarray) -> None:
"""Set the data and mark the canvas item for updating.
Data should be an ndarray of shape (256, 3) with type uint8
"""
self.__color_map_data = data
self.update()
|
python
|
def color_map_data(self, data: numpy.ndarray) -> None:
"""Set the data and mark the canvas item for updating.
Data should be an ndarray of shape (256, 3) with type uint8
"""
self.__color_map_data = data
self.update()
|
[
"def",
"color_map_data",
"(",
"self",
",",
"data",
":",
"numpy",
".",
"ndarray",
")",
"->",
"None",
":",
"self",
".",
"__color_map_data",
"=",
"data",
"self",
".",
"update",
"(",
")"
] |
Set the data and mark the canvas item for updating.
Data should be an ndarray of shape (256, 3) with type uint8
|
[
"Set",
"the",
"data",
"and",
"mark",
"the",
"canvas",
"item",
"for",
"updating",
"."
] |
d43693eaf057b8683b9638e575000f055fede452
|
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/HistogramPanel.py#L171-L177
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.