repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
brandonxiang/geojson-python-utils | geojson_utils/geojson_utils.py | destination_point | python | def destination_point(point, brng, dist):
dist = float(dist) / 6371 # convert dist to angular distance in radians
brng = number2radius(brng)
lon1 = number2radius(point['coordinates'][0])
lat1 = number2radius(point['coordinates'][1])
lat2 = math.asin(math.sin(lat1) * math.cos(dist) +
math.cos(lat1) * math.sin(dist) * math.cos(brng))
lon2 = lon1 + math.atan2(math.sin(brng) * math.sin(dist) *
math.cos(lat1), math.cos(dist) - math.sin(lat1) * math.sin(lat2))
lon2 = (lon2 + 3 * math.pi) % (2 * math.pi) - math.pi # normalise to -180 degree +180 degree
return {'type': 'Point', 'coordinates': [number2degree(lon2), number2degree(lat2)]} | Calculate a destination Point base on a base point and a distance
Keyword arguments:
pt -- polygon geojson object
brng -- an angle in degrees
dist -- distance in Kilometer between destination and base point
return destination point object | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/geojson_utils.py#L351-L375 | [
"def number2radius(number):\n \"\"\"\n convert degree into radius\n\n Keyword arguments:\n number -- degree\n\n return radius\n \"\"\"\n return number * math.pi / 180\n",
"def number2degree(number):\n \"\"\"\n convert radius into degree\n\n Keyword arguments:\n number -- radius\n\... | import math
def linestrings_intersect(line1, line2):
"""
To valid whether linestrings from geojson are intersected with each other.
reference: http://www.kevlindev.com/gui/math/intersection/Intersection.js
Keyword arguments:
line1 -- first line geojson object
line2 -- second line geojson object
if(line1 intersects with other) return intersect point array else empty array
"""
intersects = []
for i in range(0, len(line1['coordinates']) - 1):
for j in range(0, len(line2['coordinates']) - 1):
a1_x = line1['coordinates'][i][1]
a1_y = line1['coordinates'][i][0]
a2_x = line1['coordinates'][i + 1][1]
a2_y = line1['coordinates'][i + 1][0]
b1_x = line2['coordinates'][j][1]
b1_y = line2['coordinates'][j][0]
b2_x = line2['coordinates'][j + 1][1]
b2_y = line2['coordinates'][j + 1][0]
ua_t = (b2_x - b1_x) * (a1_y - b1_y) - \
(b2_y - b1_y) * (a1_x - b1_x)
ub_t = (a2_x - a1_x) * (a1_y - b1_y) - \
(a2_y - a1_y) * (a1_x - b1_x)
u_b = (b2_y - b1_y) * (a2_x - a1_x) - (b2_x - b1_x) * (a2_y - a1_y)
if not u_b == 0:
u_a = ua_t / u_b
u_b = ub_t / u_b
if 0 <= u_a and u_a <= 1 and 0 <= u_b and u_b <= 1:
intersects.append({'type': 'Point', 'coordinates': [
a1_x + u_a * (a2_x - a1_x), a1_y + u_a * (a2_y - a1_y)]})
# if len(intersects) == 0:
# intersects = False
return intersects
def _bbox_around_polycoords(coords):
"""
bounding box
"""
x_all = []
y_all = []
for first in coords[0]:
x_all.append(first[1])
y_all.append(first[0])
return [min(x_all), min(y_all), max(x_all), max(y_all)]
def _point_in_bbox(point, bounds):
"""
valid whether the point is inside the bounding box
"""
return not(point['coordinates'][1] < bounds[0] or point['coordinates'][1] > bounds[2]
or point['coordinates'][0] < bounds[1] or point['coordinates'][0] > bounds[3])
def _pnpoly(x, y, coords):
"""
the algorithm to judge whether the point is located in polygon
reference: https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#Explanation
"""
vert = [[0, 0]]
for coord in coords:
for node in coord:
vert.append(node)
vert.append(coord[0])
vert.append([0, 0])
inside = False
i = 0
j = len(vert) - 1
while i < len(vert):
if ((vert[i][0] > y) != (vert[j][0] > y)) and (x < (vert[j][1] - vert[i][1])
* (y - vert[i][0]) / (vert[j][0] - vert[i][0]) + vert[i][1]):
inside = not inside
j = i
i += 1
return inside
def _point_in_polygon(point, coords):
inside_box = False
for coord in coords:
if inside_box:
break
if _point_in_bbox(point, _bbox_around_polycoords(coord)):
inside_box = True
if not inside_box:
return False
inside_poly = False
for coord in coords:
if inside_poly:
break
if _pnpoly(point['coordinates'][1], point['coordinates'][0], coord):
inside_poly = True
return inside_poly
def point_in_polygon(point, poly):
"""
valid whether the point is located in a polygon
Keyword arguments:
point -- point geojson object
poly -- polygon geojson object
if(point inside poly) return true else false
"""
coords = [poly['coordinates']] if poly[
'type'] == 'Polygon' else poly['coordinates']
return _point_in_polygon(point, coords)
def point_in_multipolygon(point, multipoly):
"""
valid whether the point is located in a mulitpolygon (donut polygon is not supported)
Keyword arguments:
point -- point geojson object
multipoly -- multipolygon geojson object
if(point inside multipoly) return true else false
"""
coords_array = [multipoly['coordinates']] if multipoly[
'type'] == "MultiPolygon" else multipoly['coordinates']
for coords in coords_array:
if _point_in_polygon(point, coords):
return True
return False
def number2radius(number):
"""
convert degree into radius
Keyword arguments:
number -- degree
return radius
"""
return number * math.pi / 180
def number2degree(number):
"""
convert radius into degree
Keyword arguments:
number -- radius
return degree
"""
return number * 180 / math.pi
def draw_circle(radius_in_meters, center_point, steps=15):
"""
get a circle shape polygon based on centerPoint and radius
Keyword arguments:
point1 -- point one geojson object
point2 -- point two geojson object
if(point inside multipoly) return true else false
"""
steps = steps if steps > 15 else 15
center = [center_point['coordinates'][1], center_point['coordinates'][0]]
dist = (radius_in_meters / 1000) / 6371
# convert meters to radiant
rad_center = [number2radius(center[0]), number2radius(center[1])]
# 15 sided circle
poly = []
for step in range(0, steps):
brng = 2 * math.pi * step / steps
lat = math.asin(math.sin(rad_center[0]) * math.cos(dist) +
math.cos(rad_center[0]) * math.sin(dist) * math.cos(brng))
lng = rad_center[1] + math.atan2(math.sin(brng) * math.sin(dist)
* math.cos(rad_center[0]), math.cos(dist) - math.sin(rad_center[0]) * math.sin(lat))
poly.append([number2degree(lng), number2degree(lat)])
return {"type": "Polygon", "coordinates": [poly]}
def rectangle_centroid(rectangle):
"""
get the centroid of the rectangle
Keyword arguments:
rectangle -- polygon geojson object
return centroid
"""
bbox = rectangle['coordinates'][0]
xmin = bbox[0][0]
ymin = bbox[0][1]
xmax = bbox[2][0]
ymax = bbox[2][1]
xwidth = xmax - xmin
ywidth = ymax - ymin
return {'type': 'Point', 'coordinates': [xmin + xwidth / 2, ymin + ywidth / 2]}
def point_distance(point1, point2):
"""
calculate the distance between two points on the sphere like google map
reference http://www.movable-type.co.uk/scripts/latlong.html
Keyword arguments:
point1 -- point one geojson object
point2 -- point two geojson object
return distance
"""
lon1 = point1['coordinates'][0]
lat1 = point1['coordinates'][1]
lon2 = point2['coordinates'][0]
lat2 = point2['coordinates'][1]
deg_lat = number2radius(lat2 - lat1)
deg_lon = number2radius(lon2 - lon1)
a = math.pow(math.sin(deg_lat / 2), 2) + math.cos(number2radius(lat1)) * \
math.cos(number2radius(lat2)) * math.pow(math.sin(deg_lon / 2), 2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
return (6371 * c) * 1000
def point_distance_ellipsode(point1,point2):
"""
calculate the distance between two points on the ellipsode based on point1
Keyword arguments:
point1 -- point one geojson object
point2 -- point two geojson object
return distance
"""
a = 6378137
f = 1/298.25722
b = a - a*f
e = math.sqrt((a*a-b*b)/(a*a))
lon1 = point1['coordinates'][0]
lat1 = point1['coordinates'][1]
lon2 = point1['coordinates'][0]
lat2 = point2['coordinates'][1]
M = a*(1-e*e)*math.pow(1-math.pow(e*math.sin(number2radius(lat1)),2),-1.5)
N = a/(math.pow(1-math.pow(e*math.sin(number2radius(lat1)),2),0.5))
distance_lat = M*number2radius(lat2-lat1)
distance_lon = N*math.cos(number2radius(lat1))*(lon2-lon1)*3600*math.sin(1/3600*math.pi/180)
return math.sqrt(distance_lat*distance_lat+distance_lon*distance_lon)
def geometry_within_radius(geometry, center, radius):
"""
To valid whether point or linestring or polygon is inside a radius around a center
Keyword arguments:
geometry -- point/linstring/polygon geojson object
center -- point geojson object
radius -- radius
if(geometry inside radius) return true else false
"""
if geometry['type'] == 'Point':
return point_distance(geometry, center) <= radius
elif geometry['type'] == 'LineString' or geometry['type'] == 'Polygon':
point = {}
# it's enough to check the exterior ring of the Polygon
coordinates = geometry['coordinates'][0] if geometry['type'] == 'Polygon' else geometry['coordinates']
for coordinate in coordinates:
point['coordinates'] = coordinate
if point_distance(point, center) > radius:
return False
return True
def area(poly):
"""
calculate the area of polygon
Keyword arguments:
poly -- polygon geojson object
return polygon area
"""
poly_area = 0
# TODO: polygon holes at coordinates[1]
points = poly['coordinates'][0]
j = len(points) - 1
count = len(points)
for i in range(0, count):
p1_x = points[i][1]
p1_y = points[i][0]
p2_x = points[j][1]
p2_y = points[j][0]
poly_area += p1_x * p2_y
poly_area -= p1_y * p2_x
j = i
poly_area /= 2
return poly_area
def centroid(poly):
"""
get the centroid of polygon
adapted from http://paulbourke.net/geometry/polyarea/javascript.txt
Keyword arguments:
poly -- polygon geojson object
return polygon centroid
"""
f_total = 0
x_total = 0
y_total = 0
# TODO: polygon holes at coordinates[1]
points = poly['coordinates'][0]
j = len(points) - 1
count = len(points)
for i in range(0, count):
p1_x = points[i][1]
p1_y = points[i][0]
p2_x = points[j][1]
p2_y = points[j][0]
f_total = p1_x * p2_y - p2_x * p1_y
x_total += (p1_x + p2_x) * f_total
y_total += (p1_y + p2_y) * f_total
j = i
six_area = area(poly) * 6
return {'type': 'Point', 'coordinates': [y_total / six_area, x_total / six_area]}
def simplify(source, kink=20):
"""
source[] array of geojson points
kink in metres, kinks above this depth kept
kink depth is the height of the triangle abc where a-b and b-c are two consecutive line segments
"""
source_coord = map(lambda o: {"lng": o.coordinates[0], "lat": o.coordinates[1]}, source)
# count, n_stack, n_dest, start, end, i, sig;
# dev_sqr, max_dev_sqr, band_sqr;
# x12, y12, d12, x13, y13, d13, x23, y23, d23;
F = (math.pi / 180.0) * 0.5
index = [] # aray of indexes of source points to include in the reduced line
sig_start = [] # indices of start & end of working section
sig_end = []
# check for simple cases
count = len(source_coord)
if count < 3:
return source_coord # one or two points
# more complex case. initialize stack
band_sqr = kink * 360.0 / (2.0 * math.pi * 6378137.0) # Now in degrees
band_sqr *= band_sqr
n_dest = 0
sig_start[0] = 0
sig_end[0] = count - 1
n_stack = 1
# while the stack is not empty
while n_stack > 0:
# ... pop the top-most entries off the stacks
start = sig_start[n_stack - 1]
end = sig_end[n_stack - 1]
n_stack -= 1
if (end - start) > 1: #any intermediate points ?
# ... yes, so find most deviant intermediate point to either side of line joining start & end points
x12 = source[end]["lng"] - source[start]["lng"]
y12 = source[end]["lat"] - source[start]["lat"]
if math.fabs(x12) > 180.0:
x12 = 360.0 - math.fabs(x12)
x12 *= math.cos(F * (source[end]["lat"] + source[start]["lat"])) # use avg lat to reduce lng
d12 = (x12 * x12) + (y12 * y12)
i = start + 1
sig = start
max_dev_sqr = -1.0
while i < end:
x13 = source[i]["lng"] - source[start]["lng"]
y13 = source[i]["lat"] - source[start]["lat"]
if math.fabs(x13) > 180.0:
x13 = 360.0 - math.fabs(x13)
x13 *= math.cos(F * (source[i]["lat"] + source[start]["lat"]))
d13 = (x13 * x13) + (y13 * y13)
x23 = source[i]["lng"] - source[end]["lng"]
y23 = source[i]["lat"] - source[end]["lat"]
if math.fabs(x23) > 180.0:
x23 = 360.0 - math.fabs(x23)
x23 *= math.cos(F * (source[i]["lat"] + source[end]["lat"]))
d23 = (x23 * x23) + (y23 * y23)
if d13 >= (d12 + d23):
dev_sqr = d23
elif d23 >= (d12 + d13):
dev_sqr = d13
else:
dev_sqr = (x13 * y12 - y13 * x12) * (x13 * y12 - y13 * x12) / d12 # solve triangle
if dev_sqr > max_dev_sqr:
sig = i
max_dev_sqr = dev_sqr
i += 1
if max_dev_sqr < band_sqr: # is there a sig. intermediate point ?
#... no, so transfer current start point
index[n_dest] = start
n_dest += 1
else: # ... yes, so push two sub-sections on stack for further processing
n_stack += 1
sig_start[n_stack - 1] = sig
sig_end[n_stack - 1] = end
n_stack += 1
sig_start[n_stack - 1] = start
sig_end[n_stack - 1] = sig
else: # ... no intermediate points, so transfer current start point
index[n_dest] = start
n_dest += 1
# transfer last point
index[n_dest] = count - 1
n_dest += 1
# make return array
r = []
for i in range(0, n_dest):
r.append(source_coord[index[i]])
return map(lambda o: {"type": "Point","coordinates": [o.lng, o.lat]}, r)
|
brandonxiang/geojson-python-utils | geojson_utils/geojson_utils.py | simplify | python | def simplify(source, kink=20):
source_coord = map(lambda o: {"lng": o.coordinates[0], "lat": o.coordinates[1]}, source)
# count, n_stack, n_dest, start, end, i, sig;
# dev_sqr, max_dev_sqr, band_sqr;
# x12, y12, d12, x13, y13, d13, x23, y23, d23;
F = (math.pi / 180.0) * 0.5
index = [] # aray of indexes of source points to include in the reduced line
sig_start = [] # indices of start & end of working section
sig_end = []
# check for simple cases
count = len(source_coord)
if count < 3:
return source_coord # one or two points
# more complex case. initialize stack
band_sqr = kink * 360.0 / (2.0 * math.pi * 6378137.0) # Now in degrees
band_sqr *= band_sqr
n_dest = 0
sig_start[0] = 0
sig_end[0] = count - 1
n_stack = 1
# while the stack is not empty
while n_stack > 0:
# ... pop the top-most entries off the stacks
start = sig_start[n_stack - 1]
end = sig_end[n_stack - 1]
n_stack -= 1
if (end - start) > 1: #any intermediate points ?
# ... yes, so find most deviant intermediate point to either side of line joining start & end points
x12 = source[end]["lng"] - source[start]["lng"]
y12 = source[end]["lat"] - source[start]["lat"]
if math.fabs(x12) > 180.0:
x12 = 360.0 - math.fabs(x12)
x12 *= math.cos(F * (source[end]["lat"] + source[start]["lat"])) # use avg lat to reduce lng
d12 = (x12 * x12) + (y12 * y12)
i = start + 1
sig = start
max_dev_sqr = -1.0
while i < end:
x13 = source[i]["lng"] - source[start]["lng"]
y13 = source[i]["lat"] - source[start]["lat"]
if math.fabs(x13) > 180.0:
x13 = 360.0 - math.fabs(x13)
x13 *= math.cos(F * (source[i]["lat"] + source[start]["lat"]))
d13 = (x13 * x13) + (y13 * y13)
x23 = source[i]["lng"] - source[end]["lng"]
y23 = source[i]["lat"] - source[end]["lat"]
if math.fabs(x23) > 180.0:
x23 = 360.0 - math.fabs(x23)
x23 *= math.cos(F * (source[i]["lat"] + source[end]["lat"]))
d23 = (x23 * x23) + (y23 * y23)
if d13 >= (d12 + d23):
dev_sqr = d23
elif d23 >= (d12 + d13):
dev_sqr = d13
else:
dev_sqr = (x13 * y12 - y13 * x12) * (x13 * y12 - y13 * x12) / d12 # solve triangle
if dev_sqr > max_dev_sqr:
sig = i
max_dev_sqr = dev_sqr
i += 1
if max_dev_sqr < band_sqr: # is there a sig. intermediate point ?
#... no, so transfer current start point
index[n_dest] = start
n_dest += 1
else: # ... yes, so push two sub-sections on stack for further processing
n_stack += 1
sig_start[n_stack - 1] = sig
sig_end[n_stack - 1] = end
n_stack += 1
sig_start[n_stack - 1] = start
sig_end[n_stack - 1] = sig
else: # ... no intermediate points, so transfer current start point
index[n_dest] = start
n_dest += 1
# transfer last point
index[n_dest] = count - 1
n_dest += 1
# make return array
r = []
for i in range(0, n_dest):
r.append(source_coord[index[i]])
return map(lambda o: {"type": "Point","coordinates": [o.lng, o.lat]}, r) | source[] array of geojson points
kink in metres, kinks above this depth kept
kink depth is the height of the triangle abc where a-b and b-c are two consecutive line segments | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/geojson_utils.py#L378-L478 | null | import math
def linestrings_intersect(line1, line2):
"""
To valid whether linestrings from geojson are intersected with each other.
reference: http://www.kevlindev.com/gui/math/intersection/Intersection.js
Keyword arguments:
line1 -- first line geojson object
line2 -- second line geojson object
if(line1 intersects with other) return intersect point array else empty array
"""
intersects = []
for i in range(0, len(line1['coordinates']) - 1):
for j in range(0, len(line2['coordinates']) - 1):
a1_x = line1['coordinates'][i][1]
a1_y = line1['coordinates'][i][0]
a2_x = line1['coordinates'][i + 1][1]
a2_y = line1['coordinates'][i + 1][0]
b1_x = line2['coordinates'][j][1]
b1_y = line2['coordinates'][j][0]
b2_x = line2['coordinates'][j + 1][1]
b2_y = line2['coordinates'][j + 1][0]
ua_t = (b2_x - b1_x) * (a1_y - b1_y) - \
(b2_y - b1_y) * (a1_x - b1_x)
ub_t = (a2_x - a1_x) * (a1_y - b1_y) - \
(a2_y - a1_y) * (a1_x - b1_x)
u_b = (b2_y - b1_y) * (a2_x - a1_x) - (b2_x - b1_x) * (a2_y - a1_y)
if not u_b == 0:
u_a = ua_t / u_b
u_b = ub_t / u_b
if 0 <= u_a and u_a <= 1 and 0 <= u_b and u_b <= 1:
intersects.append({'type': 'Point', 'coordinates': [
a1_x + u_a * (a2_x - a1_x), a1_y + u_a * (a2_y - a1_y)]})
# if len(intersects) == 0:
# intersects = False
return intersects
def _bbox_around_polycoords(coords):
"""
bounding box
"""
x_all = []
y_all = []
for first in coords[0]:
x_all.append(first[1])
y_all.append(first[0])
return [min(x_all), min(y_all), max(x_all), max(y_all)]
def _point_in_bbox(point, bounds):
"""
valid whether the point is inside the bounding box
"""
return not(point['coordinates'][1] < bounds[0] or point['coordinates'][1] > bounds[2]
or point['coordinates'][0] < bounds[1] or point['coordinates'][0] > bounds[3])
def _pnpoly(x, y, coords):
"""
the algorithm to judge whether the point is located in polygon
reference: https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#Explanation
"""
vert = [[0, 0]]
for coord in coords:
for node in coord:
vert.append(node)
vert.append(coord[0])
vert.append([0, 0])
inside = False
i = 0
j = len(vert) - 1
while i < len(vert):
if ((vert[i][0] > y) != (vert[j][0] > y)) and (x < (vert[j][1] - vert[i][1])
* (y - vert[i][0]) / (vert[j][0] - vert[i][0]) + vert[i][1]):
inside = not inside
j = i
i += 1
return inside
def _point_in_polygon(point, coords):
inside_box = False
for coord in coords:
if inside_box:
break
if _point_in_bbox(point, _bbox_around_polycoords(coord)):
inside_box = True
if not inside_box:
return False
inside_poly = False
for coord in coords:
if inside_poly:
break
if _pnpoly(point['coordinates'][1], point['coordinates'][0], coord):
inside_poly = True
return inside_poly
def point_in_polygon(point, poly):
"""
valid whether the point is located in a polygon
Keyword arguments:
point -- point geojson object
poly -- polygon geojson object
if(point inside poly) return true else false
"""
coords = [poly['coordinates']] if poly[
'type'] == 'Polygon' else poly['coordinates']
return _point_in_polygon(point, coords)
def point_in_multipolygon(point, multipoly):
"""
valid whether the point is located in a mulitpolygon (donut polygon is not supported)
Keyword arguments:
point -- point geojson object
multipoly -- multipolygon geojson object
if(point inside multipoly) return true else false
"""
coords_array = [multipoly['coordinates']] if multipoly[
'type'] == "MultiPolygon" else multipoly['coordinates']
for coords in coords_array:
if _point_in_polygon(point, coords):
return True
return False
def number2radius(number):
"""
convert degree into radius
Keyword arguments:
number -- degree
return radius
"""
return number * math.pi / 180
def number2degree(number):
"""
convert radius into degree
Keyword arguments:
number -- radius
return degree
"""
return number * 180 / math.pi
def draw_circle(radius_in_meters, center_point, steps=15):
"""
get a circle shape polygon based on centerPoint and radius
Keyword arguments:
point1 -- point one geojson object
point2 -- point two geojson object
if(point inside multipoly) return true else false
"""
steps = steps if steps > 15 else 15
center = [center_point['coordinates'][1], center_point['coordinates'][0]]
dist = (radius_in_meters / 1000) / 6371
# convert meters to radiant
rad_center = [number2radius(center[0]), number2radius(center[1])]
# 15 sided circle
poly = []
for step in range(0, steps):
brng = 2 * math.pi * step / steps
lat = math.asin(math.sin(rad_center[0]) * math.cos(dist) +
math.cos(rad_center[0]) * math.sin(dist) * math.cos(brng))
lng = rad_center[1] + math.atan2(math.sin(brng) * math.sin(dist)
* math.cos(rad_center[0]), math.cos(dist) - math.sin(rad_center[0]) * math.sin(lat))
poly.append([number2degree(lng), number2degree(lat)])
return {"type": "Polygon", "coordinates": [poly]}
def rectangle_centroid(rectangle):
"""
get the centroid of the rectangle
Keyword arguments:
rectangle -- polygon geojson object
return centroid
"""
bbox = rectangle['coordinates'][0]
xmin = bbox[0][0]
ymin = bbox[0][1]
xmax = bbox[2][0]
ymax = bbox[2][1]
xwidth = xmax - xmin
ywidth = ymax - ymin
return {'type': 'Point', 'coordinates': [xmin + xwidth / 2, ymin + ywidth / 2]}
def point_distance(point1, point2):
"""
calculate the distance between two points on the sphere like google map
reference http://www.movable-type.co.uk/scripts/latlong.html
Keyword arguments:
point1 -- point one geojson object
point2 -- point two geojson object
return distance
"""
lon1 = point1['coordinates'][0]
lat1 = point1['coordinates'][1]
lon2 = point2['coordinates'][0]
lat2 = point2['coordinates'][1]
deg_lat = number2radius(lat2 - lat1)
deg_lon = number2radius(lon2 - lon1)
a = math.pow(math.sin(deg_lat / 2), 2) + math.cos(number2radius(lat1)) * \
math.cos(number2radius(lat2)) * math.pow(math.sin(deg_lon / 2), 2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
return (6371 * c) * 1000
def point_distance_ellipsode(point1,point2):
"""
calculate the distance between two points on the ellipsode based on point1
Keyword arguments:
point1 -- point one geojson object
point2 -- point two geojson object
return distance
"""
a = 6378137
f = 1/298.25722
b = a - a*f
e = math.sqrt((a*a-b*b)/(a*a))
lon1 = point1['coordinates'][0]
lat1 = point1['coordinates'][1]
lon2 = point1['coordinates'][0]
lat2 = point2['coordinates'][1]
M = a*(1-e*e)*math.pow(1-math.pow(e*math.sin(number2radius(lat1)),2),-1.5)
N = a/(math.pow(1-math.pow(e*math.sin(number2radius(lat1)),2),0.5))
distance_lat = M*number2radius(lat2-lat1)
distance_lon = N*math.cos(number2radius(lat1))*(lon2-lon1)*3600*math.sin(1/3600*math.pi/180)
return math.sqrt(distance_lat*distance_lat+distance_lon*distance_lon)
def geometry_within_radius(geometry, center, radius):
"""
To valid whether point or linestring or polygon is inside a radius around a center
Keyword arguments:
geometry -- point/linstring/polygon geojson object
center -- point geojson object
radius -- radius
if(geometry inside radius) return true else false
"""
if geometry['type'] == 'Point':
return point_distance(geometry, center) <= radius
elif geometry['type'] == 'LineString' or geometry['type'] == 'Polygon':
point = {}
# it's enough to check the exterior ring of the Polygon
coordinates = geometry['coordinates'][0] if geometry['type'] == 'Polygon' else geometry['coordinates']
for coordinate in coordinates:
point['coordinates'] = coordinate
if point_distance(point, center) > radius:
return False
return True
def area(poly):
"""
calculate the area of polygon
Keyword arguments:
poly -- polygon geojson object
return polygon area
"""
poly_area = 0
# TODO: polygon holes at coordinates[1]
points = poly['coordinates'][0]
j = len(points) - 1
count = len(points)
for i in range(0, count):
p1_x = points[i][1]
p1_y = points[i][0]
p2_x = points[j][1]
p2_y = points[j][0]
poly_area += p1_x * p2_y
poly_area -= p1_y * p2_x
j = i
poly_area /= 2
return poly_area
def centroid(poly):
"""
get the centroid of polygon
adapted from http://paulbourke.net/geometry/polyarea/javascript.txt
Keyword arguments:
poly -- polygon geojson object
return polygon centroid
"""
f_total = 0
x_total = 0
y_total = 0
# TODO: polygon holes at coordinates[1]
points = poly['coordinates'][0]
j = len(points) - 1
count = len(points)
for i in range(0, count):
p1_x = points[i][1]
p1_y = points[i][0]
p2_x = points[j][1]
p2_y = points[j][0]
f_total = p1_x * p2_y - p2_x * p1_y
x_total += (p1_x + p2_x) * f_total
y_total += (p1_y + p2_y) * f_total
j = i
six_area = area(poly) * 6
return {'type': 'Point', 'coordinates': [y_total / six_area, x_total / six_area]}
def destination_point(point, brng, dist):
"""
Calculate a destination Point base on a base point and a distance
Keyword arguments:
pt -- polygon geojson object
brng -- an angle in degrees
dist -- distance in Kilometer between destination and base point
return destination point object
"""
dist = float(dist) / 6371 # convert dist to angular distance in radians
brng = number2radius(brng)
lon1 = number2radius(point['coordinates'][0])
lat1 = number2radius(point['coordinates'][1])
lat2 = math.asin(math.sin(lat1) * math.cos(dist) +
math.cos(lat1) * math.sin(dist) * math.cos(brng))
lon2 = lon1 + math.atan2(math.sin(brng) * math.sin(dist) *
math.cos(lat1), math.cos(dist) - math.sin(lat1) * math.sin(lat2))
lon2 = (lon2 + 3 * math.pi) % (2 * math.pi) - math.pi # normalise to -180 degree +180 degree
return {'type': 'Point', 'coordinates': [number2degree(lon2), number2degree(lat2)]}
|
brandonxiang/geojson-python-utils | geojson_utils/coordTransform_utils.py | geocode | python | def geocode(address):
geocoding = {'s': 'rsv3',
'key': key,
'city': '全国',
'address': address}
res = requests.get(
"http://restapi.amap.com/v3/geocode/geo", params=geocoding)
if res.status_code == 200:
json = res.json()
status = json.get('status')
count = json.get('count')
if status == '1' and int(count) >= 1:
geocodes = json.get('geocodes')[0]
lng = float(geocodes.get('location').split(',')[0])
lat = float(geocodes.get('location').split(',')[1])
return [lng, lat]
else:
return None
else:
return None | 利用百度geocoding服务解析地址获取位置坐标
:param address:需要解析的地址
:return: | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/coordTransform_utils.py#L14-L38 | null | # -*- coding: utf-8 -*-
from __future__ import print_function
import json
import requests
import math
key = 'your key here' # 这里填写你的高德api的key
x_pi = 3.14159265358979324 * 3000.0 / 180.0
pi = 3.1415926535897932384626 # π
a = 6378245.0 # 长半轴
ee = 0.00669342162296594323 # 扁率
def gcj02tobd09(lng, lat):
"""
火星坐标系(GCJ-02)转百度坐标系(BD-09)
谷歌、高德——>百度
:param lng:火星坐标经度
:param lat:火星坐标纬度
:return:
"""
z = math.sqrt(lng * lng + lat * lat) + 0.00002 * math.sin(lat * x_pi)
theta = math.atan2(lat, lng) + 0.000003 * math.cos(lng * x_pi)
bd_lng = z * math.cos(theta) + 0.0065
bd_lat = z * math.sin(theta) + 0.006
return [bd_lng, bd_lat]
def bd09togcj02(bd_lon, bd_lat):
"""
百度坐标系(BD-09)转火星坐标系(GCJ-02)
百度——>谷歌、高德
:param bd_lat:百度坐标纬度
:param bd_lon:百度坐标经度
:return:转换后的坐标列表形式
"""
x = bd_lon - 0.0065
y = bd_lat - 0.006
z = math.sqrt(x * x + y * y) - 0.00002 * math.sin(y * x_pi)
theta = math.atan2(y, x) - 0.000003 * math.cos(x * x_pi)
gg_lng = z * math.cos(theta)
gg_lat = z * math.sin(theta)
return [gg_lng, gg_lat]
def wgs84togcj02(lng, lat):
"""
WGS84转GCJ02(火星坐标系)
:param lng:WGS84坐标系的经度
:param lat:WGS84坐标系的纬度
:return:
"""
if out_of_china(lng, lat): # 判断是否在国内
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [mglng, mglat]
def gcj02towgs84(lng, lat):
"""
GCJ02(火星坐标系)转GPS84
:param lng:火星坐标系的经度
:param lat:火星坐标系纬度
:return:
"""
if out_of_china(lng, lat):
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [lng * 2 - mglng, lat * 2 - mglat]
def transformlat(lng, lat):
ret = -100.0 + 2.0 * lng + 3.0 * lat + 0.2 * lat * lat + \
0.1 * lng * lat + 0.2 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lat * pi) + 40.0 *
math.sin(lat / 3.0 * pi)) * 2.0 / 3.0
ret += (160.0 * math.sin(lat / 12.0 * pi) + 320 *
math.sin(lat * pi / 30.0)) * 2.0 / 3.0
return ret
def transformlng(lng, lat):
ret = 300.0 + lng + 2.0 * lat + 0.1 * lng * lng + \
0.1 * lng * lat + 0.1 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lng * pi) + 40.0 *
math.sin(lng / 3.0 * pi)) * 2.0 / 3.0
ret += (150.0 * math.sin(lng / 12.0 * pi) + 300.0 *
math.sin(lng / 30.0 * pi)) * 2.0 / 3.0
return ret
def out_of_china(lng, lat):
"""
判断是否在国内,不在国内不做偏移
:param lng:
:param lat:
:return:
"""
if lng < 72.004 or lng > 137.8347:
return True
if lat < 0.8293 or lat > 55.8271:
return True
return False
if __name__ == '__main__':
lng = 128.543
lat = 37.065
result1 = gcj02tobd09(lng, lat)
result2 = bd09togcj02(lng, lat)
result3 = wgs84togcj02(lng, lat)
result4 = gcj02towgs84(lng, lat)
result5 = geocode('北京市朝阳区朝阳公园')
print(result1, result2, result3, result4, result5) |
brandonxiang/geojson-python-utils | geojson_utils/coordTransform_utils.py | gcj02tobd09 | python | def gcj02tobd09(lng, lat):
z = math.sqrt(lng * lng + lat * lat) + 0.00002 * math.sin(lat * x_pi)
theta = math.atan2(lat, lng) + 0.000003 * math.cos(lng * x_pi)
bd_lng = z * math.cos(theta) + 0.0065
bd_lat = z * math.sin(theta) + 0.006
return [bd_lng, bd_lat] | 火星坐标系(GCJ-02)转百度坐标系(BD-09)
谷歌、高德——>百度
:param lng:火星坐标经度
:param lat:火星坐标纬度
:return: | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/coordTransform_utils.py#L41-L53 | null | # -*- coding: utf-8 -*-
from __future__ import print_function
import json
import requests
import math
key = 'your key here' # 这里填写你的高德api的key
x_pi = 3.14159265358979324 * 3000.0 / 180.0
pi = 3.1415926535897932384626 # π
a = 6378245.0 # 长半轴
ee = 0.00669342162296594323 # 扁率
def geocode(address):
"""
利用百度geocoding服务解析地址获取位置坐标
:param address:需要解析的地址
:return:
"""
geocoding = {'s': 'rsv3',
'key': key,
'city': '全国',
'address': address}
res = requests.get(
"http://restapi.amap.com/v3/geocode/geo", params=geocoding)
if res.status_code == 200:
json = res.json()
status = json.get('status')
count = json.get('count')
if status == '1' and int(count) >= 1:
geocodes = json.get('geocodes')[0]
lng = float(geocodes.get('location').split(',')[0])
lat = float(geocodes.get('location').split(',')[1])
return [lng, lat]
else:
return None
else:
return None
def bd09togcj02(bd_lon, bd_lat):
"""
百度坐标系(BD-09)转火星坐标系(GCJ-02)
百度——>谷歌、高德
:param bd_lat:百度坐标纬度
:param bd_lon:百度坐标经度
:return:转换后的坐标列表形式
"""
x = bd_lon - 0.0065
y = bd_lat - 0.006
z = math.sqrt(x * x + y * y) - 0.00002 * math.sin(y * x_pi)
theta = math.atan2(y, x) - 0.000003 * math.cos(x * x_pi)
gg_lng = z * math.cos(theta)
gg_lat = z * math.sin(theta)
return [gg_lng, gg_lat]
def wgs84togcj02(lng, lat):
"""
WGS84转GCJ02(火星坐标系)
:param lng:WGS84坐标系的经度
:param lat:WGS84坐标系的纬度
:return:
"""
if out_of_china(lng, lat): # 判断是否在国内
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [mglng, mglat]
def gcj02towgs84(lng, lat):
"""
GCJ02(火星坐标系)转GPS84
:param lng:火星坐标系的经度
:param lat:火星坐标系纬度
:return:
"""
if out_of_china(lng, lat):
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [lng * 2 - mglng, lat * 2 - mglat]
def transformlat(lng, lat):
ret = -100.0 + 2.0 * lng + 3.0 * lat + 0.2 * lat * lat + \
0.1 * lng * lat + 0.2 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lat * pi) + 40.0 *
math.sin(lat / 3.0 * pi)) * 2.0 / 3.0
ret += (160.0 * math.sin(lat / 12.0 * pi) + 320 *
math.sin(lat * pi / 30.0)) * 2.0 / 3.0
return ret
def transformlng(lng, lat):
ret = 300.0 + lng + 2.0 * lat + 0.1 * lng * lng + \
0.1 * lng * lat + 0.1 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lng * pi) + 40.0 *
math.sin(lng / 3.0 * pi)) * 2.0 / 3.0
ret += (150.0 * math.sin(lng / 12.0 * pi) + 300.0 *
math.sin(lng / 30.0 * pi)) * 2.0 / 3.0
return ret
def out_of_china(lng, lat):
"""
判断是否在国内,不在国内不做偏移
:param lng:
:param lat:
:return:
"""
if lng < 72.004 or lng > 137.8347:
return True
if lat < 0.8293 or lat > 55.8271:
return True
return False
if __name__ == '__main__':
lng = 128.543
lat = 37.065
result1 = gcj02tobd09(lng, lat)
result2 = bd09togcj02(lng, lat)
result3 = wgs84togcj02(lng, lat)
result4 = gcj02towgs84(lng, lat)
result5 = geocode('北京市朝阳区朝阳公园')
print(result1, result2, result3, result4, result5) |
brandonxiang/geojson-python-utils | geojson_utils/coordTransform_utils.py | bd09togcj02 | python | def bd09togcj02(bd_lon, bd_lat):
x = bd_lon - 0.0065
y = bd_lat - 0.006
z = math.sqrt(x * x + y * y) - 0.00002 * math.sin(y * x_pi)
theta = math.atan2(y, x) - 0.000003 * math.cos(x * x_pi)
gg_lng = z * math.cos(theta)
gg_lat = z * math.sin(theta)
return [gg_lng, gg_lat] | 百度坐标系(BD-09)转火星坐标系(GCJ-02)
百度——>谷歌、高德
:param bd_lat:百度坐标纬度
:param bd_lon:百度坐标经度
:return:转换后的坐标列表形式 | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/coordTransform_utils.py#L56-L70 | null | # -*- coding: utf-8 -*-
from __future__ import print_function
import json
import requests
import math
key = 'your key here' # 这里填写你的高德api的key
x_pi = 3.14159265358979324 * 3000.0 / 180.0
pi = 3.1415926535897932384626 # π
a = 6378245.0 # 长半轴
ee = 0.00669342162296594323 # 扁率
def geocode(address):
"""
利用百度geocoding服务解析地址获取位置坐标
:param address:需要解析的地址
:return:
"""
geocoding = {'s': 'rsv3',
'key': key,
'city': '全国',
'address': address}
res = requests.get(
"http://restapi.amap.com/v3/geocode/geo", params=geocoding)
if res.status_code == 200:
json = res.json()
status = json.get('status')
count = json.get('count')
if status == '1' and int(count) >= 1:
geocodes = json.get('geocodes')[0]
lng = float(geocodes.get('location').split(',')[0])
lat = float(geocodes.get('location').split(',')[1])
return [lng, lat]
else:
return None
else:
return None
def gcj02tobd09(lng, lat):
"""
火星坐标系(GCJ-02)转百度坐标系(BD-09)
谷歌、高德——>百度
:param lng:火星坐标经度
:param lat:火星坐标纬度
:return:
"""
z = math.sqrt(lng * lng + lat * lat) + 0.00002 * math.sin(lat * x_pi)
theta = math.atan2(lat, lng) + 0.000003 * math.cos(lng * x_pi)
bd_lng = z * math.cos(theta) + 0.0065
bd_lat = z * math.sin(theta) + 0.006
return [bd_lng, bd_lat]
def wgs84togcj02(lng, lat):
"""
WGS84转GCJ02(火星坐标系)
:param lng:WGS84坐标系的经度
:param lat:WGS84坐标系的纬度
:return:
"""
if out_of_china(lng, lat): # 判断是否在国内
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [mglng, mglat]
def gcj02towgs84(lng, lat):
"""
GCJ02(火星坐标系)转GPS84
:param lng:火星坐标系的经度
:param lat:火星坐标系纬度
:return:
"""
if out_of_china(lng, lat):
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [lng * 2 - mglng, lat * 2 - mglat]
def transformlat(lng, lat):
ret = -100.0 + 2.0 * lng + 3.0 * lat + 0.2 * lat * lat + \
0.1 * lng * lat + 0.2 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lat * pi) + 40.0 *
math.sin(lat / 3.0 * pi)) * 2.0 / 3.0
ret += (160.0 * math.sin(lat / 12.0 * pi) + 320 *
math.sin(lat * pi / 30.0)) * 2.0 / 3.0
return ret
def transformlng(lng, lat):
ret = 300.0 + lng + 2.0 * lat + 0.1 * lng * lng + \
0.1 * lng * lat + 0.1 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lng * pi) + 40.0 *
math.sin(lng / 3.0 * pi)) * 2.0 / 3.0
ret += (150.0 * math.sin(lng / 12.0 * pi) + 300.0 *
math.sin(lng / 30.0 * pi)) * 2.0 / 3.0
return ret
def out_of_china(lng, lat):
"""
判断是否在国内,不在国内不做偏移
:param lng:
:param lat:
:return:
"""
if lng < 72.004 or lng > 137.8347:
return True
if lat < 0.8293 or lat > 55.8271:
return True
return False
if __name__ == '__main__':
lng = 128.543
lat = 37.065
result1 = gcj02tobd09(lng, lat)
result2 = bd09togcj02(lng, lat)
result3 = wgs84togcj02(lng, lat)
result4 = gcj02towgs84(lng, lat)
result5 = geocode('北京市朝阳区朝阳公园')
print(result1, result2, result3, result4, result5) |
brandonxiang/geojson-python-utils | geojson_utils/coordTransform_utils.py | wgs84togcj02 | python | def wgs84togcj02(lng, lat):
if out_of_china(lng, lat): # 判断是否在国内
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [mglng, mglat] | WGS84转GCJ02(火星坐标系)
:param lng:WGS84坐标系的经度
:param lat:WGS84坐标系的纬度
:return: | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/coordTransform_utils.py#L73-L92 | [
"def out_of_china(lng, lat):\n \"\"\"\n 判断是否在国内,不在国内不做偏移\n :param lng:\n :param lat:\n :return:\n \"\"\"\n if lng < 72.004 or lng > 137.8347:\n return True\n if lat < 0.8293 or lat > 55.8271:\n return True\n return False\n",
"def transformlat(lng, lat):\n ret = -100.0 +... | # -*- coding: utf-8 -*-
from __future__ import print_function
import json
import requests
import math
key = 'your key here' # 这里填写你的高德api的key
x_pi = 3.14159265358979324 * 3000.0 / 180.0
pi = 3.1415926535897932384626 # π
a = 6378245.0 # 长半轴
ee = 0.00669342162296594323 # 扁率
def geocode(address):
"""
利用百度geocoding服务解析地址获取位置坐标
:param address:需要解析的地址
:return:
"""
geocoding = {'s': 'rsv3',
'key': key,
'city': '全国',
'address': address}
res = requests.get(
"http://restapi.amap.com/v3/geocode/geo", params=geocoding)
if res.status_code == 200:
json = res.json()
status = json.get('status')
count = json.get('count')
if status == '1' and int(count) >= 1:
geocodes = json.get('geocodes')[0]
lng = float(geocodes.get('location').split(',')[0])
lat = float(geocodes.get('location').split(',')[1])
return [lng, lat]
else:
return None
else:
return None
def gcj02tobd09(lng, lat):
"""
火星坐标系(GCJ-02)转百度坐标系(BD-09)
谷歌、高德——>百度
:param lng:火星坐标经度
:param lat:火星坐标纬度
:return:
"""
z = math.sqrt(lng * lng + lat * lat) + 0.00002 * math.sin(lat * x_pi)
theta = math.atan2(lat, lng) + 0.000003 * math.cos(lng * x_pi)
bd_lng = z * math.cos(theta) + 0.0065
bd_lat = z * math.sin(theta) + 0.006
return [bd_lng, bd_lat]
def bd09togcj02(bd_lon, bd_lat):
"""
百度坐标系(BD-09)转火星坐标系(GCJ-02)
百度——>谷歌、高德
:param bd_lat:百度坐标纬度
:param bd_lon:百度坐标经度
:return:转换后的坐标列表形式
"""
x = bd_lon - 0.0065
y = bd_lat - 0.006
z = math.sqrt(x * x + y * y) - 0.00002 * math.sin(y * x_pi)
theta = math.atan2(y, x) - 0.000003 * math.cos(x * x_pi)
gg_lng = z * math.cos(theta)
gg_lat = z * math.sin(theta)
return [gg_lng, gg_lat]
def gcj02towgs84(lng, lat):
"""
GCJ02(火星坐标系)转GPS84
:param lng:火星坐标系的经度
:param lat:火星坐标系纬度
:return:
"""
if out_of_china(lng, lat):
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [lng * 2 - mglng, lat * 2 - mglat]
def transformlat(lng, lat):
ret = -100.0 + 2.0 * lng + 3.0 * lat + 0.2 * lat * lat + \
0.1 * lng * lat + 0.2 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lat * pi) + 40.0 *
math.sin(lat / 3.0 * pi)) * 2.0 / 3.0
ret += (160.0 * math.sin(lat / 12.0 * pi) + 320 *
math.sin(lat * pi / 30.0)) * 2.0 / 3.0
return ret
def transformlng(lng, lat):
ret = 300.0 + lng + 2.0 * lat + 0.1 * lng * lng + \
0.1 * lng * lat + 0.1 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lng * pi) + 40.0 *
math.sin(lng / 3.0 * pi)) * 2.0 / 3.0
ret += (150.0 * math.sin(lng / 12.0 * pi) + 300.0 *
math.sin(lng / 30.0 * pi)) * 2.0 / 3.0
return ret
def out_of_china(lng, lat):
"""
判断是否在国内,不在国内不做偏移
:param lng:
:param lat:
:return:
"""
if lng < 72.004 or lng > 137.8347:
return True
if lat < 0.8293 or lat > 55.8271:
return True
return False
if __name__ == '__main__':
lng = 128.543
lat = 37.065
result1 = gcj02tobd09(lng, lat)
result2 = bd09togcj02(lng, lat)
result3 = wgs84togcj02(lng, lat)
result4 = gcj02towgs84(lng, lat)
result5 = geocode('北京市朝阳区朝阳公园')
print(result1, result2, result3, result4, result5) |
brandonxiang/geojson-python-utils | geojson_utils/coordTransform_utils.py | out_of_china | python | def out_of_china(lng, lat):
if lng < 72.004 or lng > 137.8347:
return True
if lat < 0.8293 or lat > 55.8271:
return True
return False | 判断是否在国内,不在国内不做偏移
:param lng:
:param lat:
:return: | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/coordTransform_utils.py#L141-L152 | null | # -*- coding: utf-8 -*-
from __future__ import print_function
import json
import requests
import math
key = 'your key here' # 这里填写你的高德api的key
x_pi = 3.14159265358979324 * 3000.0 / 180.0
pi = 3.1415926535897932384626 # π
a = 6378245.0 # 长半轴
ee = 0.00669342162296594323 # 扁率
def geocode(address):
"""
利用百度geocoding服务解析地址获取位置坐标
:param address:需要解析的地址
:return:
"""
geocoding = {'s': 'rsv3',
'key': key,
'city': '全国',
'address': address}
res = requests.get(
"http://restapi.amap.com/v3/geocode/geo", params=geocoding)
if res.status_code == 200:
json = res.json()
status = json.get('status')
count = json.get('count')
if status == '1' and int(count) >= 1:
geocodes = json.get('geocodes')[0]
lng = float(geocodes.get('location').split(',')[0])
lat = float(geocodes.get('location').split(',')[1])
return [lng, lat]
else:
return None
else:
return None
def gcj02tobd09(lng, lat):
"""
火星坐标系(GCJ-02)转百度坐标系(BD-09)
谷歌、高德——>百度
:param lng:火星坐标经度
:param lat:火星坐标纬度
:return:
"""
z = math.sqrt(lng * lng + lat * lat) + 0.00002 * math.sin(lat * x_pi)
theta = math.atan2(lat, lng) + 0.000003 * math.cos(lng * x_pi)
bd_lng = z * math.cos(theta) + 0.0065
bd_lat = z * math.sin(theta) + 0.006
return [bd_lng, bd_lat]
def bd09togcj02(bd_lon, bd_lat):
"""
百度坐标系(BD-09)转火星坐标系(GCJ-02)
百度——>谷歌、高德
:param bd_lat:百度坐标纬度
:param bd_lon:百度坐标经度
:return:转换后的坐标列表形式
"""
x = bd_lon - 0.0065
y = bd_lat - 0.006
z = math.sqrt(x * x + y * y) - 0.00002 * math.sin(y * x_pi)
theta = math.atan2(y, x) - 0.000003 * math.cos(x * x_pi)
gg_lng = z * math.cos(theta)
gg_lat = z * math.sin(theta)
return [gg_lng, gg_lat]
def wgs84togcj02(lng, lat):
"""
WGS84转GCJ02(火星坐标系)
:param lng:WGS84坐标系的经度
:param lat:WGS84坐标系的纬度
:return:
"""
if out_of_china(lng, lat): # 判断是否在国内
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [mglng, mglat]
def gcj02towgs84(lng, lat):
"""
GCJ02(火星坐标系)转GPS84
:param lng:火星坐标系的经度
:param lat:火星坐标系纬度
:return:
"""
if out_of_china(lng, lat):
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [lng * 2 - mglng, lat * 2 - mglat]
def transformlat(lng, lat):
ret = -100.0 + 2.0 * lng + 3.0 * lat + 0.2 * lat * lat + \
0.1 * lng * lat + 0.2 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lat * pi) + 40.0 *
math.sin(lat / 3.0 * pi)) * 2.0 / 3.0
ret += (160.0 * math.sin(lat / 12.0 * pi) + 320 *
math.sin(lat * pi / 30.0)) * 2.0 / 3.0
return ret
def transformlng(lng, lat):
ret = 300.0 + lng + 2.0 * lat + 0.1 * lng * lng + \
0.1 * lng * lat + 0.1 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lng * pi) + 40.0 *
math.sin(lng / 3.0 * pi)) * 2.0 / 3.0
ret += (150.0 * math.sin(lng / 12.0 * pi) + 300.0 *
math.sin(lng / 30.0 * pi)) * 2.0 / 3.0
return ret
if __name__ == '__main__':
lng = 128.543
lat = 37.065
result1 = gcj02tobd09(lng, lat)
result2 = bd09togcj02(lng, lat)
result3 = wgs84togcj02(lng, lat)
result4 = gcj02towgs84(lng, lat)
result5 = geocode('北京市朝阳区朝阳公园')
print(result1, result2, result3, result4, result5) |
brandonxiang/geojson-python-utils | geojson_utils/convertor.py | convertor | python | def convertor(geometry, method="wgs2gcj"):
if geometry['type'] == 'Point':
coords = geometry['coordinates']
coords[0], coords[1] = methods[method](coords[0], coords[1])
elif geometry['type'] == 'LineString' or geometry['type'] == 'MutliPoint':
coordinates = geometry['coordinates']
for coords in coordinates:
coords[0], coords[1] = methods[method](coords[0], coords[1])
elif geometry['type'] == 'Polygon' or geometry['type'] == 'MultiLineString':
coordinates = geometry['coordinates']
for rings in coordinates:
for coords in rings:
coords[0], coords[1] = methods[method](coords[0], coords[1])
elif geometry['type'] == 'MultiPolygon':
coordinates = geometry['coordinates']
for rings in coordinates:
for lines in rings:
for coords in lines:
coords[0], coords[1] = methods[method](coords[0], coords[1])
return geometry | convert wgs84 to gcj
referencing by https://github.com/wandergis/coordTransform_py | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/convertor.py#L23-L46 | null | from .coordTransform_utils import wgs84togcj02, gcj02towgs84, gcj02tobd09, bd09togcj02
def wgs84tobd09(lng, lat):
tmp_lng, tmp_lat = wgs84togcj02(lng, lat)
return gcj02tobd09(tmp_lng, tmp_lat)
def bd09towgs84(lng, lat):
tmp_lng, tmp_lat = bd09togcj02(lng, lat)
return gcj02towgs84(tmp_lng, tmp_lat)
methods = {
"wgs2gcj": wgs84togcj02,
"gcj2wgs": gcj02towgs84,
"wgs2bd": wgs84tobd09,
"bd2wgs": bd09towgs84,
"gcj2bd": gcj02tobd09,
"bd2gcj": bd09towgs84
}
|
brandonxiang/geojson-python-utils | geojson_utils/merger.py | merge_featurecollection | python | def merge_featurecollection(*jsons):
features = []
for json in jsons:
if json['type'] == 'FeatureCollection':
for feature in json['features']:
features.append(feature)
return {"type":'FeatureCollection', "features":features} | merge features into one featurecollection
Keyword arguments:
jsons -- jsons object list
return geojson featurecollection | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/merger.py#L6-L20 | null | import math
from copy import deepcopy
from geojson import Point,Feature,FeatureCollection
from geojson_utils import point_distance
def simplify_other(major, minor, dist):
"""
Simplify the point featurecollection of poi with another point features accoording by distance.
Attention: point featurecollection only
Keyword arguments:
major -- major geojson
minor -- minor geojson
dist -- distance
return a geojson featurecollection with two parts of featurecollection
"""
result = deepcopy(major)
if major['type'] == 'FeatureCollection' and minor['type'] == 'FeatureCollection':
arc = dist/6371000*180/math.pi*2
for minorfeature in minor['features']:
minorgeom = minorfeature['geometry']
minorlng = minorgeom['coordinates'][0]
minorlat = minorgeom['coordinates'][1]
is_accept = True
for mainfeature in major['features']:
maingeom = mainfeature['geometry']
mainlng = maingeom['coordinates'][0]
mainlat = maingeom['coordinates'][1]
if abs(minorlat-mainlat) <= arc and abs(minorlng-mainlng) <= arc:
distance = point_distance(maingeom, minorgeom)
if distance < dist:
is_accept = False
break
if is_accept:
result["features"].append(minorfeature)
return result
def get_endpoint_from_points(points):
"""
"""
count = 0
result = deepcopy(points)
if points['type'] == 'FeatureCollection':
feature_count = len(points['features'])
for i in range(0, feature_count):
first_geom = points['features'][i]['geometry']
first_lng = first_geom['coordinates'][0]
first_lat = first_geom['coordinates'][1]
for j in range(0, feature_count):
if i == j:
continue
second_geom = points['features'][j]['geometry']
second_lng = second_geom['coordinates'][0]
second_lat = second_geom['coordinates'][1]
if first_lat == second_lat and first_lng == second_lng:
result['features'].remove(points['features'][i])
count += 1
break
return result
def get_endpoint_from_linestring(linestrings):
"""
"""
points = get_bothend_from_linestring(linestrings)
return get_endpoint_from_points(points)
def get_bothend_from_linestring(linestrings):
"""
"""
points = []
for linestring in linestrings['features']:
coord = linestring['geometry']['coordinates']
properties = linestring['properties']
first = coord[0]
first_feat = get_point_feature(first, properties)
last = coord[len(coord)-1]
last_feat = get_point_feature(last, properties)
points.append(first_feat)
points.append(last_feat)
return FeatureCollection(points)
def get_point_feature(coord, properties):
return Feature(geometry=Point(coord), properties=properties)
|
brandonxiang/geojson-python-utils | geojson_utils/merger.py | simplify_other | python | def simplify_other(major, minor, dist):
result = deepcopy(major)
if major['type'] == 'FeatureCollection' and minor['type'] == 'FeatureCollection':
arc = dist/6371000*180/math.pi*2
for minorfeature in minor['features']:
minorgeom = minorfeature['geometry']
minorlng = minorgeom['coordinates'][0]
minorlat = minorgeom['coordinates'][1]
is_accept = True
for mainfeature in major['features']:
maingeom = mainfeature['geometry']
mainlng = maingeom['coordinates'][0]
mainlat = maingeom['coordinates'][1]
if abs(minorlat-mainlat) <= arc and abs(minorlng-mainlng) <= arc:
distance = point_distance(maingeom, minorgeom)
if distance < dist:
is_accept = False
break
if is_accept:
result["features"].append(minorfeature)
return result | Simplify the point featurecollection of poi with another point features accoording by distance.
Attention: point featurecollection only
Keyword arguments:
major -- major geojson
minor -- minor geojson
dist -- distance
return a geojson featurecollection with two parts of featurecollection | train | https://github.com/brandonxiang/geojson-python-utils/blob/33d0dcd5f16e0567b48c0d49fd292a4f1db16b41/geojson_utils/merger.py#L22-L55 | null | import math
from copy import deepcopy
from geojson import Point,Feature,FeatureCollection
from geojson_utils import point_distance
def merge_featurecollection(*jsons):
"""
merge features into one featurecollection
Keyword arguments:
jsons -- jsons object list
return geojson featurecollection
"""
features = []
for json in jsons:
if json['type'] == 'FeatureCollection':
for feature in json['features']:
features.append(feature)
return {"type":'FeatureCollection', "features":features}
def simplify_other(major, minor, dist):
"""
Simplify the point featurecollection of poi with another point features accoording by distance.
Attention: point featurecollection only
Keyword arguments:
major -- major geojson
minor -- minor geojson
dist -- distance
return a geojson featurecollection with two parts of featurecollection
"""
result = deepcopy(major)
if major['type'] == 'FeatureCollection' and minor['type'] == 'FeatureCollection':
arc = dist/6371000*180/math.pi*2
for minorfeature in minor['features']:
minorgeom = minorfeature['geometry']
minorlng = minorgeom['coordinates'][0]
minorlat = minorgeom['coordinates'][1]
is_accept = True
for mainfeature in major['features']:
maingeom = mainfeature['geometry']
mainlng = maingeom['coordinates'][0]
mainlat = maingeom['coordinates'][1]
if abs(minorlat-mainlat) <= arc and abs(minorlng-mainlng) <= arc:
distance = point_distance(maingeom, minorgeom)
if distance < dist:
is_accept = False
break
if is_accept:
result["features"].append(minorfeature)
return result
def get_endpoint_from_points(points):
"""
"""
count = 0
result = deepcopy(points)
if points['type'] == 'FeatureCollection':
feature_count = len(points['features'])
for i in range(0, feature_count):
first_geom = points['features'][i]['geometry']
first_lng = first_geom['coordinates'][0]
first_lat = first_geom['coordinates'][1]
for j in range(0, feature_count):
if i == j:
continue
second_geom = points['features'][j]['geometry']
second_lng = second_geom['coordinates'][0]
second_lat = second_geom['coordinates'][1]
if first_lat == second_lat and first_lng == second_lng:
result['features'].remove(points['features'][i])
count += 1
break
return result
def get_endpoint_from_linestring(linestrings):
"""
"""
points = get_bothend_from_linestring(linestrings)
return get_endpoint_from_points(points)
def get_bothend_from_linestring(linestrings):
"""
"""
points = []
for linestring in linestrings['features']:
coord = linestring['geometry']['coordinates']
properties = linestring['properties']
first = coord[0]
first_feat = get_point_feature(first, properties)
last = coord[len(coord)-1]
last_feat = get_point_feature(last, properties)
points.append(first_feat)
points.append(last_feat)
return FeatureCollection(points)
def get_point_feature(coord, properties):
return Feature(geometry=Point(coord), properties=properties)
|
fogleman/pg | pg/core.py | delete_all | python | def delete_all(obj):
'''Calls `delete()` on all members of `obj` that are recognized as
instances of `pg` objects.'''
types = tuple([
Shader,
Mesh,
VertexBuffer,
IndexBuffer,
Texture,
Program,
Context,
])
for name in dir(obj):
child = getattr(obj, name)
if isinstance(child, types):
child.delete() | Calls `delete()` on all members of `obj` that are recognized as
instances of `pg` objects. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/core.py#L14-L29 | null | from ctypes import *
from OpenGL.GL import *
from PIL import Image
from math import copysign
from .matrix import Matrix
from . import glfw
from . import util
import cPickle as pickle
import os
import Queue
import threading
import time
class Shader(object):
def __init__(self, shader_type, shader_source):
if os.path.exists(shader_source):
with open(shader_source, 'r') as fp:
shader_source = fp.read()
self.handle = glCreateShader(shader_type)
glShaderSource(self.handle, shader_source)
glCompileShader(self.handle)
log = glGetShaderInfoLog(self.handle)
if log:
raise Exception(log)
def delete(self):
if self.handle is not None:
glDeleteShader(self.handle)
self.handle = None
class VertexShader(Shader):
def __init__(self, shader_source):
super(VertexShader, self).__init__(GL_VERTEX_SHADER, shader_source)
class FragmentShader(Shader):
def __init__(self, shader_source):
super(FragmentShader, self).__init__(GL_FRAGMENT_SHADER, shader_source)
class Cache(object):
def __init__(self):
self.data = {}
def set(self, key, value):
if key in self.data and self.data[key] == value:
return False
self.data[key] = value
return True
class Mesh(object):
@staticmethod
def load_pickle(path):
with open(path, 'rb') as fp:
positions, normals, uvs = pickle.load(fp)
return Mesh(positions, normals, uvs)
def __init__(self, positions=None, normals=None, uvs=None):
self.positions = positions or []
self.normals = normals or []
self.uvs = uvs or []
self.index = None
self.vertex_buffer = None
self.slices = None
def delete(self):
if self.index:
self.index.delete()
self.index = None
if self.vertex_buffer:
self.vertex_buffer.delete()
self.vertex_buffer = None
def __add__(self, other):
positions = self.positions + other.positions
normals = self.normals + other.normals
uvs = self.uvs + other.uvs
return Mesh(positions, normals, uvs)
def __rmul__(self, other):
if isinstance(other, Matrix):
return self.multiply(other)
return NotImplemented
def multiply(self, matrix):
positions = [matrix * x for x in self.positions]
normals = list(self.normals)
uvs = list(self.uvs)
return Mesh(positions, normals, uvs)
def bounding_box(self):
return util.bounding_box(self.positions)
def center(self):
positions = util.recenter(self.positions)
normals = list(self.normals)
uvs = list(self.uvs)
return Mesh(positions, normals, uvs)
def smooth_normals(self):
positions = list(self.positions)
normals = util.smooth_normals(self.positions, self.normals)
uvs = list(self.uvs)
return Mesh(positions, normals, uvs)
def reverse_winding(self):
positions = []
for i in xrange(0, len(self.positions), 3):
v1, v2, v3 = self.positions[i:i+3]
positions.extend([v3, v2, v1])
normals = [util.neg(x) for x in self.normals]
uvs = []
for i in xrange(0, len(self.uvs), 3):
v1, v2, v3 = self.uvs[i:i+3]
uvs.extend([v3, v2, v1])
return Mesh(positions, normals, uvs)
def swap_axes(self, i, j, k):
si, sj, sk = copysign(1, i), copysign(1, j), copysign(1, k)
i, j, k = abs(i), abs(j), abs(k)
positions = [(v[i] * si, v[j] * sj, v[k] * sk) for v in self.positions]
normals = [(v[i] * si, v[j] * sj, v[k] * sk) for v in self.normals]
uvs = list(self.uvs)
return Mesh(positions, normals, uvs)
def save_pickle(self, path):
obj = (self.positions, self.normals, self.uvs)
with open(path, 'wb') as fp:
pickle.dump(obj, fp, -1)
def draw(self, context, mode=GL_TRIANGLES):
if not self.vertex_buffer:
self.index, self.vertex_buffer, self.slices = index(
self.positions, self.normals, self.uvs)
context.position, context.normal, context.uv = self.slices
context.draw(mode, self.index)
class VertexBuffer(object):
def __init__(self, data=None):
self.handle = glGenBuffers(1)
self.components = 0
self.vertex_count = 0
self.vertex_capacity = 0
self.extend(data)
def delete(self):
if self.handle is not None:
glDeleteBuffers(1, self.handle)
self.handle = None
def extend(self, data):
if not data:
return
if self.components:
if len(data[0]) != self.components:
raise Exception
else:
self.components = len(data[0])
offset = self.vertex_count * self.components
size = len(data) * self.components
flat = util.flatten(data)
if len(flat) != size:
raise Exception
self.vertex_count += len(data)
if self.vertex_count > self.vertex_capacity:
old_size = self.components * self.vertex_capacity
self.vertex_capacity = max(
self.vertex_count, self.vertex_capacity * 2)
new_size = self.components * self.vertex_capacity
if old_size:
self.resize(old_size, new_size)
else:
self.allocate(new_size)
glBindBuffer(GL_ARRAY_BUFFER, self.handle)
glBufferSubData(
GL_ARRAY_BUFFER,
sizeof(c_float) * offset,
sizeof(c_float) * size,
util.pack_list('<f', flat))
glBindBuffer(GL_ARRAY_BUFFER, 0)
def allocate(self, size):
glBindBuffer(GL_ARRAY_BUFFER, self.handle)
glBufferData(
GL_ARRAY_BUFFER,
sizeof(c_float) * size,
None,
GL_DYNAMIC_DRAW)
glBindBuffer(GL_ARRAY_BUFFER, 0)
def resize(self, old_size, new_size):
old_size = sizeof(c_float) * old_size
new_size = sizeof(c_float) * new_size
temp = (ctypes.c_byte * new_size)()
glBindBuffer(GL_ARRAY_BUFFER, self.handle)
data = glMapBuffer(GL_ARRAY_BUFFER, GL_READ_ONLY)
memmove(temp, data, min(old_size, new_size))
glUnmapBuffer(GL_ARRAY_BUFFER)
glBufferData(GL_ARRAY_BUFFER, new_size, temp, GL_DYNAMIC_DRAW)
glBindBuffer(GL_ARRAY_BUFFER, 0)
def set_data(self, data):
old_size = self.components * self.vertex_capacity
self.components = len(data[0])
self.vertex_count = len(data)
self.vertex_capacity = len(data)
flat = util.flatten(data)
size = len(flat)
glBindBuffer(GL_ARRAY_BUFFER, self.handle)
if size == old_size:
glBufferSubData(
GL_ARRAY_BUFFER,
0,
sizeof(c_float) * size,
util.pack_list('<f', flat))
else:
glBufferData(
GL_ARRAY_BUFFER,
sizeof(c_float) * size,
util.pack_list('<f', flat),
GL_DYNAMIC_DRAW)
glBindBuffer(GL_ARRAY_BUFFER, 0)
def slice(self, components, offset):
return VertexBufferSlice(self, components, offset)
def slices(self, *args):
offset = 0
result = []
for components in args:
if components:
result.append(self.slice(components, offset))
offset += components
else:
result.append(None)
return result
def bind(self, location):
glBindBuffer(GL_ARRAY_BUFFER, self.handle)
glVertexAttribPointer(
location, self.components, GL_FLOAT, GL_FALSE,
0, c_void_p())
glBindBuffer(GL_ARRAY_BUFFER, 0)
class VertexBufferSlice(object):
def __init__(self, parent, components, offset):
self.parent = parent
self.components = components
self.offset = offset
@property
def vertex_count(self):
return self.parent.vertex_count
def bind(self, location):
glBindBuffer(GL_ARRAY_BUFFER, self.parent.handle)
glVertexAttribPointer(
location, self.components, GL_FLOAT, GL_FALSE,
sizeof(c_float) * self.parent.components,
c_void_p(sizeof(c_float) * self.offset))
glBindBuffer(GL_ARRAY_BUFFER, 0)
class IndexBuffer(object):
def __init__(self, data=None):
self.handle = glGenBuffers(1)
if data is not None:
self.set_data(data)
def delete(self):
if self.handle is not None:
glDeleteBuffers(1, self.handle)
self.handle = None
def set_data(self, data):
self.size = len(data)
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.handle)
glBufferData(
GL_ELEMENT_ARRAY_BUFFER,
sizeof(c_uint) * self.size,
(c_uint * self.size)(*data),
GL_STATIC_DRAW)
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0)
def index(*args):
sizes = [len(x[0]) if x else None for x in args]
data = util.interleave(*filter(None, args))
unique = list(util.distinct(data))
lookup = dict((x, i) for i, x in enumerate(unique))
indices = [lookup[x] for x in data]
vertex_buffer = VertexBuffer(unique)
index_buffer = IndexBuffer(indices)
return index_buffer, vertex_buffer, vertex_buffer.slices(*sizes)
class Texture(object):
    """A 2D OpenGL texture bound to a fixed texture unit.

    `im` may be a PIL Image or a filename string.
    """
    # One GL enum per texture unit, indexed by unit number.
    UNITS = [
        GL_TEXTURE0, GL_TEXTURE1, GL_TEXTURE2, GL_TEXTURE3,
        GL_TEXTURE4, GL_TEXTURE5, GL_TEXTURE6, GL_TEXTURE7,
        GL_TEXTURE8, GL_TEXTURE9, GL_TEXTURE10, GL_TEXTURE11,
        GL_TEXTURE12, GL_TEXTURE13, GL_TEXTURE14, GL_TEXTURE15,
        GL_TEXTURE16, GL_TEXTURE17, GL_TEXTURE18, GL_TEXTURE19,
        GL_TEXTURE20, GL_TEXTURE21, GL_TEXTURE22, GL_TEXTURE23,
        GL_TEXTURE24, GL_TEXTURE25, GL_TEXTURE26, GL_TEXTURE27,
        GL_TEXTURE28, GL_TEXTURE29, GL_TEXTURE30, GL_TEXTURE31,
    ]
    def __init__(
        self, unit, im,
        min_filter=GL_LINEAR, mag_filter=GL_LINEAR,
        wrap_s=GL_REPEAT, wrap_t=GL_REPEAT,
        mipmap=False):
        self.unit = unit
        # NOTE(review): `basestring` makes this Python 2 only.
        if isinstance(im, basestring):
            im = Image.open(im)
        # Flip so row order matches OpenGL's bottom-up convention.
        im = im.convert('RGBA').transpose(Image.FLIP_TOP_BOTTOM)
        self.size = width, height = im.size
        data = im.tobytes()
        self.handle = glGenTextures(1)
        self.bind()
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, min_filter)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, mag_filter)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, wrap_s)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, wrap_t)
        glTexImage2D(
            GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA,
            GL_UNSIGNED_BYTE, data)
        if mipmap:
            glGenerateMipmap(GL_TEXTURE_2D)
    def delete(self):
        # Idempotent: the handle is cleared after the first call.
        if self.handle is not None:
            glDeleteTextures(1, self.handle)
            self.handle = None
    def get_uniform_value(self):
        # Sampler uniforms are bound by texture unit index (see Uniform.bind).
        return self.unit
    def bind(self):
        glActiveTexture(Texture.UNITS[self.unit])
        glBindTexture(GL_TEXTURE_2D, self.handle)
class Attribute(object):
    """Metadata for one active vertex attribute of a linked shader program."""
    def __init__(self, location, name, size, data_type):
        self.location = location
        self.name = name
        self.size = size
        self.data_type = data_type
    def bind(self, value):
        """Enable this attribute and point it at `value` (a buffer/slice),
        skipping the pointer call when the cached binding is unchanged."""
        glEnableVertexAttribArray(self.location)
        cache = App.instance.current_window.cache
        if cache.set(self.location, value):
            value.bind(self.location)
    def unbind(self):
        glDisableVertexAttribArray(self.location)
    def __repr__(self):
        fields = (self.location, self.name, self.size, self.data_type)
        return 'Attribute%s' % str(fields)
class Uniform(object):
    """Metadata for one active uniform of a linked shader program, with
    type-dispatched upload helpers."""
    # GL type enums grouped by the glUniform* family used to upload them.
    FLOATS = set([GL_FLOAT, GL_FLOAT_VEC2, GL_FLOAT_VEC3, GL_FLOAT_VEC4])
    INTS = set([GL_INT, GL_INT_VEC2, GL_INT_VEC3, GL_INT_VEC4])
    BOOLS = set([GL_BOOL, GL_BOOL_VEC2, GL_BOOL_VEC3, GL_BOOL_VEC4])
    MATS = set([GL_FLOAT_MAT2, GL_FLOAT_MAT3, GL_FLOAT_MAT4])
    SAMPLERS = set([GL_SAMPLER_2D, GL_SAMPLER_CUBE])
    def __init__(self, location, name, size, data_type):
        self.location = location
        self.name = name
        self.size = size
        self.data_type = data_type
    def bind(self, value):
        # Upload `value` to this uniform, skipping redundant GL calls.
        if self.size > 1:
            self.bind_array(value)
            return
        # Objects such as Texture expose their uniform value explicitly.
        if hasattr(value, 'get_uniform_value'):
            value = value.get_uniform_value()
        try:
            count = len(value)
        except Exception:
            # Wrap scalars so the dispatch below always sees a sequence.
            value = [value]
            count = 1
        # The per-program cache suppresses re-uploading an unchanged value.
        cache = App.instance.current_window.current_program.cache
        if not cache.set(self.location, value):
            return
        if self.data_type in Uniform.MATS:
            # Matrices dispatch on flattened element count: 2x2, 3x3, 4x4.
            funcs = {
                4: glUniformMatrix2fv,
                9: glUniformMatrix3fv,
                16: glUniformMatrix4fv,
            }
            funcs[count](self.location, 1, False, (c_float * count)(*value))
        elif self.data_type in Uniform.FLOATS:
            funcs = {
                1: glUniform1f,
                2: glUniform2f,
                3: glUniform3f,
                4: glUniform4f,
            }
            funcs[count](self.location, *value)
        elif self.data_type in Uniform.INTS or self.data_type in Uniform.BOOLS:
            funcs = {
                1: glUniform1i,
                2: glUniform2i,
                3: glUniform3i,
                4: glUniform4i,
            }
            funcs[count](self.location, *value)
        elif self.data_type in Uniform.SAMPLERS:
            # Samplers receive the texture unit index as an int.
            glUniform1i(self.location, *value)
    def bind_array(self, value):
        # Upload a uniform array, truncated to the declared array size.
        first = value[0]
        size = min(len(value), self.size)
        value = value[:size]
        try:
            count = len(first)
            value = util.flatten(value)
        except Exception:
            count = 1
        if len(value) != size * count:
            # Ragged input: every element must have the same length.
            raise Exception
        value = (c_float * len(value))(*value)
        if self.data_type in Uniform.FLOATS:
            funcs = {
                1: glUniform1fv,
                2: glUniform2fv,
                3: glUniform3fv,
                4: glUniform4fv,
            }
            funcs[count](self.location, size, value)
        elif self.data_type in Uniform.INTS or self.data_type in Uniform.BOOLS:
            funcs = {
                1: glUniform1iv,
                2: glUniform2iv,
                3: glUniform3iv,
                4: glUniform4iv,
            }
            funcs[count](self.location, size, value)
        elif self.data_type in Uniform.SAMPLERS:
            glUniform1iv(self.location, size, value)
    def __repr__(self):
        return 'Uniform%s' % str(
            (self.location, self.name, self.size, self.data_type))
class Program(object):
    """Links a GLSL program from a vertex and a fragment shader and
    introspects its active attributes and uniforms."""
    def __init__(self, vs, fs):
        # Accept either Shader objects or raw sources/paths.
        if not isinstance(vs, Shader):
            vs = VertexShader(vs)
        if not isinstance(fs, Shader):
            fs = FragmentShader(fs)
        self.vs = vs
        self.fs = fs
        self.handle = glCreateProgram()
        glAttachShader(self.handle, self.vs.handle)
        glAttachShader(self.handle, self.fs.handle)
        glLinkProgram(self.handle)
        # A non-empty info log indicates a link error.
        log = glGetProgramInfoLog(self.handle)
        if log:
            raise Exception(log)
        # Per-program uniform value cache (see Uniform.bind).
        self.cache = Cache()
    def delete(self):
        # Idempotent; also deletes the attached shaders.
        if self.handle is not None:
            glDeleteProgram(self.handle)
            self.handle = None
            self.vs.delete()
            self.fs.delete()
    def use(self):
        glUseProgram(self.handle)
        App.instance.current_window.set_current_program(self)
    def set_defaults(self, context):
        # Hook for subclasses to preset uniform/attribute values.
        pass
    def get_attributes(self):
        # Query GL for all active vertex attributes of the linked program.
        result = []
        count = glGetProgramiv(self.handle, GL_ACTIVE_ATTRIBUTES)
        name = create_string_buffer(256)
        size = c_int()
        data_type = c_int()
        for index in xrange(count):
            glGetActiveAttrib(
                self.handle, index, 256, None,
                byref(size), byref(data_type), name)
            location = glGetAttribLocation(self.handle, name.value)
            attribute = Attribute(
                location, name.value, size.value, data_type.value)
            result.append(attribute)
        return result
    def get_uniforms(self):
        # Query GL for all active uniforms of the linked program.
        result = []
        count = glGetProgramiv(self.handle, GL_ACTIVE_UNIFORMS)
        for index in xrange(count):
            name, size, data_type = glGetActiveUniform(self.handle, index)
            # Array uniforms are reported as 'name[0]'; strip the suffix.
            if name.endswith('[0]'):
                name = name[:-3]
            location = glGetUniformLocation(self.handle, name)
            uniform = Uniform(location, name, size, data_type)
            result.append(uniform)
        return result
class Context(object):
    """Per-program render state: exposes the program's attributes and
    uniforms as plain Python attributes and issues the draw call.

    Internal fields are underscore-prefixed so `__setattr__` can route
    public names into the attribute/uniform value dicts instead.
    """
    def __init__(self, program):
        self._program = program
        self._attributes = dict((x.name, x) for x in program.get_attributes())
        self._uniforms = dict((x.name, x) for x in program.get_uniforms())
        self._attribute_values = {}
        self._uniform_values = {}
        self._program.set_defaults(self)
    def delete(self):
        self._program.delete()
    def __setattr__(self, name, value):
        # Underscore names go to the instance; known attribute/uniform
        # names are stored for binding at draw time.
        if name.startswith('_'):
            super(Context, self).__setattr__(name, value)
        elif name in self._attributes:
            self._attribute_values[name] = value
        elif name in self._uniforms:
            self._uniform_values[name] = value
        else:
            super(Context, self).__setattr__(name, value)
    def __getattr__(self, name):
        # NOTE(review): the fall-through delegates to
        # `super().__getattr__`, which `object` does not define, so unknown
        # names raise AttributeError only by accident (and the underscore
        # branch implicitly returns None when the lookup does not raise).
        if name.startswith('_'):
            super(Context, self).__getattr__(name)
        elif name in self._attributes:
            return self._attribute_values[name]
        elif name in self._uniforms:
            return self._uniform_values[name]
        else:
            super(Context, self).__getattr__(name)
    def draw(self, mode=GL_TRIANGLES, index_buffer=None):
        # Bind all stored uniform and attribute values, then draw either
        # indexed (glDrawElements) or sequential (glDrawArrays) geometry.
        self._program.use()
        for name, value in self._uniform_values.iteritems():
            if value is not None:
                self._uniforms[name].bind(value)
        for name, value in self._attribute_values.iteritems():
            if value is not None:
                self._attributes[name].bind(value)
        if index_buffer is None:
            # Draw as many vertices as the smallest bound buffer allows.
            vertex_count = min(x.vertex_count for x in
                self._attribute_values.itervalues() if x is not None)
            glDrawArrays(mode, 0, vertex_count)
        else:
            glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer.handle)
            glDrawElements(mode, index_buffer.size, GL_UNSIGNED_INT, c_void_p())
            glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0)
        for name, value in self._attribute_values.iteritems():
            if value is not None:
                self._attributes[name].unbind()
class Scene(object):
    """Base class for one stage of the application, attached to a Window.

    Subclasses override the listener hooks below; additional listener
    objects may also be appended to `self.listeners`.
    """
    def __init__(self, window):
        self.window = window
        self.listeners = []
        self.call('setup')
    def __del__(self):
        self.call('teardown')
    def call(self, name, *args, **kwargs):
        """Invoke `name` on each listener, then on the scene itself.

        Dispatch stops as soon as any handler returns a truthy value.
        """
        for target in self.listeners + [self]:
            if not hasattr(target, name):
                continue
            if getattr(target, name)(*args, **kwargs):
                return
    # listener functions
    def setup(self):
        pass
    def enter(self):
        pass
    def update(self, t, dt):
        pass
    def draw(self):
        pass
    def exit(self):
        pass
    def teardown(self):
        pass
    def on_size(self, width, height):
        pass
    def on_cursor_pos(self, x, y):
        pass
    def on_mouse_button(self, button, action, mods):
        pass
    def on_key(self, key, scancode, action, mods):
        pass
    def on_char(self, codepoint):
        pass
class Worker(object):
    """A hidden 1x1 GLFW window whose GL context backs a background thread.

    Subclasses override `run`; `start` executes it on a daemon thread with
    this worker's context made current.
    """
    def __init__(self):
        # The 1x1 window exists only to own a shareable GL context.
        self.handle = glfw.create_window(1, 1, 'Worker', None, None)
        if not self.handle:
            raise Exception
    def use(self):
        glfw.make_context_current(self.handle)
    def destroy(self):
        glfw.destroy_window(self.handle)
    def start(self):
        async(self.thread_main)
    def thread_main(self):
        self.use()
        try:
            self.run()
        finally:
            # Always release the hidden window, even if run() raises.
            self.destroy()
    def run(self):
        # Override in subclasses with the worker's actual workload.
        pass
class Window(object):
    """Wraps one GLFW window: creation, GL state, input callbacks, a stack
    of Scene objects and the per-frame update/draw dispatch.

    Event dispatch order (see `call`): objects in `self.listeners`, then
    the window's own hook methods, then the current scene.
    """
    def __init__(
        self, size=(800, 600), title='Python Graphics', visible=True,
        share=None, full_screen=False):
        self.app = App.instance
        if full_screen:
            # Full screen uses the primary monitor's current mode size.
            monitor = glfw.get_primary_monitor()
            size = glfw.get_video_mode(monitor)[0]
        else:
            monitor = None
        self.size = width, height = size
        self.aspect = float(width) / height
        glfw.window_hint(glfw.VISIBLE, visible)
        # Another Window may be passed as `share` to share GL objects.
        share = share and share.handle
        self.handle = glfw.create_window(width, height, title, monitor, share)
        if not self.handle:
            raise Exception
        self.app.add_window(self)
        self.cache = Cache()
        self.current_program = None
        self.use()
        self.framebuffer_size = glfw.get_framebuffer_size(self.handle)
        self.configure()
        self.exclusive = False
        self.listeners = []
        self.scene_stack = []
        self.set_callbacks()
        self.call('setup')
    @property
    def current_scene(self):
        # Top of the scene stack, or None when no scene is active.
        return self.scene_stack[-1] if self.scene_stack else None
    def push_scene(self, scene):
        if scene.window != self:
            raise Exception
        self.scene_stack.append(scene)
        scene.call('enter')
    def pop_scene(self):
        scene = self.current_scene
        scene.call('exit')
        self.scene_stack.pop()
    def set_scene(self, scene):
        # Replace the current scene (if any) with `scene`.
        if self.current_scene:
            self.pop_scene()
        self.push_scene(scene)
    # Timing properties proxied from the shared application ticker.
    @property
    def t(self):
        return self.app.ticker.t
    @property
    def dt(self):
        return self.app.ticker.dt
    @property
    def fps(self):
        return self.app.ticker.fps
    @property
    def ticks(self):
        return self.app.ticker.ticks
    def configure(self):
        # Default GL state; override in subclasses to customize.
        glEnable(GL_DEPTH_TEST)
        glEnable(GL_CULL_FACE)
        # glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)
    def close(self):
        # Flags the window; actual teardown happens in the next tick().
        glfw.set_window_should_close(self.handle, 1)
    def set_exclusive(self, exclusive=True):
        # Toggle captured (hidden, unbounded) cursor mode.
        if exclusive == self.exclusive:
            return
        self.exclusive = exclusive
        if exclusive:
            glfw.set_input_mode(self.handle, glfw.CURSOR, glfw.CURSOR_DISABLED)
        else:
            glfw.set_input_mode(self.handle, glfw.CURSOR, glfw.CURSOR_NORMAL)
    def set_current_program(self, program):
        self.current_program = program
    def use(self):
        glfw.make_context_current(self.handle)
        self.app.set_current_window(self)
    def clear(self):
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    def clear_color_buffer(self):
        glClear(GL_COLOR_BUFFER_BIT)
    def clear_depth_buffer(self):
        glClear(GL_DEPTH_BUFFER_BIT)
    def set_clear_color(self, r, g, b, a=1.0):
        glClearColor(r, g, b, a)
    def tick(self):
        # Per-frame entry point called by App.tick.
        self.use()
        if glfw.window_should_close(self.handle):
            self.call('teardown')
            self.app.remove_window(self)
            glfw.destroy_window(self.handle)
            return
        self.call('update', self.t, self.dt)
        self.redraw()
    def redraw(self):
        self.call('draw')
        glfw.swap_buffers(self.handle)
    def save_image(self, path):
        # Read the back buffer into a PIL image and save it; flipped
        # because GL rows are bottom-up.
        width, height = self.size
        data = (c_ubyte * (width * height * 3))()
        glReadBuffer(GL_BACK)
        glReadPixels(0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, data)
        im = Image.frombytes('RGB', (width, height), data)
        im = im.transpose(Image.FLIP_TOP_BOTTOM)
        im.save(path)
    def screenshot(self):
        # Save to the first unused 'pgNNNN.png' filename in the cwd.
        counter = 0
        while True:
            path = 'pg%04d.png' % counter
            if not os.path.exists(path):
                self.save_image(path)
                break
            counter += 1
    def set_callbacks(self):
        glfw.set_window_size_callback(self.handle, self._on_size)
        glfw.set_framebuffer_size_callback(self.handle, self._on_framebuffer_size)
        glfw.set_cursor_pos_callback(self.handle, self._on_cursor_pos)
        glfw.set_mouse_button_callback(self.handle, self._on_mouse_button)
        glfw.set_key_callback(self.handle, self._on_key)
        glfw.set_char_callback(self.handle, self._on_char)
    def call(self, name, *args, **kwargs):
        # Dispatch to listeners, then self, then the current scene;
        # a truthy return from any handler stops propagation.
        for listener in self.listeners + [self]:
            if hasattr(listener, name):
                if getattr(listener, name)(*args, **kwargs):
                    return
        # setup/teardown are window-level events; scenes have their own.
        if name in ['setup', 'teardown']:
            return
        scene = self.current_scene
        if scene is not None:
            scene.call(name, *args, **kwargs)
    # Raw GLFW callbacks; the extra `window` argument is the C handle.
    def _on_size(self, window, width, height):
        self.size = (width, height)
        self.aspect = float(width) / height
        self.call('on_size', width, height)
    def _on_framebuffer_size(self, window, width, height):
        self.framebuffer_size = (width, height)
        self.call('on_framebuffer_size', width, height)
    def _on_cursor_pos(self, window, x, y):
        self.call('on_cursor_pos', x, y)
    def _on_mouse_button(self, window, button, action, mods):
        self.call('on_mouse_button', button, action, mods)
    def _on_key(self, window, key, scancode, action, mods):
        self.call('on_key', key, scancode, action, mods)
        # F12 is a built-in screenshot shortcut.
        if action == glfw.PRESS and key == glfw.KEY_F12:
            self.screenshot()
    def _on_char(self, window, codepoint):
        self.call('on_char', codepoint)
    # listener functions
    def setup(self):
        pass
    def update(self, t, dt):
        pass
    def draw(self):
        pass
    def teardown(self):
        pass
    def on_size(self, width, height):
        pass
    def on_framebuffer_size(self, width, height):
        pass
    def on_cursor_pos(self, x, y):
        pass
    def on_mouse_button(self, button, action, mods):
        pass
    def on_key(self, key, scancode, action, mods):
        pass
    def on_char(self, codepoint):
        pass
class Ticker(object):
    """Tracks elapsed time, per-frame delta and a once-per-second FPS
    estimate for the main loop."""
    def __init__(self):
        now = time.time()
        self.start_time = now
        self.last_time = now
        self.t = 0
        self.dt = 0
        self.ticks = 0
        self.fps_time = now
        self.fps_ticks = 0
        self.fps = 0
    def tick(self):
        """Advance the clock; call exactly once per frame."""
        now = time.time()
        self.t = now - self.start_time
        self.dt = now - self.last_time
        self.last_time = now
        self.ticks += 1
        self.fps_ticks += 1
        elapsed = now - self.fps_time
        if elapsed >= 1:
            # Refresh the FPS figure roughly once a second.
            self.fps = self.fps_ticks / elapsed
            self.fps_ticks = 0
            self.fps_time = now
class App(object):
    """Application singleton: owns the GLFW lifecycle, all open windows,
    a cross-thread call queue and the global frame ticker."""
    # Singleton reference, assigned in __init__.
    instance = None
    def __init__(self):
        if not glfw.init():
            raise Exception
        App.instance = self
        self.windows = []
        self.current_window = None
        # NOTE(review): the `Queue` module is Python 2 (`queue` in Py3).
        self.queue = Queue.Queue()
        self.ticker = Ticker()
    def add_window(self, window):
        self.windows.append(window)
    def remove_window(self, window):
        self.windows.remove(window)
    def set_current_window(self, window):
        self.current_window = window
    def call_after(self, func, *args, **kwargs):
        # Thread-safe: schedule `func` to run on the main loop.
        self.queue.put((func, args, kwargs))
    def process_queue(self):
        # Drain callables scheduled from other threads via call_after.
        while self.queue.qsize():
            func, args, kwargs = self.queue.get()
            func(*args, **kwargs)
    def run(self):
        # Main loop: tick until the last window closes, then shut down.
        while self.windows:
            self.tick()
        glfw.terminate()
    def tick(self):
        self.ticker.tick()
        poll_events()
        self.process_queue()
        # Iterate a copy: windows may remove themselves during tick().
        for window in list(self.windows):
            window.tick()
def poll_events():
    # Process pending window-system events for all windows.
    glfw.poll_events()
def call_after(func, *args, **kwargs):
    # Module-level shortcut: schedule `func` on the main loop (thread-safe).
    call_after(func, *args, **kwargs) if False else App.instance.call_after(func, *args, **kwargs)
def async(func, *args, **kwargs):
    """Run `func(*args, **kwargs)` on a daemon background thread.

    NOTE(review): `async` became a reserved keyword in Python 3.7, so this
    name is only legal on Python 2 / early Python 3.
    """
    thread = threading.Thread(target=func, args=args, kwargs=kwargs)
    thread.setDaemon(True)
    thread.start()
def run(cls, *args, **kwargs):
    """Convenience entry point: create an App plus a Window (and optionally
    a Scene) from `cls`, then enter the main loop.

    `cls` may be a Window subclass (constructed directly) or a Scene
    subclass (attached to a default Window); extra arguments are forwarded
    to its constructor.
    """
    app = App()
    is_window = issubclass(cls, Window)
    window = cls(*args, **kwargs) if is_window else Window()
    if issubclass(cls, Scene):
        window.set_scene(cls(window, *args, **kwargs))
    app.run()
|
fogleman/pg | pg/glfw.py | _glfw_get_version | python | def _glfw_get_version(filename):
'''
Queries and returns the library version tuple or None by using a
subprocess.
'''
version_checker_source = """
import sys
import ctypes
def get_version(library_handle):
'''
Queries and returns the library version tuple or None.
'''
major_value = ctypes.c_int(0)
major = ctypes.pointer(major_value)
minor_value = ctypes.c_int(0)
minor = ctypes.pointer(minor_value)
rev_value = ctypes.c_int(0)
rev = ctypes.pointer(rev_value)
if hasattr(library_handle, 'glfwGetVersion'):
library_handle.glfwGetVersion(major, minor, rev)
version = (major_value.value,
minor_value.value,
rev_value.value)
return version
else:
return None
try:
input_func = raw_input
except NameError:
input_func = input
filename = input_func().strip()
try:
library_handle = ctypes.CDLL(filename)
except OSError:
pass
else:
version = get_version(library_handle)
print(version)
"""
args = [sys.executable, '-c', textwrap.dedent(version_checker_source)]
process = subprocess.Popen(args, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
out = process.communicate(_to_char_p(filename))[0]
out = out.strip()
if out:
return eval(out)
else:
return None | Queries and returns the library version tuple or None by using a
subprocess. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/glfw.py#L84-L135 | null | '''
Python bindings for GLFW.
'''
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = 'Florian Rhiem (florian.rhiem@gmail.com)'
__copyright__ = 'Copyright (c) 2013 Florian Rhiem'
__license__ = 'MIT'
__version__ = '1.0.1'
import ctypes
import os
import glob
import sys
import subprocess
import textwrap
# Python 3 compatibility:
try:
_getcwd = os.getcwdu
except AttributeError:
_getcwd = os.getcwd
if sys.version_info.major > 2:
_to_char_p = lambda s: s.encode('utf-8')
else:
_to_char_p = lambda s: s
def _find_library_candidates(library_names,
library_file_extensions,
library_search_paths):
'''
Finds and returns filenames which might be the library you are looking for.
'''
candidates = set()
for library_name in library_names:
for search_path in library_search_paths:
glob_query = os.path.join(search_path, '*'+library_name+'*')
for filename in glob.iglob(glob_query):
filename = os.path.realpath(filename)
if filename in candidates:
continue
basename = os.path.basename(filename)
if basename.startswith('lib'+library_name):
basename_end = basename[len('lib'+library_name):]
elif basename.startswith(library_name):
basename_end = basename[len(library_name):]
else:
continue
for file_extension in library_file_extensions:
if basename_end.startswith(file_extension):
if basename_end[len(file_extension):][:1] in ('', '.'):
candidates.add(filename)
if basename_end.endswith(file_extension):
basename_middle = basename_end[:-len(file_extension)]
if all(c in '0123456789.' for c in basename_middle):
candidates.add(filename)
return candidates
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    '''
    Finds, loads and returns the most recent version of the library.
    '''
    candidates = _find_library_candidates(library_names,
                                          library_file_extensions,
                                          library_search_paths)
    versioned = []
    for filename in candidates:
        version = version_check_callback(filename)
        # Only GLFW 3.x or newer is usable by these bindings.
        if version is not None and version >= (3, 0, 0):
            versioned.append((version, filename))
    if not versioned:
        return None
    # Load the candidate with the highest version number.
    return ctypes.CDLL(max(versioned)[1])
_glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib', '.dll'],
['', '/usr/lib', '/usr/local/lib'], _glfw_get_version)
if _glfw is None:
raise ImportError("Failed to load GLFW3 shared library.")
_callback_repositories = []
class _GLFWwindow(ctypes.Structure):
    '''
    Wrapper for:
        typedef struct GLFWwindow GLFWwindow;
    '''
    # Opaque handle type; the dummy field only keeps the struct non-empty.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWmonitor(ctypes.Structure):
    '''
    Wrapper for:
        typedef struct GLFWmonitor GLFWmonitor;
    '''
    # Opaque handle type; the dummy field only keeps the struct non-empty.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWvidmode(ctypes.Structure):
'''
Wrapper for:
typedef struct GLFWvidmode GLFWvidmode;
'''
_fields_ = [("width", ctypes.c_int),
("height", ctypes.c_int),
("red_bits", ctypes.c_int),
("green_bits", ctypes.c_int),
("blue_bits", ctypes.c_int),
("refresh_rate", ctypes.c_uint)]
def __init__(self):
ctypes.Structure.__init__(self)
self.width = 0
self.height = 0
self.red_bits = 0
self.green_bits = 0
self.blue_bits = 0
self.refresh_rate = 0
def wrap(self, video_mode):
'''
Wraps a nested python sequence.
'''
size, bits, self.refresh_rate = video_mode
self.width, self.height = size
self.red_bits, self.green_bits, self.blue_bits = bits
def unwrap(self):
'''
Returns a nested python sequence.
'''
size = self.width, self.height
bits = self.red_bits, self.green_bits, self.blue_bits
return size, bits, self.refresh_rate
class _GLFWgammaramp(ctypes.Structure):
'''
Wrapper for:
typedef struct GLFWgammaramp GLFWgammaramp;
'''
_fields_ = [("red", ctypes.POINTER(ctypes.c_ushort)),
("green", ctypes.POINTER(ctypes.c_ushort)),
("blue", ctypes.POINTER(ctypes.c_ushort)),
("size", ctypes.c_uint)]
def __init__(self):
ctypes.Structure.__init__(self)
self.red = None
self.red_array = None
self.green = None
self.green_array = None
self.blue = None
self.blue_array = None
self.size = 0
def wrap(self, gammaramp):
'''
Wraps a nested python sequence.
'''
red, green, blue = gammaramp
size = min(len(red), len(green), len(blue))
array_type = ctypes.c_ushort*size
self.size = ctypes.c_uint(size)
self.red_array = array_type()
self.green_array = array_type()
self.blue_array = array_type()
for i in range(self.size):
self.red_array[i] = int(red[i]*65535)
self.green_array[i] = int(green[i]*65535)
self.blue_array[i] = int(blue[i]*65535)
pointer_type = ctypes.POINTER(ctypes.c_ushort)
self.red = ctypes.cast(self.red_array, pointer_type)
self.green = ctypes.cast(self.green_array, pointer_type)
self.blue = ctypes.cast(self.blue_array, pointer_type)
def unwrap(self):
'''
Returns a nested python sequence.
'''
red = [self.red[i]/65535.0 for i in range(self.size)]
green = [self.green[i]/65535.0 for i in range(self.size)]
blue = [self.blue[i]/65535.0 for i in range(self.size)]
return red, green, blue
VERSION_MAJOR = 3
VERSION_MINOR = 0
VERSION_REVISION = 3
RELEASE = 0
PRESS = 1
REPEAT = 2
KEY_UNKNOWN = -1
KEY_SPACE = 32
KEY_APOSTROPHE = 39
KEY_COMMA = 44
KEY_MINUS = 45
KEY_PERIOD = 46
KEY_SLASH = 47
KEY_0 = 48
KEY_1 = 49
KEY_2 = 50
KEY_3 = 51
KEY_4 = 52
KEY_5 = 53
KEY_6 = 54
KEY_7 = 55
KEY_8 = 56
KEY_9 = 57
KEY_SEMICOLON = 59
KEY_EQUAL = 61
KEY_A = 65
KEY_B = 66
KEY_C = 67
KEY_D = 68
KEY_E = 69
KEY_F = 70
KEY_G = 71
KEY_H = 72
KEY_I = 73
KEY_J = 74
KEY_K = 75
KEY_L = 76
KEY_M = 77
KEY_N = 78
KEY_O = 79
KEY_P = 80
KEY_Q = 81
KEY_R = 82
KEY_S = 83
KEY_T = 84
KEY_U = 85
KEY_V = 86
KEY_W = 87
KEY_X = 88
KEY_Y = 89
KEY_Z = 90
KEY_LEFT_BRACKET = 91
KEY_BACKSLASH = 92
KEY_RIGHT_BRACKET = 93
KEY_GRAVE_ACCENT = 96
KEY_WORLD_1 = 161
KEY_WORLD_2 = 162
KEY_ESCAPE = 256
KEY_ENTER = 257
KEY_TAB = 258
KEY_BACKSPACE = 259
KEY_INSERT = 260
KEY_DELETE = 261
KEY_RIGHT = 262
KEY_LEFT = 263
KEY_DOWN = 264
KEY_UP = 265
KEY_PAGE_UP = 266
KEY_PAGE_DOWN = 267
KEY_HOME = 268
KEY_END = 269
KEY_CAPS_LOCK = 280
KEY_SCROLL_LOCK = 281
KEY_NUM_LOCK = 282
KEY_PRINT_SCREEN = 283
KEY_PAUSE = 284
KEY_F1 = 290
KEY_F2 = 291
KEY_F3 = 292
KEY_F4 = 293
KEY_F5 = 294
KEY_F6 = 295
KEY_F7 = 296
KEY_F8 = 297
KEY_F9 = 298
KEY_F10 = 299
KEY_F11 = 300
KEY_F12 = 301
KEY_F13 = 302
KEY_F14 = 303
KEY_F15 = 304
KEY_F16 = 305
KEY_F17 = 306
KEY_F18 = 307
KEY_F19 = 308
KEY_F20 = 309
KEY_F21 = 310
KEY_F22 = 311
KEY_F23 = 312
KEY_F24 = 313
KEY_F25 = 314
KEY_KP_0 = 320
KEY_KP_1 = 321
KEY_KP_2 = 322
KEY_KP_3 = 323
KEY_KP_4 = 324
KEY_KP_5 = 325
KEY_KP_6 = 326
KEY_KP_7 = 327
KEY_KP_8 = 328
KEY_KP_9 = 329
KEY_KP_DECIMAL = 330
KEY_KP_DIVIDE = 331
KEY_KP_MULTIPLY = 332
KEY_KP_SUBTRACT = 333
KEY_KP_ADD = 334
KEY_KP_ENTER = 335
KEY_KP_EQUAL = 336
KEY_LEFT_SHIFT = 340
KEY_LEFT_CONTROL = 341
KEY_LEFT_ALT = 342
KEY_LEFT_SUPER = 343
KEY_RIGHT_SHIFT = 344
KEY_RIGHT_CONTROL = 345
KEY_RIGHT_ALT = 346
KEY_RIGHT_SUPER = 347
KEY_MENU = 348
KEY_LAST = KEY_MENU
MOD_SHIFT = 0x0001
MOD_CONTROL = 0x0002
MOD_ALT = 0x0004
MOD_SUPER = 0x0008
MOUSE_BUTTON_1 = 0
MOUSE_BUTTON_2 = 1
MOUSE_BUTTON_3 = 2
MOUSE_BUTTON_4 = 3
MOUSE_BUTTON_5 = 4
MOUSE_BUTTON_6 = 5
MOUSE_BUTTON_7 = 6
MOUSE_BUTTON_8 = 7
MOUSE_BUTTON_LAST = MOUSE_BUTTON_8
MOUSE_BUTTON_LEFT = MOUSE_BUTTON_1
MOUSE_BUTTON_RIGHT = MOUSE_BUTTON_2
MOUSE_BUTTON_MIDDLE = MOUSE_BUTTON_3
JOYSTICK_1 = 0
JOYSTICK_2 = 1
JOYSTICK_3 = 2
JOYSTICK_4 = 3
JOYSTICK_5 = 4
JOYSTICK_6 = 5
JOYSTICK_7 = 6
JOYSTICK_8 = 7
JOYSTICK_9 = 8
JOYSTICK_10 = 9
JOYSTICK_11 = 10
JOYSTICK_12 = 11
JOYSTICK_13 = 12
JOYSTICK_14 = 13
JOYSTICK_15 = 14
JOYSTICK_16 = 15
JOYSTICK_LAST = JOYSTICK_16
NOT_INITIALIZED = 0x00010001
NO_CURRENT_CONTEXT = 0x00010002
INVALID_ENUM = 0x00010003
INVALID_VALUE = 0x00010004
OUT_OF_MEMORY = 0x00010005
API_UNAVAILABLE = 0x00010006
VERSION_UNAVAILABLE = 0x00010007
PLATFORM_ERROR = 0x00010008
FORMAT_UNAVAILABLE = 0x00010009
FOCUSED = 0x00020001
ICONIFIED = 0x00020002
RESIZABLE = 0x00020003
VISIBLE = 0x00020004
DECORATED = 0x00020005
RED_BITS = 0x00021001
GREEN_BITS = 0x00021002
BLUE_BITS = 0x00021003
ALPHA_BITS = 0x00021004
DEPTH_BITS = 0x00021005
STENCIL_BITS = 0x00021006
ACCUM_RED_BITS = 0x00021007
ACCUM_GREEN_BITS = 0x00021008
ACCUM_BLUE_BITS = 0x00021009
ACCUM_ALPHA_BITS = 0x0002100A
AUX_BUFFERS = 0x0002100B
STEREO = 0x0002100C
SAMPLES = 0x0002100D
SRGB_CAPABLE = 0x0002100E
REFRESH_RATE = 0x0002100F
CLIENT_API = 0x00022001
CONTEXT_VERSION_MAJOR = 0x00022002
CONTEXT_VERSION_MINOR = 0x00022003
CONTEXT_REVISION = 0x00022004
CONTEXT_ROBUSTNESS = 0x00022005
OPENGL_FORWARD_COMPAT = 0x00022006
OPENGL_DEBUG_CONTEXT = 0x00022007
OPENGL_PROFILE = 0x00022008
OPENGL_API = 0x00030001
OPENGL_ES_API = 0x00030002
NO_ROBUSTNESS = 0
NO_RESET_NOTIFICATION = 0x00031001
LOSE_CONTEXT_ON_RESET = 0x00031002
OPENGL_ANY_PROFILE = 0
OPENGL_CORE_PROFILE = 0x00032001
OPENGL_COMPAT_PROFILE = 0x00032002
CURSOR = 0x00033001
STICKY_KEYS = 0x00033002
STICKY_MOUSE_BUTTONS = 0x00033003
CURSOR_NORMAL = 0x00034001
CURSOR_HIDDEN = 0x00034002
CURSOR_DISABLED = 0x00034003
CONNECTED = 0x00040001
DISCONNECTED = 0x00040002
_GLFWerrorfun = ctypes.CFUNCTYPE(None,
ctypes.c_int,
ctypes.c_char_p)
_GLFWwindowposfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int)
_GLFWwindowsizefun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int)
_GLFWwindowclosefun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow))
_GLFWwindowrefreshfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow))
_GLFWwindowfocusfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int)
_GLFWwindowiconifyfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int)
_GLFWframebuffersizefun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int)
_GLFWmousebuttonfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int,
ctypes.c_int)
_GLFWcursorposfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_double,
ctypes.c_double)
_GLFWcursorenterfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int)
_GLFWscrollfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_double,
ctypes.c_double)
_GLFWkeyfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int,
ctypes.c_int,
ctypes.c_int)
_GLFWcharfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int)
_GLFWmonitorfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWmonitor),
ctypes.c_int)
_glfw.glfwInit.restype = ctypes.c_int
_glfw.glfwInit.argtypes = []
def init():
    '''
    Initializes the GLFW library.

    Wrapper for:
        int glfwInit(void);
    '''
    # NOTE(review): the working directory is saved and restored around the
    # call — presumably because glfwInit can change the cwd on some
    # platforms (OS X bundles); confirm before removing.
    cwd = _getcwd()
    res = _glfw.glfwInit()
    os.chdir(cwd)
    return res
_glfw.glfwTerminate.restype = None
_glfw.glfwTerminate.argtypes = []
def terminate():
    '''
    Terminates the GLFW library.

    Wrapper for:
        void glfwTerminate(void);
    '''
    # Direct pass-through to the C entry point.
    _glfw.glfwTerminate()
_glfw.glfwGetVersion.restype = None
_glfw.glfwGetVersion.argtypes = [ctypes.POINTER(ctypes.c_int),
ctypes.POINTER(ctypes.c_int),
ctypes.POINTER(ctypes.c_int)]
def get_version():
    '''
    Retrieves the version of the GLFW library.

    Wrapper for:
        void glfwGetVersion(int* major, int* minor, int* rev);

    Returns a (major, minor, rev) tuple of ints.
    '''
    major = ctypes.c_int(0)
    minor = ctypes.c_int(0)
    rev = ctypes.c_int(0)
    _glfw.glfwGetVersion(ctypes.pointer(major),
                         ctypes.pointer(minor),
                         ctypes.pointer(rev))
    return major.value, minor.value, rev.value
_glfw.glfwGetVersionString.restype = ctypes.c_char_p
_glfw.glfwGetVersionString.argtypes = []
def get_version_string():
    '''
    Returns a string describing the compile-time configuration.

    Wrapper for:
        const char* glfwGetVersionString(void);
    '''
    # restype is c_char_p, so this is `bytes` under Python 3.
    return _glfw.glfwGetVersionString()
_error_callback = None
_glfw.glfwSetErrorCallback.restype = _GLFWerrorfun
_glfw.glfwSetErrorCallback.argtypes = [_GLFWerrorfun]
def set_error_callback(cbfun):
    '''
    Sets the error callback.

    Wrapper for:
        GLFWerrorfun glfwSetErrorCallback(GLFWerrorfun cbfun);

    Returns the previously set Python callback, if any.
    '''
    global _error_callback
    previous_callback = _error_callback
    # Passing None clears the callback (ctypes maps 0 to NULL).
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWerrorfun(cbfun)
    # Keep both the Python callable and its ctypes wrapper referenced at
    # module level so the wrapper is not garbage-collected while GLFW
    # still holds the C function pointer.
    _error_callback = (cbfun, c_cbfun)
    cbfun = c_cbfun
    _glfw.glfwSetErrorCallback(cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwGetMonitors.restype = ctypes.POINTER(ctypes.POINTER(_GLFWmonitor))
_glfw.glfwGetMonitors.argtypes = [ctypes.POINTER(ctypes.c_int)]
def get_monitors():
    '''
    Returns the currently connected monitors.

    Wrapper for:
        GLFWmonitor** glfwGetMonitors(int* count);
    '''
    count = ctypes.c_int(0)
    monitor_array = _glfw.glfwGetMonitors(ctypes.pointer(count))
    # Copy the C array of monitor pointers into a Python list.
    return [monitor_array[i] for i in range(count.value)]
_glfw.glfwGetPrimaryMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetPrimaryMonitor.argtypes = []
def get_primary_monitor():
    '''
    Returns the primary monitor.

    Wrapper for:
        GLFWmonitor* glfwGetPrimaryMonitor(void);
    '''
    # Returns an opaque monitor pointer for use with the monitor APIs.
    return _glfw.glfwGetPrimaryMonitor()
_glfw.glfwGetMonitorPos.restype = None
_glfw.glfwGetMonitorPos.argtypes = [ctypes.POINTER(_GLFWmonitor),
ctypes.POINTER(ctypes.c_int),
ctypes.POINTER(ctypes.c_int)]
def get_monitor_pos(monitor):
    '''
    Returns the position of the monitor's viewport on the virtual screen.

    Wrapper for:
        void glfwGetMonitorPos(GLFWmonitor* monitor, int* xpos, int* ypos);

    Returns an (xpos, ypos) tuple of ints.
    '''
    xpos = ctypes.c_int(0)
    ypos = ctypes.c_int(0)
    _glfw.glfwGetMonitorPos(monitor,
                            ctypes.pointer(xpos),
                            ctypes.pointer(ypos))
    return xpos.value, ypos.value
_glfw.glfwGetMonitorPhysicalSize.restype = None
_glfw.glfwGetMonitorPhysicalSize.argtypes = [ctypes.POINTER(_GLFWmonitor),
ctypes.POINTER(ctypes.c_int),
ctypes.POINTER(ctypes.c_int)]
def get_monitor_physical_size(monitor):
    '''
    Returns the physical size of the monitor.

    Wrapper for:
        void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height);

    Returns a (width, height) tuple of ints.
    '''
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    _glfw.glfwGetMonitorPhysicalSize(monitor,
                                     ctypes.pointer(width),
                                     ctypes.pointer(height))
    return width.value, height.value
_glfw.glfwGetMonitorName.restype = ctypes.c_char_p
_glfw.glfwGetMonitorName.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_monitor_name(monitor):
    '''
    Returns the name of the specified monitor.

    Wrapper for:
        const char* glfwGetMonitorName(GLFWmonitor* monitor);
    '''
    # restype is c_char_p, so this is `bytes` under Python 3.
    return _glfw.glfwGetMonitorName(monitor)
_monitor_callback = None
_glfw.glfwSetMonitorCallback.restype = _GLFWmonitorfun
_glfw.glfwSetMonitorCallback.argtypes = [_GLFWmonitorfun]
def set_monitor_callback(cbfun):
    '''
    Sets the monitor configuration callback.

    Wrapper for:
        GLFWmonitorfun glfwSetMonitorCallback(GLFWmonitorfun cbfun);

    Returns the previously set Python callback, if any.
    '''
    global _monitor_callback
    previous_callback = _monitor_callback
    # Passing None clears the callback (ctypes maps 0 to NULL).
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWmonitorfun(cbfun)
    # Keep the ctypes wrapper referenced so it is not garbage-collected
    # while GLFW still holds the C function pointer.
    _monitor_callback = (cbfun, c_cbfun)
    cbfun = c_cbfun
    _glfw.glfwSetMonitorCallback(cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwGetVideoModes.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoModes.argtypes = [ctypes.POINTER(_GLFWmonitor),
ctypes.POINTER(ctypes.c_int)]
def get_video_modes(monitor):
    '''
    Returns the available video modes for the specified monitor.

    Wrapper for:
        const GLFWvidmode* glfwGetVideoModes(GLFWmonitor* monitor, int* count);

    Returns a list of nested tuples (see _GLFWvidmode.unwrap).
    '''
    count = ctypes.c_int(0)
    modes = _glfw.glfwGetVideoModes(monitor, ctypes.pointer(count))
    return [modes[i].unwrap() for i in range(count.value)]
_glfw.glfwGetVideoMode.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoMode.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_video_mode(monitor):
    '''
    Returns the current mode of the specified monitor.

    Wrapper for:
        const GLFWvidmode* glfwGetVideoMode(GLFWmonitor* monitor);
    '''
    # Dereference the returned pointer and convert to nested tuples.
    videomode = _glfw.glfwGetVideoMode(monitor).contents
    return videomode.unwrap()
_glfw.glfwSetGamma.restype = None
_glfw.glfwSetGamma.argtypes = [ctypes.POINTER(_GLFWmonitor),
                               ctypes.c_float]
def set_gamma(monitor, gamma):
    '''
    Generates a gamma ramp and sets it for the specified monitor.

    gamma is converted to a C float by ctypes.

    Wrapper for:
        void glfwSetGamma(GLFWmonitor* monitor, float gamma);
    '''
    _glfw.glfwSetGamma(monitor, gamma)
_glfw.glfwGetGammaRamp.restype = ctypes.POINTER(_GLFWgammaramp)
_glfw.glfwGetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_gamma_ramp(monitor):
    '''
    Retrieves the current gamma ramp for the specified monitor,
    unwrapped into plain Python values.

    Wrapper for:
        const GLFWgammaramp* glfwGetGammaRamp(GLFWmonitor* monitor);
    '''
    return _glfw.glfwGetGammaRamp(monitor).contents.unwrap()
_glfw.glfwSetGammaRamp.restype = None
_glfw.glfwSetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWgammaramp)]
def set_gamma_ramp(monitor, ramp):
    '''
    Sets the current gamma ramp for the specified monitor.

    ramp is a Python-level ramp sequence; it is packed into a
    _GLFWgammaramp structure before the call.

    Wrapper for:
        void glfwSetGammaRamp(GLFWmonitor* monitor, const GLFWgammaramp* ramp);
    '''
    gammaramp = _GLFWgammaramp()
    # wrap() copies the Python data into the ctypes structure.
    gammaramp.wrap(ramp)
    _glfw.glfwSetGammaRamp(monitor, ctypes.pointer(gammaramp))
_glfw.glfwDefaultWindowHints.restype = None
_glfw.glfwDefaultWindowHints.argtypes = []
def default_window_hints():
    '''
    Resets all window hints to their default values.

    Wrapper for:
        void glfwDefaultWindowHints(void);
    '''
    _glfw.glfwDefaultWindowHints()
_glfw.glfwWindowHint.restype = None
_glfw.glfwWindowHint.argtypes = [ctypes.c_int,
                                 ctypes.c_int]
def window_hint(target, hint):
    '''
    Sets the specified window hint to the desired value; affects the
    next create_window call.

    Wrapper for:
        void glfwWindowHint(int target, int hint);
    '''
    _glfw.glfwWindowHint(target, hint)
_glfw.glfwCreateWindow.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwCreateWindow.argtypes = [ctypes.c_int,
                                   ctypes.c_int,
                                   ctypes.c_char_p,
                                   ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWwindow)]
def create_window(width, height, title, monitor, share):
    '''
    Creates a window and its associated context.

    title may be a str; it is UTF-8 encoded before being passed on.
    Returns a window handle (POINTER(_GLFWwindow)).

    Wrapper for:
        GLFWwindow* glfwCreateWindow(int width, int height, const char* title, GLFWmonitor* monitor, GLFWwindow* share);
    '''
    return _glfw.glfwCreateWindow(width, height, _to_char_p(title),
                                  monitor, share)
_glfw.glfwDestroyWindow.restype = None
_glfw.glfwDestroyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def destroy_window(window):
    '''
    Destroys the specified window and its context, and drops any Python
    callbacks registered for it so they can be garbage collected.

    Wrapper for:
        void glfwDestroyWindow(GLFWwindow* window);
    '''
    _glfw.glfwDestroyWindow(window)
    # Compute the repository key with the same c_long cast used by every
    # set_*_callback function.  The original c_ulong cast produced a
    # different key whenever the pointer value had the sign bit set, so
    # the stored callbacks were never removed (a leak).
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    for callback_repository in _callback_repositories:
        if window_addr in callback_repository:
            del callback_repository[window_addr]
_glfw.glfwWindowShouldClose.restype = ctypes.c_int
_glfw.glfwWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow)]
def window_should_close(window):
    '''
    Checks the close flag of the specified window.

    Returns an int; nonzero means the window should close.

    Wrapper for:
        int glfwWindowShouldClose(GLFWwindow* window);
    '''
    return _glfw.glfwWindowShouldClose(window)
_glfw.glfwSetWindowShouldClose.restype = None
_glfw.glfwSetWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_int]
def set_window_should_close(window, value):
    '''
    Sets the close flag of the specified window.

    Wrapper for:
        void glfwSetWindowShouldClose(GLFWwindow* window, int value);
    '''
    _glfw.glfwSetWindowShouldClose(window, value)
_glfw.glfwSetWindowTitle.restype = None
_glfw.glfwSetWindowTitle.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_char_p]
def set_window_title(window, title):
    '''
    Sets the title of the specified window.

    title may be a str; it is UTF-8 encoded before being passed on.

    Wrapper for:
        void glfwSetWindowTitle(GLFWwindow* window, const char* title);
    '''
    _glfw.glfwSetWindowTitle(window, _to_char_p(title))
_glfw.glfwGetWindowPos.restype = None
_glfw.glfwGetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_int),
                                   ctypes.POINTER(ctypes.c_int)]
def get_window_pos(window):
    '''
    Retrieves the position of the client area of the specified window
    as an (x, y) tuple of ints.

    Wrapper for:
        void glfwGetWindowPos(GLFWwindow* window, int* xpos, int* ypos);
    '''
    xpos = ctypes.c_int(0)
    ypos = ctypes.c_int(0)
    # byref() passes lightweight references to the out-parameters.
    _glfw.glfwGetWindowPos(window, ctypes.byref(xpos), ctypes.byref(ypos))
    return xpos.value, ypos.value
_glfw.glfwSetWindowPos.restype = None
_glfw.glfwSetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_window_pos(window, xpos, ypos):
    '''
    Sets the position of the client area of the specified window.

    Wrapper for:
        void glfwSetWindowPos(GLFWwindow* window, int xpos, int ypos);
    '''
    _glfw.glfwSetWindowPos(window, xpos, ypos)
_glfw.glfwGetWindowSize.restype = None
_glfw.glfwGetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.POINTER(ctypes.c_int),
                                    ctypes.POINTER(ctypes.c_int)]
def get_window_size(window):
    '''
    Retrieves the size of the client area of the specified window as a
    (width, height) tuple of ints.

    Wrapper for:
        void glfwGetWindowSize(GLFWwindow* window, int* width, int* height);
    '''
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    # byref() passes lightweight references to the out-parameters.
    _glfw.glfwGetWindowSize(window, ctypes.byref(width), ctypes.byref(height))
    return width.value, height.value
_glfw.glfwSetWindowSize.restype = None
_glfw.glfwSetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.c_int,
                                    ctypes.c_int]
def set_window_size(window, width, height):
    '''
    Sets the size of the client area of the specified window.

    Wrapper for:
        void glfwSetWindowSize(GLFWwindow* window, int width, int height);
    '''
    _glfw.glfwSetWindowSize(window, width, height)
_glfw.glfwGetFramebufferSize.restype = None
_glfw.glfwGetFramebufferSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.POINTER(ctypes.c_int),
                                         ctypes.POINTER(ctypes.c_int)]
def get_framebuffer_size(window):
    '''
    Retrieves the size of the framebuffer of the specified window as a
    (width, height) tuple of ints.

    Wrapper for:
        void glfwGetFramebufferSize(GLFWwindow* window, int* width, int* height);
    '''
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    # byref() passes lightweight references to the out-parameters.
    _glfw.glfwGetFramebufferSize(window, ctypes.byref(width),
                                 ctypes.byref(height))
    return width.value, height.value
_glfw.glfwIconifyWindow.restype = None
_glfw.glfwIconifyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def iconify_window(window):
    '''
    Iconifies (minimizes) the specified window.

    Wrapper for:
        void glfwIconifyWindow(GLFWwindow* window);
    '''
    _glfw.glfwIconifyWindow(window)
_glfw.glfwRestoreWindow.restype = None
_glfw.glfwRestoreWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def restore_window(window):
    '''
    Restores the specified window from an iconified state.

    Wrapper for:
        void glfwRestoreWindow(GLFWwindow* window);
    '''
    _glfw.glfwRestoreWindow(window)
_glfw.glfwShowWindow.restype = None
_glfw.glfwShowWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def show_window(window):
    '''
    Makes the specified window visible.

    Wrapper for:
        void glfwShowWindow(GLFWwindow* window);
    '''
    _glfw.glfwShowWindow(window)
_glfw.glfwHideWindow.restype = None
_glfw.glfwHideWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def hide_window(window):
    '''
    Hides the specified window.

    Wrapper for:
        void glfwHideWindow(GLFWwindow* window);
    '''
    _glfw.glfwHideWindow(window)
_glfw.glfwGetWindowMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetWindowMonitor.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_monitor(window):
    '''
    Returns the monitor that the window uses for full screen mode, as a
    POINTER(_GLFWmonitor) handle.

    Wrapper for:
        GLFWmonitor* glfwGetWindowMonitor(GLFWwindow* window);
    '''
    return _glfw.glfwGetWindowMonitor(window)
_glfw.glfwGetWindowAttrib.restype = ctypes.c_int
_glfw.glfwGetWindowAttrib.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      ctypes.c_int]
def get_window_attrib(window, attrib):
    '''
    Returns an attribute of the specified window as an int.

    Wrapper for:
        int glfwGetWindowAttrib(GLFWwindow* window, int attrib);
    '''
    return _glfw.glfwGetWindowAttrib(window, attrib)
_glfw.glfwSetWindowUserPointer.restype = None
_glfw.glfwSetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_void_p]
def set_window_user_pointer(window, pointer):
    '''
    Sets the user pointer of the specified window.

    Wrapper for:
        void glfwSetWindowUserPointer(GLFWwindow* window, void* pointer);
    '''
    _glfw.glfwSetWindowUserPointer(window, pointer)
_glfw.glfwGetWindowUserPointer.restype = ctypes.c_void_p
_glfw.glfwGetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_user_pointer(window):
    '''
    Returns the user pointer of the specified window.

    Wrapper for:
        void* glfwGetWindowUserPointer(GLFWwindow* window);
    '''
    return _glfw.glfwGetWindowUserPointer(window)
_window_pos_callback_repository = {}
_callback_repositories.append(_window_pos_callback_repository)
_glfw.glfwSetWindowPosCallback.restype = _GLFWwindowposfun
_glfw.glfwSetWindowPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWwindowposfun]
def set_window_pos_callback(window, cbfun):
    '''
    Sets the position callback for the specified window and returns the
    previously installed Python callback, if any.

    Wrapper for:
        GLFWwindowposfun glfwSetWindowPosCallback(GLFWwindow* window, GLFWwindowposfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_pos_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowposfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _window_pos_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowPosCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_size_callback_repository = {}
_callback_repositories.append(_window_size_callback_repository)
_glfw.glfwSetWindowSizeCallback.restype = _GLFWwindowsizefun
_glfw.glfwSetWindowSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                            _GLFWwindowsizefun]
def set_window_size_callback(window, cbfun):
    '''
    Sets the size callback for the specified window and returns the
    previously installed Python callback, if any.

    Wrapper for:
        GLFWwindowsizefun glfwSetWindowSizeCallback(GLFWwindow* window, GLFWwindowsizefun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_size_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowsizefun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _window_size_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowSizeCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_close_callback_repository = {}
_callback_repositories.append(_window_close_callback_repository)
_glfw.glfwSetWindowCloseCallback.restype = _GLFWwindowclosefun
_glfw.glfwSetWindowCloseCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowclosefun]
def set_window_close_callback(window, cbfun):
    '''
    Sets the close callback for the specified window and returns the
    previously installed Python callback, if any.

    Wrapper for:
        GLFWwindowclosefun glfwSetWindowCloseCallback(GLFWwindow* window, GLFWwindowclosefun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_close_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowclosefun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _window_close_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowCloseCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_refresh_callback_repository = {}
_callback_repositories.append(_window_refresh_callback_repository)
_glfw.glfwSetWindowRefreshCallback.restype = _GLFWwindowrefreshfun
_glfw.glfwSetWindowRefreshCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowrefreshfun]
def set_window_refresh_callback(window, cbfun):
    '''
    Sets the refresh callback for the specified window and returns the
    previously installed Python callback, if any.

    Wrapper for:
        GLFWwindowrefreshfun glfwSetWindowRefreshCallback(GLFWwindow* window, GLFWwindowrefreshfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_refresh_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowrefreshfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _window_refresh_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowRefreshCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_focus_callback_repository = {}
_callback_repositories.append(_window_focus_callback_repository)
_glfw.glfwSetWindowFocusCallback.restype = _GLFWwindowfocusfun
_glfw.glfwSetWindowFocusCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowfocusfun]
def set_window_focus_callback(window, cbfun):
    '''
    Sets the focus callback for the specified window and returns the
    previously installed Python callback, if any.

    Wrapper for:
        GLFWwindowfocusfun glfwSetWindowFocusCallback(GLFWwindow* window, GLFWwindowfocusfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_focus_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowfocusfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _window_focus_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowFocusCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_iconify_callback_repository = {}
_callback_repositories.append(_window_iconify_callback_repository)
_glfw.glfwSetWindowIconifyCallback.restype = _GLFWwindowiconifyfun
_glfw.glfwSetWindowIconifyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowiconifyfun]
def set_window_iconify_callback(window, cbfun):
    '''
    Sets the iconify callback for the specified window and returns the
    previously installed Python callback, if any.

    Wrapper for:
        GLFWwindowiconifyfun glfwSetWindowIconifyCallback(GLFWwindow* window, GLFWwindowiconifyfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_iconify_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowiconifyfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _window_iconify_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowIconifyCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_framebuffer_size_callback_repository = {}
_callback_repositories.append(_framebuffer_size_callback_repository)
_glfw.glfwSetFramebufferSizeCallback.restype = _GLFWframebuffersizefun
_glfw.glfwSetFramebufferSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                                 _GLFWframebuffersizefun]
def set_framebuffer_size_callback(window, cbfun):
    '''
    Sets the framebuffer resize callback for the specified window and
    returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWframebuffersizefun glfwSetFramebufferSizeCallback(GLFWwindow* window, GLFWframebuffersizefun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _framebuffer_size_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWframebuffersizefun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _framebuffer_size_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetFramebufferSizeCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwPollEvents.restype = None
_glfw.glfwPollEvents.argtypes = []
def poll_events():
    '''
    Processes all pending events and returns immediately.

    Wrapper for:
        void glfwPollEvents(void);
    '''
    _glfw.glfwPollEvents()
_glfw.glfwWaitEvents.restype = None
_glfw.glfwWaitEvents.argtypes = []
def wait_events():
    '''
    Blocks until events are pending, then processes them.

    Wrapper for:
        void glfwWaitEvents(void);
    '''
    _glfw.glfwWaitEvents()
_glfw.glfwGetInputMode.restype = ctypes.c_int
_glfw.glfwGetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int]
def get_input_mode(window, mode):
    '''
    Returns the value of an input option for the specified window.

    Wrapper for:
        int glfwGetInputMode(GLFWwindow* window, int mode);
    '''
    return _glfw.glfwGetInputMode(window, mode)
_glfw.glfwSetInputMode.restype = None
_glfw.glfwSetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_input_mode(window, mode, value):
    '''
    Sets an input option for the specified window.

    mode is one of GLFW_CURSOR, GLFW_STICKY_KEYS or
    GLFW_STICKY_MOUSE_BUTTONS; value is the new value for that mode.

    Wrapper for:
        void glfwSetInputMode(GLFWwindow* window, int mode, int value);
    '''
    _glfw.glfwSetInputMode(window, mode, value)
_glfw.glfwGetKey.restype = ctypes.c_int
_glfw.glfwGetKey.argtypes = [ctypes.POINTER(_GLFWwindow),
                             ctypes.c_int]
def get_key(window, key):
    '''
    Returns the last reported state of a keyboard key for the specified
    window, as an int.

    Wrapper for:
        int glfwGetKey(GLFWwindow* window, int key);
    '''
    return _glfw.glfwGetKey(window, key)
_glfw.glfwGetMouseButton.restype = ctypes.c_int
_glfw.glfwGetMouseButton.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_int]
def get_mouse_button(window, button):
    '''
    Returns the last reported state of a mouse button for the specified
    window, as an int.

    Wrapper for:
        int glfwGetMouseButton(GLFWwindow* window, int button);
    '''
    return _glfw.glfwGetMouseButton(window, button)
_glfw.glfwGetCursorPos.restype = None
_glfw.glfwGetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_double),
                                   ctypes.POINTER(ctypes.c_double)]
def get_cursor_pos(window):
    '''
    Retrieves the last reported cursor position, relative to the client
    area of the window, as an (x, y) tuple of floats.

    Wrapper for:
        void glfwGetCursorPos(GLFWwindow* window, double* xpos, double* ypos);
    '''
    xpos = ctypes.c_double(0.0)
    ypos = ctypes.c_double(0.0)
    # byref() passes lightweight references to the out-parameters.
    _glfw.glfwGetCursorPos(window, ctypes.byref(xpos), ctypes.byref(ypos))
    return xpos.value, ypos.value
_glfw.glfwSetCursorPos.restype = None
_glfw.glfwSetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_double,
                                   ctypes.c_double]
def set_cursor_pos(window, xpos, ypos):
    '''
    Sets the position of the cursor, relative to the client area of the window.

    Wrapper for:
        void glfwSetCursorPos(GLFWwindow* window, double xpos, double ypos);
    '''
    _glfw.glfwSetCursorPos(window, xpos, ypos)
_key_callback_repository = {}
_callback_repositories.append(_key_callback_repository)
_glfw.glfwSetKeyCallback.restype = _GLFWkeyfun
_glfw.glfwSetKeyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     _GLFWkeyfun]
def set_key_callback(window, cbfun):
    '''
    Sets the key callback for the specified window and returns the
    previously installed Python callback, if any.

    Wrapper for:
        GLFWkeyfun glfwSetKeyCallback(GLFWwindow* window, GLFWkeyfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _key_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWkeyfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _key_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetKeyCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_char_callback_repository = {}
_callback_repositories.append(_char_callback_repository)
_glfw.glfwSetCharCallback.restype = _GLFWcharfun
_glfw.glfwSetCharCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      _GLFWcharfun]
def set_char_callback(window, cbfun):
    '''
    Sets the Unicode character callback for the specified window and
    returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWcharfun glfwSetCharCallback(GLFWwindow* window, GLFWcharfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _char_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWcharfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _char_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCharCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_mouse_button_callback_repository = {}
_callback_repositories.append(_mouse_button_callback_repository)
_glfw.glfwSetMouseButtonCallback.restype = _GLFWmousebuttonfun
_glfw.glfwSetMouseButtonCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWmousebuttonfun]
def set_mouse_button_callback(window, cbfun):
    '''
    Sets the mouse button callback for the specified window and returns
    the previously installed Python callback, if any.

    Wrapper for:
        GLFWmousebuttonfun glfwSetMouseButtonCallback(GLFWwindow* window, GLFWmousebuttonfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _mouse_button_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWmousebuttonfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _mouse_button_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetMouseButtonCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_cursor_pos_callback_repository = {}
_callback_repositories.append(_cursor_pos_callback_repository)
_glfw.glfwSetCursorPosCallback.restype = _GLFWcursorposfun
_glfw.glfwSetCursorPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWcursorposfun]
def set_cursor_pos_callback(window, cbfun):
    '''
    Sets the cursor position callback for the specified window and
    returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWcursorposfun glfwSetCursorPosCallback(GLFWwindow* window, GLFWcursorposfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _cursor_pos_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWcursorposfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _cursor_pos_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCursorPosCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_cursor_enter_callback_repository = {}
_callback_repositories.append(_cursor_enter_callback_repository)
_glfw.glfwSetCursorEnterCallback.restype = _GLFWcursorenterfun
_glfw.glfwSetCursorEnterCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWcursorenterfun]
def set_cursor_enter_callback(window, cbfun):
    '''
    Sets the cursor enter/exit callback for the specified window and
    returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWcursorenterfun glfwSetCursorEnterCallback(GLFWwindow* window, GLFWcursorenterfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _cursor_enter_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWcursorenterfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _cursor_enter_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCursorEnterCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_scroll_callback_repository = {}
_callback_repositories.append(_scroll_callback_repository)
_glfw.glfwSetScrollCallback.restype = _GLFWscrollfun
_glfw.glfwSetScrollCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                        _GLFWscrollfun]
def set_scroll_callback(window, cbfun):
    '''
    Sets the scroll callback for the specified window and returns the
    previously installed Python callback, if any.

    Wrapper for:
        GLFWscrollfun glfwSetScrollCallback(GLFWwindow* window, GLFWscrollfun cbfun);
    '''
    # The repository is keyed by the integer value of the window pointer.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _scroll_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWscrollfun(cbfun)
    # Store the ctypes wrapper as well, keeping it alive for GLFW.
    _scroll_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetScrollCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwJoystickPresent.restype = ctypes.c_int
_glfw.glfwJoystickPresent.argtypes = [ctypes.c_int]
def joystick_present(joy):
    '''
    Returns whether the specified joystick is present, as an int
    (nonzero means present).

    Wrapper for:
        int glfwJoystickPresent(int joy);
    '''
    return _glfw.glfwJoystickPresent(joy)
_glfw.glfwGetJoystickAxes.restype = ctypes.POINTER(ctypes.c_float)
_glfw.glfwGetJoystickAxes.argtypes = [ctypes.c_int,
                                      ctypes.POINTER(ctypes.c_int)]
def get_joystick_axes(joy):
    '''
    Returns the values of all axes of the specified joystick as a
    (float-array pointer, count) pair.

    Wrapper for:
        const float* glfwGetJoystickAxes(int joy, int* count);
    '''
    count = ctypes.c_int(0)
    axes = _glfw.glfwGetJoystickAxes(joy, ctypes.byref(count))
    return axes, count.value
_glfw.glfwGetJoystickButtons.restype = ctypes.POINTER(ctypes.c_ubyte)
_glfw.glfwGetJoystickButtons.argtypes = [ctypes.c_int,
                                         ctypes.POINTER(ctypes.c_int)]
def get_joystick_buttons(joy):
    '''
    Returns the state of all buttons of the specified joystick as a
    (byte-array pointer, count) pair.

    Wrapper for:
        const unsigned char* glfwGetJoystickButtons(int joy, int* count);
    '''
    count = ctypes.c_int(0)
    buttons = _glfw.glfwGetJoystickButtons(joy, ctypes.byref(count))
    return buttons, count.value
_glfw.glfwGetJoystickName.restype = ctypes.c_char_p
_glfw.glfwGetJoystickName.argtypes = [ctypes.c_int]
def get_joystick_name(joy):
    '''
    Returns the name of the specified joystick.

    NOTE: with restype c_char_p, ctypes returns bytes on Python 3.

    Wrapper for:
        const char* glfwGetJoystickName(int joy);
    '''
    return _glfw.glfwGetJoystickName(joy)
_glfw.glfwSetClipboardString.restype = None
_glfw.glfwSetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.c_char_p]
def set_clipboard_string(window, string):
    '''
    Sets the clipboard to the specified string.

    string may be a str; it is UTF-8 encoded before being passed on.

    Wrapper for:
        void glfwSetClipboardString(GLFWwindow* window, const char* string);
    '''
    _glfw.glfwSetClipboardString(window, _to_char_p(string))
_glfw.glfwGetClipboardString.restype = ctypes.c_char_p
_glfw.glfwGetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_clipboard_string(window):
    '''
    Retrieves the contents of the clipboard.

    NOTE: with restype c_char_p, ctypes returns bytes on Python 3.

    Wrapper for:
        const char* glfwGetClipboardString(GLFWwindow* window);
    '''
    return _glfw.glfwGetClipboardString(window)
_glfw.glfwGetTime.restype = ctypes.c_double
_glfw.glfwGetTime.argtypes = []
def get_time():
    '''
    Returns the value of the GLFW timer as a float (C double).

    Wrapper for:
        double glfwGetTime(void);
    '''
    return _glfw.glfwGetTime()
_glfw.glfwSetTime.restype = None
_glfw.glfwSetTime.argtypes = [ctypes.c_double]
def set_time(time):
    '''
    Sets the GLFW timer to the given value.

    Wrapper for:
        void glfwSetTime(double time);
    '''
    _glfw.glfwSetTime(time)
_glfw.glfwMakeContextCurrent.restype = None
_glfw.glfwMakeContextCurrent.argtypes = [ctypes.POINTER(_GLFWwindow)]
def make_context_current(window):
    '''
    Makes the context of the specified window current for the calling
    thread.

    Wrapper for:
        void glfwMakeContextCurrent(GLFWwindow* window);
    '''
    _glfw.glfwMakeContextCurrent(window)
_glfw.glfwGetCurrentContext.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwGetCurrentContext.argtypes = []
def get_current_context():
    '''
    Returns the window whose context is current on the calling thread,
    as a POINTER(_GLFWwindow) handle.

    Wrapper for:
        GLFWwindow* glfwGetCurrentContext(void);
    '''
    return _glfw.glfwGetCurrentContext()
_glfw.glfwSwapBuffers.restype = None
_glfw.glfwSwapBuffers.argtypes = [ctypes.POINTER(_GLFWwindow)]
def swap_buffers(window):
    '''
    Swaps the front and back buffers of the specified window.

    Wrapper for:
        void glfwSwapBuffers(GLFWwindow* window);
    '''
    _glfw.glfwSwapBuffers(window)
_glfw.glfwSwapInterval.restype = None
_glfw.glfwSwapInterval.argtypes = [ctypes.c_int]
def swap_interval(interval):
    '''
    Sets the swap interval for the current context.

    Wrapper for:
        void glfwSwapInterval(int interval);
    '''
    _glfw.glfwSwapInterval(interval)
_glfw.glfwExtensionSupported.restype = ctypes.c_int
_glfw.glfwExtensionSupported.argtypes = [ctypes.c_char_p]
def extension_supported(extension):
    '''
    Returns whether the specified extension is available, as an int
    (nonzero means supported).  extension may be a str; it is UTF-8
    encoded before being passed on.

    Wrapper for:
        int glfwExtensionSupported(const char* extension);
    '''
    return _glfw.glfwExtensionSupported(_to_char_p(extension))
_glfw.glfwGetProcAddress.restype = ctypes.c_void_p
_glfw.glfwGetProcAddress.argtypes = [ctypes.c_char_p]
def get_proc_address(procname):
    '''
    Returns the address of the specified function for the current
    context, as an int (void pointer value) or None.

    Wrapper for:
        GLFWglproc glfwGetProcAddress(const char* procname);
    '''
    return _glfw.glfwGetProcAddress(_to_char_p(procname))
_error_callback = None
_glfw.glfwSetErrorCallback.restype = _GLFWerrorfun
_glfw.glfwSetErrorCallback.argtypes = [_GLFWerrorfun]
def set_error_callback(cbfun):
    '''
    Sets the error callback and returns the previously installed Python
    callback, if any.

    Wrapper for:
        GLFWerrorfun glfwSetErrorCallback(GLFWerrorfun cbfun);
    '''
    # NOTE(review): this span was corrupted by dataset residue fused into
    # the source; the function is reconstructed from the surviving tokens.
    global _error_callback
    previous_callback = _error_callback
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWerrorfun(cbfun)
    # Keep the ctypes wrapper alive in a module global so the garbage
    # collector cannot reclaim it while GLFW may still invoke it.
    _error_callback = (cbfun, c_cbfun)
    cbfun = c_cbfun
    _glfw.glfwSetErrorCallback(cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = 'Florian Rhiem (florian.rhiem@gmail.com)'
__copyright__ = 'Copyright (c) 2013 Florian Rhiem'
__license__ = 'MIT'
__version__ = '1.0.1'
import ctypes
import os
import glob
import sys
import subprocess
import textwrap
# Python 3 compatibility:
try:
_getcwd = os.getcwdu
except AttributeError:
_getcwd = os.getcwd
if sys.version_info.major > 2:
_to_char_p = lambda s: s.encode('utf-8')
else:
_to_char_p = lambda s: s
def _find_library_candidates(library_names,
                             library_file_extensions,
                             library_search_paths):
    '''
    Finds and returns filenames which might be the library you are looking for.

    library_names -- base names to try (e.g. ['glfw', 'glfw3'])
    library_file_extensions -- shared-library suffixes (e.g. ['.so', '.dll'])
    library_search_paths -- directories to scan with glob

    Returns a set of absolute, symlink-resolved filenames.
    '''
    candidates = set()
    for library_name in library_names:
        for search_path in library_search_paths:
            glob_query = os.path.join(search_path, '*'+library_name+'*')
            for filename in glob.iglob(glob_query):
                # Resolve symlinks so each library file is considered once.
                filename = os.path.realpath(filename)
                if filename in candidates:
                    continue
                basename = os.path.basename(filename)
                if basename.startswith('lib'+library_name):
                    basename_end = basename[len('lib'+library_name):]
                elif basename.startswith(library_name):
                    basename_end = basename[len(library_name):]
                else:
                    continue
                for file_extension in library_file_extensions:
                    # Accept names like 'libglfw.so' and 'libglfw.so.3'.
                    if basename_end.startswith(file_extension):
                        if basename_end[len(file_extension):][:1] in ('', '.'):
                            candidates.add(filename)
                    # Accept versioned names like 'libglfw.3.0.dylib',
                    # where only digits and dots precede the extension.
                    if basename_end.endswith(file_extension):
                        basename_middle = basename_end[:-len(file_extension)]
                        if all(c in '0123456789.' for c in basename_middle):
                            candidates.add(filename)
    return candidates
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    '''
    Finds, loads and returns the most recent version of the library,
    or None when no viable candidate exists.
    '''
    candidates = _find_library_candidates(library_names,
                                          library_file_extensions,
                                          library_search_paths)
    viable = []
    for filename in candidates:
        version = version_check_callback(filename)
        # Only accept libraries that report a version of at least 3.0.0.
        if version is not None and version >= (3, 0, 0):
            viable.append((version, filename))
    if not viable:
        return None
    # max() over (version, filename) tuples picks the newest version,
    # breaking ties by filename, just as sorting and taking the last did.
    _, best_filename = max(viable)
    return ctypes.CDLL(best_filename)
def _glfw_get_version(filename):
    '''
    Queries and returns the library version tuple or None by using a
    subprocess.

    A subprocess is used so that a candidate library which crashes on
    load cannot take down this interpreter.
    '''
    # This source is written to the child process; it must remain valid
    # standalone Python for both Python 2 and 3.
    version_checker_source = """
        import sys
        import ctypes

        def get_version(library_handle):
            '''
            Queries and returns the library version tuple or None.
            '''
            major_value = ctypes.c_int(0)
            major = ctypes.pointer(major_value)
            minor_value = ctypes.c_int(0)
            minor = ctypes.pointer(minor_value)
            rev_value = ctypes.c_int(0)
            rev = ctypes.pointer(rev_value)
            if hasattr(library_handle, 'glfwGetVersion'):
                library_handle.glfwGetVersion(major, minor, rev)
                version = (major_value.value,
                           minor_value.value,
                           rev_value.value)
                return version
            else:
                return None

        try:
            input_func = raw_input
        except NameError:
            input_func = input
        filename = input_func().strip()

        try:
            library_handle = ctypes.CDLL(filename)
        except OSError:
            pass
        else:
            version = get_version(library_handle)
            print(version)
    """
    args = [sys.executable, '-c', textwrap.dedent(version_checker_source)]
    process = subprocess.Popen(args, universal_newlines=True,
                               stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    # The child reads the candidate filename from stdin and prints the
    # version tuple (or nothing on failure).
    out = process.communicate(_to_char_p(filename))[0]
    out = out.strip()
    if out:
        # NOTE(review): eval() of the child's output is safe only because
        # the child is our own trusted source above.
        return eval(out)
    else:
        return None
# Load the first sufficiently recent (>= 3.0.0) GLFW shared library found
# on the standard search paths; _glfw is the ctypes.CDLL handle used by
# every wrapper below.
_glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib', '.dll'],
                      ['', '/usr/lib', '/usr/local/lib'], _glfw_get_version)
if _glfw is None:
    raise ImportError("Failed to load GLFW3 shared library.")
# One dict per callback type, each mapping a window address to
# (python_callback, ctypes_wrapper); destroy_window() purges all of them.
_callback_repositories = []
class _GLFWwindow(ctypes.Structure):
    '''
    Wrapper for:
        typedef struct GLFWwindow GLFWwindow;

    GLFWwindow is opaque in the C API; only pointers to it are ever
    exchanged, so a single dummy field is enough here.
    '''
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWmonitor(ctypes.Structure):
    '''
    Wrapper for:
        typedef struct GLFWmonitor GLFWmonitor;

    GLFWmonitor is opaque in the C API; only pointers to it are ever
    exchanged, so a single dummy field is enough here.
    '''
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWvidmode(ctypes.Structure):
    '''
    Wrapper for:
        typedef struct GLFWvidmode GLFWvidmode;
    '''
    _fields_ = [("width", ctypes.c_int),
                ("height", ctypes.c_int),
                ("red_bits", ctypes.c_int),
                ("green_bits", ctypes.c_int),
                ("blue_bits", ctypes.c_int),
                ("refresh_rate", ctypes.c_uint)]

    def __init__(self):
        ctypes.Structure.__init__(self)
        # Zero every declared field explicitly.
        for field_name, _ in self._fields_:
            setattr(self, field_name, 0)

    def wrap(self, video_mode):
        '''
        Fills this structure from a nested python sequence of the form
        ((width, height), (red_bits, green_bits, blue_bits), refresh_rate).
        '''
        (self.width, self.height), bits, self.refresh_rate = video_mode
        self.red_bits, self.green_bits, self.blue_bits = bits

    def unwrap(self):
        '''
        Returns the structure's contents as the same nested python
        sequence wrap() accepts.
        '''
        return ((self.width, self.height),
                (self.red_bits, self.green_bits, self.blue_bits),
                self.refresh_rate)
class _GLFWgammaramp(ctypes.Structure):
    '''
    Wrapper for:
    typedef struct GLFWgammaramp GLFWgammaramp;
    '''
    _fields_ = [("red", ctypes.POINTER(ctypes.c_ushort)),
                ("green", ctypes.POINTER(ctypes.c_ushort)),
                ("blue", ctypes.POINTER(ctypes.c_ushort)),
                ("size", ctypes.c_uint)]
    def __init__(self):
        ctypes.Structure.__init__(self)
        # The *_array attributes are plain Python attributes (not ctypes
        # fields); they own the channel buffers so the raw pointers stored
        # in the red/green/blue fields remain valid while this object lives.
        self.red = None
        self.red_array = None
        self.green = None
        self.green_array = None
        self.blue = None
        self.blue_array = None
        self.size = 0
    def wrap(self, gammaramp):
        '''
        Wraps a nested python sequence.

        gammaramp is a (red, green, blue) triple of equal-length float
        sequences; values are scaled by 65535 and truncated to ints, so
        they are assumed to lie in [0.0, 1.0] (no clamping is performed).
        '''
        red, green, blue = gammaramp
        # Use the shortest channel so indexing below is always in range.
        size = min(len(red), len(green), len(blue))
        array_type = ctypes.c_ushort*size
        self.size = ctypes.c_uint(size)
        self.red_array = array_type()
        self.green_array = array_type()
        self.blue_array = array_type()
        # Reading self.size back from the ctypes field yields a plain int.
        for i in range(self.size):
            self.red_array[i] = int(red[i]*65535)
            self.green_array[i] = int(green[i]*65535)
            self.blue_array[i] = int(blue[i]*65535)
        pointer_type = ctypes.POINTER(ctypes.c_ushort)
        self.red = ctypes.cast(self.red_array, pointer_type)
        self.green = ctypes.cast(self.green_array, pointer_type)
        self.blue = ctypes.cast(self.blue_array, pointer_type)
    def unwrap(self):
        '''
        Returns a nested python sequence: (red, green, blue) lists of
        floats scaled back into [0.0, 1.0].
        '''
        red = [self.red[i]/65535.0 for i in range(self.size)]
        green = [self.green[i]/65535.0 for i in range(self.size)]
        blue = [self.blue[i]/65535.0 for i in range(self.size)]
        return red, green, blue
# ---------------------------------------------------------------------------
# GLFW constants, mirroring the GLFW3 C header values.
# ---------------------------------------------------------------------------

# Version of the GLFW API this wrapper was written against.
VERSION_MAJOR = 3
VERSION_MINOR = 0
VERSION_REVISION = 3

# Key and mouse button actions.
RELEASE = 0
PRESS = 1
REPEAT = 2

# Keyboard key tokens.
KEY_UNKNOWN = -1
KEY_SPACE = 32
KEY_APOSTROPHE = 39
KEY_COMMA = 44
KEY_MINUS = 45
KEY_PERIOD = 46
KEY_SLASH = 47
KEY_0 = 48
KEY_1 = 49
KEY_2 = 50
KEY_3 = 51
KEY_4 = 52
KEY_5 = 53
KEY_6 = 54
KEY_7 = 55
KEY_8 = 56
KEY_9 = 57
KEY_SEMICOLON = 59
KEY_EQUAL = 61
KEY_A = 65
KEY_B = 66
KEY_C = 67
KEY_D = 68
KEY_E = 69
KEY_F = 70
KEY_G = 71
KEY_H = 72
KEY_I = 73
KEY_J = 74
KEY_K = 75
KEY_L = 76
KEY_M = 77
KEY_N = 78
KEY_O = 79
KEY_P = 80
KEY_Q = 81
KEY_R = 82
KEY_S = 83
KEY_T = 84
KEY_U = 85
KEY_V = 86
KEY_W = 87
KEY_X = 88
KEY_Y = 89
KEY_Z = 90
KEY_LEFT_BRACKET = 91
KEY_BACKSLASH = 92
KEY_RIGHT_BRACKET = 93
KEY_GRAVE_ACCENT = 96
KEY_WORLD_1 = 161
KEY_WORLD_2 = 162
KEY_ESCAPE = 256
KEY_ENTER = 257
KEY_TAB = 258
KEY_BACKSPACE = 259
KEY_INSERT = 260
KEY_DELETE = 261
KEY_RIGHT = 262
KEY_LEFT = 263
KEY_DOWN = 264
KEY_UP = 265
KEY_PAGE_UP = 266
KEY_PAGE_DOWN = 267
KEY_HOME = 268
KEY_END = 269
KEY_CAPS_LOCK = 280
KEY_SCROLL_LOCK = 281
KEY_NUM_LOCK = 282
KEY_PRINT_SCREEN = 283
KEY_PAUSE = 284
KEY_F1 = 290
KEY_F2 = 291
KEY_F3 = 292
KEY_F4 = 293
KEY_F5 = 294
KEY_F6 = 295
KEY_F7 = 296
KEY_F8 = 297
KEY_F9 = 298
KEY_F10 = 299
KEY_F11 = 300
KEY_F12 = 301
KEY_F13 = 302
KEY_F14 = 303
KEY_F15 = 304
KEY_F16 = 305
KEY_F17 = 306
KEY_F18 = 307
KEY_F19 = 308
KEY_F20 = 309
KEY_F21 = 310
KEY_F22 = 311
KEY_F23 = 312
KEY_F24 = 313
KEY_F25 = 314
KEY_KP_0 = 320
KEY_KP_1 = 321
KEY_KP_2 = 322
KEY_KP_3 = 323
KEY_KP_4 = 324
KEY_KP_5 = 325
KEY_KP_6 = 326
KEY_KP_7 = 327
KEY_KP_8 = 328
KEY_KP_9 = 329
KEY_KP_DECIMAL = 330
KEY_KP_DIVIDE = 331
KEY_KP_MULTIPLY = 332
KEY_KP_SUBTRACT = 333
KEY_KP_ADD = 334
KEY_KP_ENTER = 335
KEY_KP_EQUAL = 336
KEY_LEFT_SHIFT = 340
KEY_LEFT_CONTROL = 341
KEY_LEFT_ALT = 342
KEY_LEFT_SUPER = 343
KEY_RIGHT_SHIFT = 344
KEY_RIGHT_CONTROL = 345
KEY_RIGHT_ALT = 346
KEY_RIGHT_SUPER = 347
KEY_MENU = 348
KEY_LAST = KEY_MENU

# Modifier key flags (powers of two, combinable as a bitmask).
MOD_SHIFT = 0x0001
MOD_CONTROL = 0x0002
MOD_ALT = 0x0004
MOD_SUPER = 0x0008

# Mouse buttons.
MOUSE_BUTTON_1 = 0
MOUSE_BUTTON_2 = 1
MOUSE_BUTTON_3 = 2
MOUSE_BUTTON_4 = 3
MOUSE_BUTTON_5 = 4
MOUSE_BUTTON_6 = 5
MOUSE_BUTTON_7 = 6
MOUSE_BUTTON_8 = 7
MOUSE_BUTTON_LAST = MOUSE_BUTTON_8
MOUSE_BUTTON_LEFT = MOUSE_BUTTON_1
MOUSE_BUTTON_RIGHT = MOUSE_BUTTON_2
MOUSE_BUTTON_MIDDLE = MOUSE_BUTTON_3

# Joystick identifiers.
JOYSTICK_1 = 0
JOYSTICK_2 = 1
JOYSTICK_3 = 2
JOYSTICK_4 = 3
JOYSTICK_5 = 4
JOYSTICK_6 = 5
JOYSTICK_7 = 6
JOYSTICK_8 = 7
JOYSTICK_9 = 8
JOYSTICK_10 = 9
JOYSTICK_11 = 10
JOYSTICK_12 = 11
JOYSTICK_13 = 12
JOYSTICK_14 = 13
JOYSTICK_15 = 14
JOYSTICK_16 = 15
JOYSTICK_LAST = JOYSTICK_16

# Error codes (passed to the error callback).
NOT_INITIALIZED = 0x00010001
NO_CURRENT_CONTEXT = 0x00010002
INVALID_ENUM = 0x00010003
INVALID_VALUE = 0x00010004
OUT_OF_MEMORY = 0x00010005
API_UNAVAILABLE = 0x00010006
VERSION_UNAVAILABLE = 0x00010007
PLATFORM_ERROR = 0x00010008
FORMAT_UNAVAILABLE = 0x00010009

# Window attributes / window hints.
FOCUSED = 0x00020001
ICONIFIED = 0x00020002
RESIZABLE = 0x00020003
VISIBLE = 0x00020004
DECORATED = 0x00020005

# Framebuffer hints.
RED_BITS = 0x00021001
GREEN_BITS = 0x00021002
BLUE_BITS = 0x00021003
ALPHA_BITS = 0x00021004
DEPTH_BITS = 0x00021005
STENCIL_BITS = 0x00021006
ACCUM_RED_BITS = 0x00021007
ACCUM_GREEN_BITS = 0x00021008
ACCUM_BLUE_BITS = 0x00021009
ACCUM_ALPHA_BITS = 0x0002100A
AUX_BUFFERS = 0x0002100B
STEREO = 0x0002100C
SAMPLES = 0x0002100D
SRGB_CAPABLE = 0x0002100E
REFRESH_RATE = 0x0002100F

# Context hints.
CLIENT_API = 0x00022001
CONTEXT_VERSION_MAJOR = 0x00022002
CONTEXT_VERSION_MINOR = 0x00022003
CONTEXT_REVISION = 0x00022004
CONTEXT_ROBUSTNESS = 0x00022005
OPENGL_FORWARD_COMPAT = 0x00022006
OPENGL_DEBUG_CONTEXT = 0x00022007
OPENGL_PROFILE = 0x00022008

# Values for the CLIENT_API hint.
OPENGL_API = 0x00030001
OPENGL_ES_API = 0x00030002

# Values for the CONTEXT_ROBUSTNESS hint.
NO_ROBUSTNESS = 0
NO_RESET_NOTIFICATION = 0x00031001
LOSE_CONTEXT_ON_RESET = 0x00031002

# Values for the OPENGL_PROFILE hint.
OPENGL_ANY_PROFILE = 0
OPENGL_CORE_PROFILE = 0x00032001
OPENGL_COMPAT_PROFILE = 0x00032002

# Input modes (first argument of set_input_mode / get_input_mode).
CURSOR = 0x00033001
STICKY_KEYS = 0x00033002
STICKY_MOUSE_BUTTONS = 0x00033003

# Values for the CURSOR input mode.
CURSOR_NORMAL = 0x00034001
CURSOR_HIDDEN = 0x00034002
CURSOR_DISABLED = 0x00034003

# Monitor connection events (passed to the monitor callback).
CONNECTED = 0x00040001
DISCONNECTED = 0x00040002
# ctypes prototypes for the GLFW callback typedefs. Instances of these
# CFUNCTYPE types wrap Python callables so the C library can invoke them;
# each wrapper must stay referenced for as long as it is registered (see
# the *_callback_repository dicts used by the setter functions).

# void (*)(int, const char*)
_GLFWerrorfun = ctypes.CFUNCTYPE(None,
                                 ctypes.c_int,
                                 ctypes.c_char_p)
# void (*)(GLFWwindow*, int, int)
_GLFWwindowposfun = ctypes.CFUNCTYPE(None,
                                     ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_int,
                                     ctypes.c_int)
# void (*)(GLFWwindow*, int, int)
_GLFWwindowsizefun = ctypes.CFUNCTYPE(None,
                                      ctypes.POINTER(_GLFWwindow),
                                      ctypes.c_int,
                                      ctypes.c_int)
# void (*)(GLFWwindow*)
_GLFWwindowclosefun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow))
# void (*)(GLFWwindow*)
_GLFWwindowrefreshfun = ctypes.CFUNCTYPE(None,
                                         ctypes.POINTER(_GLFWwindow))
# void (*)(GLFWwindow*, int)
_GLFWwindowfocusfun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow),
                                       ctypes.c_int)
# void (*)(GLFWwindow*, int)
_GLFWwindowiconifyfun = ctypes.CFUNCTYPE(None,
                                         ctypes.POINTER(_GLFWwindow),
                                         ctypes.c_int)
# void (*)(GLFWwindow*, int, int)
_GLFWframebuffersizefun = ctypes.CFUNCTYPE(None,
                                           ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_int,
                                           ctypes.c_int)
# void (*)(GLFWwindow*, int, int, int)
_GLFWmousebuttonfun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow),
                                       ctypes.c_int,
                                       ctypes.c_int,
                                       ctypes.c_int)
# void (*)(GLFWwindow*, double, double)
_GLFWcursorposfun = ctypes.CFUNCTYPE(None,
                                     ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_double,
                                     ctypes.c_double)
# void (*)(GLFWwindow*, int)
_GLFWcursorenterfun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow),
                                       ctypes.c_int)
# void (*)(GLFWwindow*, double, double)
_GLFWscrollfun = ctypes.CFUNCTYPE(None,
                                  ctypes.POINTER(_GLFWwindow),
                                  ctypes.c_double,
                                  ctypes.c_double)
# void (*)(GLFWwindow*, int, int, int, int)
_GLFWkeyfun = ctypes.CFUNCTYPE(None,
                               ctypes.POINTER(_GLFWwindow),
                               ctypes.c_int,
                               ctypes.c_int,
                               ctypes.c_int,
                               ctypes.c_int)
# void (*)(GLFWwindow*, int)
_GLFWcharfun = ctypes.CFUNCTYPE(None,
                                ctypes.POINTER(_GLFWwindow),
                                ctypes.c_int)
# void (*)(GLFWmonitor*, int)
_GLFWmonitorfun = ctypes.CFUNCTYPE(None,
                                   ctypes.POINTER(_GLFWmonitor),
                                   ctypes.c_int)
_glfw.glfwInit.restype = ctypes.c_int
_glfw.glfwInit.argtypes = []
def init():
    '''
    Initializes the GLFW library.
    Returns the int result of glfwInit (nonzero on success).
    Wrapper for:
    int glfwInit(void);
    '''
    # Save and restore the working directory around the call, since
    # glfwInit can change it on some platforms (e.g. OS X).
    cwd = _getcwd()
    res = _glfw.glfwInit()
    os.chdir(cwd)
    return res
_glfw.glfwTerminate.restype = None
_glfw.glfwTerminate.argtypes = []
def terminate():
    '''
    Terminates the GLFW library.
    Counterpart of init(); returns nothing.
    Wrapper for:
    void glfwTerminate(void);
    '''
    _glfw.glfwTerminate()
_glfw.glfwGetVersion.restype = None
_glfw.glfwGetVersion.argtypes = [ctypes.POINTER(ctypes.c_int),
                                 ctypes.POINTER(ctypes.c_int),
                                 ctypes.POINTER(ctypes.c_int)]
def get_version():
    '''
    Retrieves the version of the GLFW library.
    Returns a (major, minor, rev) tuple of ints.
    Wrapper for:
    void glfwGetVersion(int* major, int* minor, int* rev);
    '''
    major = ctypes.c_int(0)
    minor = ctypes.c_int(0)
    rev = ctypes.c_int(0)
    _glfw.glfwGetVersion(ctypes.byref(major),
                         ctypes.byref(minor),
                         ctypes.byref(rev))
    return major.value, minor.value, rev.value
_glfw.glfwGetVersionString.restype = ctypes.c_char_p
_glfw.glfwGetVersionString.argtypes = []
def get_version_string():
    '''
    Returns a string describing the compile-time configuration.
    Note: the c_char_p restype means this is a bytes object on Python 3.
    Wrapper for:
    const char* glfwGetVersionString(void);
    '''
    return _glfw.glfwGetVersionString()
# Holder for the current error callback as (python_callback, ctypes_wrapper)
# so the wrapper is not garbage-collected while registered.
# NOTE(review): glfwSetErrorCallback's prototype is declared here, but no
# set_error_callback wrapper appears in this part of the file -- confirm it
# is defined elsewhere.
_error_callback = None
_glfw.glfwSetErrorCallback.restype = _GLFWerrorfun
_glfw.glfwSetErrorCallback.argtypes = [_GLFWerrorfun]
_glfw.glfwGetMonitors.restype = ctypes.POINTER(ctypes.POINTER(_GLFWmonitor))
_glfw.glfwGetMonitors.argtypes = [ctypes.POINTER(ctypes.c_int)]
def get_monitors():
    '''
    Returns the currently connected monitors as a list of GLFWmonitor
    pointers.
    Wrapper for:
    GLFWmonitor** glfwGetMonitors(int* count);
    '''
    count = ctypes.c_int(0)
    monitor_array = _glfw.glfwGetMonitors(ctypes.byref(count))
    return [monitor_array[i] for i in range(count.value)]
_glfw.glfwGetPrimaryMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetPrimaryMonitor.argtypes = []
def get_primary_monitor():
    '''
    Returns the primary monitor as a GLFWmonitor pointer.
    Wrapper for:
    GLFWmonitor* glfwGetPrimaryMonitor(void);
    '''
    return _glfw.glfwGetPrimaryMonitor()
_glfw.glfwGetMonitorPos.restype = None
_glfw.glfwGetMonitorPos.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                    ctypes.POINTER(ctypes.c_int),
                                    ctypes.POINTER(ctypes.c_int)]
def get_monitor_pos(monitor):
    '''
    Returns the position of the monitor's viewport on the virtual screen
    as an (xpos, ypos) tuple of ints.
    Wrapper for:
    void glfwGetMonitorPos(GLFWmonitor* monitor, int* xpos, int* ypos);
    '''
    x = ctypes.c_int(0)
    y = ctypes.c_int(0)
    _glfw.glfwGetMonitorPos(monitor, ctypes.byref(x), ctypes.byref(y))
    return x.value, y.value
_glfw.glfwGetMonitorPhysicalSize.restype = None
_glfw.glfwGetMonitorPhysicalSize.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int)]
def get_monitor_physical_size(monitor):
    '''
    Returns the physical size of the monitor as a (width, height) tuple
    of ints.
    Wrapper for:
    void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height);
    '''
    w = ctypes.c_int(0)
    h = ctypes.c_int(0)
    _glfw.glfwGetMonitorPhysicalSize(monitor, ctypes.byref(w), ctypes.byref(h))
    return w.value, h.value
_glfw.glfwGetMonitorName.restype = ctypes.c_char_p
_glfw.glfwGetMonitorName.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_monitor_name(monitor):
    '''
    Returns the name of the specified monitor.
    Note: the c_char_p restype means this is a bytes object on Python 3.
    Wrapper for:
    const char* glfwGetMonitorName(GLFWmonitor* monitor);
    '''
    return _glfw.glfwGetMonitorName(monitor)
# Holder for the current monitor callback as (python_callback,
# ctypes_wrapper) so the wrapper stays alive while registered.
_monitor_callback = None
_glfw.glfwSetMonitorCallback.restype = _GLFWmonitorfun
_glfw.glfwSetMonitorCallback.argtypes = [_GLFWmonitorfun]
def set_monitor_callback(cbfun):
    '''
    Sets the monitor configuration callback.
    Returns the previously set Python callback, if any.
    Wrapper for:
    GLFWmonitorfun glfwSetMonitorCallback(GLFWmonitorfun cbfun);
    '''
    global _monitor_callback
    old = _monitor_callback
    target = 0 if cbfun is None else cbfun
    c_cbfun = _GLFWmonitorfun(target)
    # Keep both the Python callable and the ctypes wrapper referenced so
    # the wrapper is not garbage-collected while registered.
    _monitor_callback = (target, c_cbfun)
    _glfw.glfwSetMonitorCallback(c_cbfun)
    if old is not None and old[0] != 0:
        return old[0]
_glfw.glfwGetVideoModes.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoModes.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                    ctypes.POINTER(ctypes.c_int)]
def get_video_modes(monitor):
    '''
    Returns the available video modes for the specified monitor as a list
    of unwrapped _GLFWvidmode tuples.
    Wrapper for:
    const GLFWvidmode* glfwGetVideoModes(GLFWmonitor* monitor, int* count);
    '''
    count = ctypes.c_int(0)
    modes = _glfw.glfwGetVideoModes(monitor, ctypes.byref(count))
    return [modes[i].unwrap() for i in range(count.value)]
_glfw.glfwGetVideoMode.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoMode.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_video_mode(monitor):
    '''
    Returns the current mode of the specified monitor as an unwrapped
    _GLFWvidmode tuple: ((width, height), (r, g, b bits), refresh_rate).
    Wrapper for:
    const GLFWvidmode* glfwGetVideoMode(GLFWmonitor* monitor);
    '''
    videomode = _glfw.glfwGetVideoMode(monitor).contents
    return videomode.unwrap()
_glfw.glfwSetGamma.restype = None
_glfw.glfwSetGamma.argtypes = [ctypes.POINTER(_GLFWmonitor),
                               ctypes.c_float]
def set_gamma(monitor, gamma):
    '''
    Generates a gamma ramp and sets it for the specified monitor.
    gamma: the desired exponent, passed through as a C float.
    Wrapper for:
    void glfwSetGamma(GLFWmonitor* monitor, float gamma);
    '''
    _glfw.glfwSetGamma(monitor, gamma)
_glfw.glfwGetGammaRamp.restype = ctypes.POINTER(_GLFWgammaramp)
_glfw.glfwGetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_gamma_ramp(monitor):
    '''
    Retrieves the current gamma ramp for the specified monitor.
    Returns a (red, green, blue) triple of float lists in [0.0, 1.0].
    Wrapper for:
    const GLFWgammaramp* glfwGetGammaRamp(GLFWmonitor* monitor);
    '''
    gammaramp = _glfw.glfwGetGammaRamp(monitor).contents
    return gammaramp.unwrap()
_glfw.glfwSetGammaRamp.restype = None
_glfw.glfwSetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWgammaramp)]
def set_gamma_ramp(monitor, ramp):
    '''
    Sets the current gamma ramp for the specified monitor.
    ramp: a (red, green, blue) triple of float sequences in [0.0, 1.0]
    (see _GLFWgammaramp.wrap).
    Wrapper for:
    void glfwSetGammaRamp(GLFWmonitor* monitor, const GLFWgammaramp* ramp);
    '''
    gammaramp = _GLFWgammaramp()
    gammaramp.wrap(ramp)
    # gammaramp only needs to outlive the call; per the GLFW docs the
    # library copies the ramp data.
    _glfw.glfwSetGammaRamp(monitor, ctypes.pointer(gammaramp))
_glfw.glfwDefaultWindowHints.restype = None
_glfw.glfwDefaultWindowHints.argtypes = []
def default_window_hints():
    '''
    Resets all window hints to their default values.
    Wrapper for:
    void glfwDefaultWindowHints(void);
    '''
    _glfw.glfwDefaultWindowHints()
_glfw.glfwWindowHint.restype = None
_glfw.glfwWindowHint.argtypes = [ctypes.c_int,
                                 ctypes.c_int]
def window_hint(target, hint):
    '''
    Sets the specified window hint to the desired value.
    target: one of the hint constants above (e.g. RESIZABLE, SAMPLES).
    Wrapper for:
    void glfwWindowHint(int target, int hint);
    '''
    _glfw.glfwWindowHint(target, hint)
_glfw.glfwCreateWindow.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwCreateWindow.argtypes = [ctypes.c_int,
                                   ctypes.c_int,
                                   ctypes.c_char_p,
                                   ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWwindow)]
def create_window(width, height, title, monitor, share):
    '''
    Creates a window and its associated context.
    title: a str; encoded to UTF-8 bytes for the C call on Python 3.
    monitor/share: GLFWmonitor/GLFWwindow pointers, or None for NULL.
    Returns a GLFWwindow pointer.
    Wrapper for:
    GLFWwindow* glfwCreateWindow(int width, int height, const char* title, GLFWmonitor* monitor, GLFWwindow* share);
    '''
    return _glfw.glfwCreateWindow(width, height, _to_char_p(title),
                                  monitor, share)
_glfw.glfwDestroyWindow.restype = None
_glfw.glfwDestroyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def destroy_window(window):
    '''
    Destroys the specified window and its context, and drops any callbacks
    registered for it so their Python callables can be garbage-collected.
    Wrapper for:
    void glfwDestroyWindow(GLFWwindow* window);
    '''
    _glfw.glfwDestroyWindow(window)
    # Read the window's address as a c_void_p: unlike the previous c_ulong
    # it is pointer-sized on every platform (c_ulong is only 32 bits on
    # 64-bit Windows and would truncate the address, so repository entries
    # keyed on the full address would never be found and cleaned up).
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    for callback_repository in _callback_repositories:
        if window_addr in callback_repository:
            del callback_repository[window_addr]
_glfw.glfwWindowShouldClose.restype = ctypes.c_int
_glfw.glfwWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow)]
def window_should_close(window):
    '''
    Checks the close flag of the specified window.
    Returns an int (nonzero when the window should close).
    Wrapper for:
    int glfwWindowShouldClose(GLFWwindow* window);
    '''
    return _glfw.glfwWindowShouldClose(window)
_glfw.glfwSetWindowShouldClose.restype = None
_glfw.glfwSetWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_int]
def set_window_should_close(window, value):
    '''
    Sets the close flag of the specified window.
    value: an int flag (nonzero requests the window to close).
    Wrapper for:
    void glfwSetWindowShouldClose(GLFWwindow* window, int value);
    '''
    _glfw.glfwSetWindowShouldClose(window, value)
_glfw.glfwSetWindowTitle.restype = None
_glfw.glfwSetWindowTitle.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_char_p]
def set_window_title(window, title):
    '''
    Sets the title of the specified window.
    title: a str; encoded to UTF-8 bytes for the C call on Python 3.
    Wrapper for:
    void glfwSetWindowTitle(GLFWwindow* window, const char* title);
    '''
    _glfw.glfwSetWindowTitle(window, _to_char_p(title))
_glfw.glfwGetWindowPos.restype = None
_glfw.glfwGetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_int),
                                   ctypes.POINTER(ctypes.c_int)]
def get_window_pos(window):
    '''
    Retrieves the position of the client area of the specified window as
    an (xpos, ypos) tuple of ints.
    Wrapper for:
    void glfwGetWindowPos(GLFWwindow* window, int* xpos, int* ypos);
    '''
    x = ctypes.c_int(0)
    y = ctypes.c_int(0)
    _glfw.glfwGetWindowPos(window, ctypes.byref(x), ctypes.byref(y))
    return x.value, y.value
_glfw.glfwSetWindowPos.restype = None
_glfw.glfwSetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_window_pos(window, xpos, ypos):
    '''
    Sets the position of the client area of the specified window.
    xpos, ypos: screen coordinates as ints.
    Wrapper for:
    void glfwSetWindowPos(GLFWwindow* window, int xpos, int ypos);
    '''
    _glfw.glfwSetWindowPos(window, xpos, ypos)
_glfw.glfwGetWindowSize.restype = None
_glfw.glfwGetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.POINTER(ctypes.c_int),
                                    ctypes.POINTER(ctypes.c_int)]
def get_window_size(window):
    '''
    Retrieves the size of the client area of the specified window as a
    (width, height) tuple of ints.
    Wrapper for:
    void glfwGetWindowSize(GLFWwindow* window, int* width, int* height);
    '''
    w = ctypes.c_int(0)
    h = ctypes.c_int(0)
    _glfw.glfwGetWindowSize(window, ctypes.byref(w), ctypes.byref(h))
    return w.value, h.value
_glfw.glfwSetWindowSize.restype = None
_glfw.glfwSetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.c_int,
                                    ctypes.c_int]
def set_window_size(window, width, height):
    '''
    Sets the size of the client area of the specified window.
    width, height: the new client-area size in screen coordinates.
    Wrapper for:
    void glfwSetWindowSize(GLFWwindow* window, int width, int height);
    '''
    _glfw.glfwSetWindowSize(window, width, height)
_glfw.glfwGetFramebufferSize.restype = None
_glfw.glfwGetFramebufferSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.POINTER(ctypes.c_int),
                                         ctypes.POINTER(ctypes.c_int)]
def get_framebuffer_size(window):
    '''
    Retrieves the size of the framebuffer of the specified window as a
    (width, height) tuple of ints.
    Wrapper for:
    void glfwGetFramebufferSize(GLFWwindow* window, int* width, int* height);
    '''
    w = ctypes.c_int(0)
    h = ctypes.c_int(0)
    _glfw.glfwGetFramebufferSize(window, ctypes.byref(w), ctypes.byref(h))
    return w.value, h.value
_glfw.glfwIconifyWindow.restype = None
_glfw.glfwIconifyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def iconify_window(window):
    '''
    Iconifies the specified window.
    window: a GLFWwindow pointer as returned by create_window().
    Wrapper for:
    void glfwIconifyWindow(GLFWwindow* window);
    '''
    _glfw.glfwIconifyWindow(window)
_glfw.glfwRestoreWindow.restype = None
_glfw.glfwRestoreWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def restore_window(window):
    '''
    Restores the specified window.
    Counterpart of iconify_window().
    Wrapper for:
    void glfwRestoreWindow(GLFWwindow* window);
    '''
    _glfw.glfwRestoreWindow(window)
_glfw.glfwShowWindow.restype = None
_glfw.glfwShowWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def show_window(window):
    '''
    Makes the specified window visible.
    Counterpart of hide_window().
    Wrapper for:
    void glfwShowWindow(GLFWwindow* window);
    '''
    _glfw.glfwShowWindow(window)
_glfw.glfwHideWindow.restype = None
_glfw.glfwHideWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def hide_window(window):
    '''
    Hides the specified window.
    Counterpart of show_window().
    Wrapper for:
    void glfwHideWindow(GLFWwindow* window);
    '''
    _glfw.glfwHideWindow(window)
_glfw.glfwGetWindowMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetWindowMonitor.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_monitor(window):
    '''
    Returns the monitor that the window uses for full screen mode, as a
    GLFWmonitor pointer.
    Wrapper for:
    GLFWmonitor* glfwGetWindowMonitor(GLFWwindow* window);
    '''
    return _glfw.glfwGetWindowMonitor(window)
_glfw.glfwGetWindowAttrib.restype = ctypes.c_int
_glfw.glfwGetWindowAttrib.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      ctypes.c_int]
def get_window_attrib(window, attrib):
    '''
    Returns an attribute of the specified window as an int.
    attrib: one of the attribute constants (e.g. FOCUSED, RESIZABLE).
    Wrapper for:
    int glfwGetWindowAttrib(GLFWwindow* window, int attrib);
    '''
    return _glfw.glfwGetWindowAttrib(window, attrib)
_glfw.glfwSetWindowUserPointer.restype = None
_glfw.glfwSetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_void_p]
def set_window_user_pointer(window, pointer):
    '''
    Sets the user pointer of the specified window.
    pointer: passed through as a raw c_void_p; GLFW does not keep the
    pointed-to Python object alive.
    Wrapper for:
    void glfwSetWindowUserPointer(GLFWwindow* window, void* pointer);
    '''
    _glfw.glfwSetWindowUserPointer(window, pointer)
_glfw.glfwGetWindowUserPointer.restype = ctypes.c_void_p
_glfw.glfwGetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_user_pointer(window):
    '''
    Returns the user pointer of the specified window as an int address
    (or None for NULL, per ctypes c_void_p conversion).
    Wrapper for:
    void* glfwGetWindowUserPointer(GLFWwindow* window);
    '''
    return _glfw.glfwGetWindowUserPointer(window)
_window_pos_callback_repository = {}
_callback_repositories.append(_window_pos_callback_repository)
_glfw.glfwSetWindowPosCallback.restype = _GLFWwindowposfun
_glfw.glfwSetWindowPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWwindowposfun]
def set_window_pos_callback(window, cbfun):
    '''
    Sets the position callback for the specified window.
    Returns the previously set Python callback, if any.
    Wrapper for:
    GLFWwindowposfun glfwSetWindowPosCallback(GLFWwindow* window, GLFWwindowposfun cbfun);
    '''
    # Key the repository by the window's address, read as a pointer-sized
    # c_void_p. The previous c_long read truncated the address on 64-bit
    # Windows (c_long is 32 bits there) and produced signed keys that never
    # matched the unsigned keys destroy_window() uses for cleanup.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _window_pos_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowposfun(cbfun)
    # Keep both the Python callable and its ctypes wrapper alive while
    # the callback is registered.
    _window_pos_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowPosCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_size_callback_repository = {}
_callback_repositories.append(_window_size_callback_repository)
_glfw.glfwSetWindowSizeCallback.restype = _GLFWwindowsizefun
_glfw.glfwSetWindowSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                            _GLFWwindowsizefun]
def set_window_size_callback(window, cbfun):
    '''
    Sets the size callback for the specified window.
    Returns the previously set Python callback, if any.
    Wrapper for:
    GLFWwindowsizefun glfwSetWindowSizeCallback(GLFWwindow* window, GLFWwindowsizefun cbfun);
    '''
    # Key the repository by the window's address, read as a pointer-sized
    # c_void_p. The previous c_long read truncated the address on 64-bit
    # Windows (c_long is 32 bits there) and produced signed keys that never
    # matched the unsigned keys destroy_window() uses for cleanup.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _window_size_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowsizefun(cbfun)
    # Keep both the Python callable and its ctypes wrapper alive while
    # the callback is registered.
    _window_size_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowSizeCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_close_callback_repository = {}
_callback_repositories.append(_window_close_callback_repository)
_glfw.glfwSetWindowCloseCallback.restype = _GLFWwindowclosefun
_glfw.glfwSetWindowCloseCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowclosefun]
def set_window_close_callback(window, cbfun):
    '''
    Sets the close callback for the specified window.
    Returns the previously set Python callback, if any.
    Wrapper for:
    GLFWwindowclosefun glfwSetWindowCloseCallback(GLFWwindow* window, GLFWwindowclosefun cbfun);
    '''
    # Key the repository by the window's address, read as a pointer-sized
    # c_void_p. The previous c_long read truncated the address on 64-bit
    # Windows (c_long is 32 bits there) and produced signed keys that never
    # matched the unsigned keys destroy_window() uses for cleanup.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _window_close_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowclosefun(cbfun)
    # Keep both the Python callable and its ctypes wrapper alive while
    # the callback is registered.
    _window_close_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowCloseCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_refresh_callback_repository = {}
_callback_repositories.append(_window_refresh_callback_repository)
_glfw.glfwSetWindowRefreshCallback.restype = _GLFWwindowrefreshfun
_glfw.glfwSetWindowRefreshCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowrefreshfun]
def set_window_refresh_callback(window, cbfun):
    '''
    Sets the refresh callback for the specified window.
    Returns the previously set Python callback, if any.
    Wrapper for:
    GLFWwindowrefreshfun glfwSetWindowRefreshCallback(GLFWwindow* window, GLFWwindowrefreshfun cbfun);
    '''
    # Key the repository by the window's address, read as a pointer-sized
    # c_void_p. The previous c_long read truncated the address on 64-bit
    # Windows (c_long is 32 bits there) and produced signed keys that never
    # matched the unsigned keys destroy_window() uses for cleanup.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _window_refresh_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowrefreshfun(cbfun)
    # Keep both the Python callable and its ctypes wrapper alive while
    # the callback is registered.
    _window_refresh_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowRefreshCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_focus_callback_repository = {}
_callback_repositories.append(_window_focus_callback_repository)
_glfw.glfwSetWindowFocusCallback.restype = _GLFWwindowfocusfun
_glfw.glfwSetWindowFocusCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowfocusfun]
def set_window_focus_callback(window, cbfun):
    '''
    Sets the focus callback for the specified window.
    Returns the previously set Python callback, if any.
    Wrapper for:
    GLFWwindowfocusfun glfwSetWindowFocusCallback(GLFWwindow* window, GLFWwindowfocusfun cbfun);
    '''
    # Key the repository by the window's address, read as a pointer-sized
    # c_void_p. The previous c_long read truncated the address on 64-bit
    # Windows (c_long is 32 bits there) and produced signed keys that never
    # matched the unsigned keys destroy_window() uses for cleanup.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _window_focus_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowfocusfun(cbfun)
    # Keep both the Python callable and its ctypes wrapper alive while
    # the callback is registered.
    _window_focus_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowFocusCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_iconify_callback_repository = {}
_callback_repositories.append(_window_iconify_callback_repository)
_glfw.glfwSetWindowIconifyCallback.restype = _GLFWwindowiconifyfun
_glfw.glfwSetWindowIconifyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowiconifyfun]
def set_window_iconify_callback(window, cbfun):
    '''
    Sets the iconify callback for the specified window.
    Returns the previously set Python callback, if any.
    Wrapper for:
    GLFWwindowiconifyfun glfwSetWindowIconifyCallback(GLFWwindow* window, GLFWwindowiconifyfun cbfun);
    '''
    # Key the repository by the window's address, read as a pointer-sized
    # c_void_p. The previous c_long read truncated the address on 64-bit
    # Windows (c_long is 32 bits there) and produced signed keys that never
    # matched the unsigned keys destroy_window() uses for cleanup.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _window_iconify_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowiconifyfun(cbfun)
    # Keep both the Python callable and its ctypes wrapper alive while
    # the callback is registered.
    _window_iconify_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowIconifyCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_framebuffer_size_callback_repository = {}
_callback_repositories.append(_framebuffer_size_callback_repository)
_glfw.glfwSetFramebufferSizeCallback.restype = _GLFWframebuffersizefun
_glfw.glfwSetFramebufferSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                                 _GLFWframebuffersizefun]
def set_framebuffer_size_callback(window, cbfun):
    '''
    Sets the framebuffer resize callback for the specified window.
    Returns the previously set Python callback, if any.
    Wrapper for:
    GLFWframebuffersizefun glfwSetFramebufferSizeCallback(GLFWwindow* window, GLFWframebuffersizefun cbfun);
    '''
    # Key the repository by the window's address, read as a pointer-sized
    # c_void_p. The previous c_long read truncated the address on 64-bit
    # Windows (c_long is 32 bits there) and produced signed keys that never
    # matched the unsigned keys destroy_window() uses for cleanup.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _framebuffer_size_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWframebuffersizefun(cbfun)
    # Keep both the Python callable and its ctypes wrapper alive while
    # the callback is registered.
    _framebuffer_size_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetFramebufferSizeCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwPollEvents.restype = None
_glfw.glfwPollEvents.argtypes = []
def poll_events():
    '''
    Processes all pending events.
    Registered Python callbacks are invoked from inside this call.
    Wrapper for:
    void glfwPollEvents(void);
    '''
    _glfw.glfwPollEvents()
_glfw.glfwWaitEvents.restype = None
_glfw.glfwWaitEvents.argtypes = []
def wait_events():
    '''
    Waits until events are pending and processes them.
    Blocking counterpart of poll_events().
    Wrapper for:
    void glfwWaitEvents(void);
    '''
    _glfw.glfwWaitEvents()
_glfw.glfwGetInputMode.restype = ctypes.c_int
_glfw.glfwGetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int]
def get_input_mode(window, mode):
    '''
    Returns the value of an input option for the specified window.
    mode: one of CURSOR, STICKY_KEYS or STICKY_MOUSE_BUTTONS.
    Wrapper for:
    int glfwGetInputMode(GLFWwindow* window, int mode);
    '''
    return _glfw.glfwGetInputMode(window, mode)
_glfw.glfwSetInputMode.restype = None
_glfw.glfwSetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_input_mode(window, mode, value):
    '''
    Sets an input option for the specified window.
    @param[in] window The window whose input mode to set.
    @param[in] mode One of `GLFW_CURSOR`, `GLFW_STICKY_KEYS` or
    `GLFW_STICKY_MOUSE_BUTTONS`.
    @param[in] value The new value of the specified input mode.
    Wrapper for:
    void glfwSetInputMode(GLFWwindow* window, int mode, int value);
    '''
    _glfw.glfwSetInputMode(window, mode, value)
_glfw.glfwGetKey.restype = ctypes.c_int
_glfw.glfwGetKey.argtypes = [ctypes.POINTER(_GLFWwindow),
                             ctypes.c_int]
def get_key(window, key):
    '''
    Returns the last reported state of a keyboard key for the specified
    window.
    key: one of the KEY_* constants; the result is PRESS or RELEASE.
    Wrapper for:
    int glfwGetKey(GLFWwindow* window, int key);
    '''
    return _glfw.glfwGetKey(window, key)
_glfw.glfwGetMouseButton.restype = ctypes.c_int
_glfw.glfwGetMouseButton.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_int]
def get_mouse_button(window, button):
    '''
    Returns the last reported state of a mouse button for the specified
    window.
    button: one of the MOUSE_BUTTON_* constants; the result is PRESS or
    RELEASE.
    Wrapper for:
    int glfwGetMouseButton(GLFWwindow* window, int button);
    '''
    return _glfw.glfwGetMouseButton(window, button)
_glfw.glfwGetCursorPos.restype = None
_glfw.glfwGetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_double),
                                   ctypes.POINTER(ctypes.c_double)]
def get_cursor_pos(window):
    '''
    Retrieves the last reported cursor position, relative to the client
    area of the window, as an (xpos, ypos) tuple of floats.
    Wrapper for:
    void glfwGetCursorPos(GLFWwindow* window, double* xpos, double* ypos);
    '''
    x = ctypes.c_double(0.0)
    y = ctypes.c_double(0.0)
    _glfw.glfwGetCursorPos(window, ctypes.byref(x), ctypes.byref(y))
    return x.value, y.value
_glfw.glfwSetCursorPos.restype = None
_glfw.glfwSetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
ctypes.c_double,
ctypes.c_double]
def set_cursor_pos(window, xpos, ypos):
'''
Sets the position of the cursor, relative to the client area of the window.
Wrapper for:
void glfwSetCursorPos(GLFWwindow* window, double xpos, double ypos);
'''
_glfw.glfwSetCursorPos(window, xpos, ypos)
_key_callback_repository = {}
_callback_repositories.append(_key_callback_repository)
_glfw.glfwSetKeyCallback.restype = _GLFWkeyfun
_glfw.glfwSetKeyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     _GLFWkeyfun]
def set_key_callback(window, cbfun):
    '''
    Sets the key callback.

    Returns the previously set callback for the window, if any.
    Wrapper for:
        GLFWkeyfun glfwSetKeyCallback(GLFWwindow* window, GLFWkeyfun cbfun);
    '''
    # Key the repository on the unsigned (c_ulong) window address so it
    # matches the key destroy_window() computes for cleanup; the previous
    # signed c_long key diverged for addresses with the high bit set,
    # leaking callback entries after a window was destroyed.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_ulong)).contents.value
    previous_callback = _key_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    # Store the Python callable and its ctypes wrapper together so the
    # wrapper is not garbage-collected while GLFW still references it.
    c_cbfun = _GLFWkeyfun(cbfun)
    _key_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetKeyCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_char_callback_repository = {}
_callback_repositories.append(_char_callback_repository)
_glfw.glfwSetCharCallback.restype = _GLFWcharfun
_glfw.glfwSetCharCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      _GLFWcharfun]
def set_char_callback(window, cbfun):
    '''
    Sets the Unicode character callback.

    Returns the previously set callback for the window, if any.
    Wrapper for:
        GLFWcharfun glfwSetCharCallback(GLFWwindow* window, GLFWcharfun cbfun);
    '''
    # Unsigned (c_ulong) address matches destroy_window()'s cleanup key;
    # the previous signed c_long key diverged for high addresses and
    # leaked entries.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_ulong)).contents.value
    previous_callback = _char_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    # Keep callable and ctypes wrapper alive together for GLFW's benefit.
    c_cbfun = _GLFWcharfun(cbfun)
    _char_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCharCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_mouse_button_callback_repository = {}
_callback_repositories.append(_mouse_button_callback_repository)
_glfw.glfwSetMouseButtonCallback.restype = _GLFWmousebuttonfun
_glfw.glfwSetMouseButtonCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWmousebuttonfun]
def set_mouse_button_callback(window, cbfun):
    '''
    Sets the mouse button callback.

    Returns the previously set callback for the window, if any.
    Wrapper for:
        GLFWmousebuttonfun glfwSetMouseButtonCallback(GLFWwindow* window, GLFWmousebuttonfun cbfun);
    '''
    # Unsigned (c_ulong) address matches destroy_window()'s cleanup key;
    # the previous signed c_long key diverged for high addresses and
    # leaked entries.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_ulong)).contents.value
    previous_callback = _mouse_button_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    # Keep callable and ctypes wrapper alive together for GLFW's benefit.
    c_cbfun = _GLFWmousebuttonfun(cbfun)
    _mouse_button_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetMouseButtonCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_cursor_pos_callback_repository = {}
_callback_repositories.append(_cursor_pos_callback_repository)
_glfw.glfwSetCursorPosCallback.restype = _GLFWcursorposfun
_glfw.glfwSetCursorPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWcursorposfun]
def set_cursor_pos_callback(window, cbfun):
    '''
    Sets the cursor position callback.

    Returns the previously set callback for the window, if any.
    Wrapper for:
        GLFWcursorposfun glfwSetCursorPosCallback(GLFWwindow* window, GLFWcursorposfun cbfun);
    '''
    # Unsigned (c_ulong) address matches destroy_window()'s cleanup key;
    # the previous signed c_long key diverged for high addresses and
    # leaked entries.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_ulong)).contents.value
    previous_callback = _cursor_pos_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    # Keep callable and ctypes wrapper alive together for GLFW's benefit.
    c_cbfun = _GLFWcursorposfun(cbfun)
    _cursor_pos_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCursorPosCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_cursor_enter_callback_repository = {}
_callback_repositories.append(_cursor_enter_callback_repository)
_glfw.glfwSetCursorEnterCallback.restype = _GLFWcursorenterfun
_glfw.glfwSetCursorEnterCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWcursorenterfun]
def set_cursor_enter_callback(window, cbfun):
    '''
    Sets the cursor enter/exit callback.

    Returns the previously set callback for the window, if any.
    Wrapper for:
        GLFWcursorenterfun glfwSetCursorEnterCallback(GLFWwindow* window, GLFWcursorenterfun cbfun);
    '''
    # Unsigned (c_ulong) address matches destroy_window()'s cleanup key;
    # the previous signed c_long key diverged for high addresses and
    # leaked entries.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_ulong)).contents.value
    previous_callback = _cursor_enter_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    # Keep callable and ctypes wrapper alive together for GLFW's benefit.
    c_cbfun = _GLFWcursorenterfun(cbfun)
    _cursor_enter_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCursorEnterCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_scroll_callback_repository = {}
_callback_repositories.append(_scroll_callback_repository)
_glfw.glfwSetScrollCallback.restype = _GLFWscrollfun
_glfw.glfwSetScrollCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                        _GLFWscrollfun]
def set_scroll_callback(window, cbfun):
    '''
    Sets the scroll callback.

    Returns the previously set callback for the window, if any.
    Wrapper for:
        GLFWscrollfun glfwSetScrollCallback(GLFWwindow* window, GLFWscrollfun cbfun);
    '''
    # Unsigned (c_ulong) address matches destroy_window()'s cleanup key;
    # the previous signed c_long key diverged for high addresses and
    # leaked entries.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_ulong)).contents.value
    previous_callback = _scroll_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    # Keep callable and ctypes wrapper alive together for GLFW's benefit.
    c_cbfun = _GLFWscrollfun(cbfun)
    _scroll_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetScrollCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwJoystickPresent.restype = ctypes.c_int
_glfw.glfwJoystickPresent.argtypes = [ctypes.c_int]
def joystick_present(joy):
    '''
    Return whether the specified joystick is present.
    Wrapper for:
        int glfwJoystickPresent(int joy);
    '''
    return _glfw.glfwJoystickPresent(joy)
_glfw.glfwGetJoystickAxes.restype = ctypes.POINTER(ctypes.c_float)
_glfw.glfwGetJoystickAxes.argtypes = [ctypes.c_int,
                                      ctypes.POINTER(ctypes.c_int)]
def get_joystick_axes(joy):
    '''
    Return (axes, count): a float pointer to the axis values of the
    joystick and the number of values it holds.
    Wrapper for:
        const float* glfwGetJoystickAxes(int joy, int* count);
    '''
    count = ctypes.c_int(0)
    result = _glfw.glfwGetJoystickAxes(joy, ctypes.byref(count))
    return result, count.value
_glfw.glfwGetJoystickButtons.restype = ctypes.POINTER(ctypes.c_ubyte)
_glfw.glfwGetJoystickButtons.argtypes = [ctypes.c_int,
                                         ctypes.POINTER(ctypes.c_int)]
def get_joystick_buttons(joy):
    '''
    Return (buttons, count): an unsigned-byte pointer to the button
    states of the joystick and the number of values it holds.
    Wrapper for:
        const unsigned char* glfwGetJoystickButtons(int joy, int* count);
    '''
    count = ctypes.c_int(0)
    result = _glfw.glfwGetJoystickButtons(joy, ctypes.byref(count))
    return result, count.value
_glfw.glfwGetJoystickName.restype = ctypes.c_char_p
_glfw.glfwGetJoystickName.argtypes = [ctypes.c_int]
def get_joystick_name(joy):
    '''
    Return the name of the specified joystick.
    Wrapper for:
        const char* glfwGetJoystickName(int joy);
    '''
    return _glfw.glfwGetJoystickName(joy)
_glfw.glfwSetClipboardString.restype = None
_glfw.glfwSetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.c_char_p]
def set_clipboard_string(window, string):
    '''
    Place the given string on the system clipboard.
    Wrapper for:
        void glfwSetClipboardString(GLFWwindow* window, const char* string);
    '''
    _glfw.glfwSetClipboardString(window, _to_char_p(string))
_glfw.glfwGetClipboardString.restype = ctypes.c_char_p
_glfw.glfwGetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_clipboard_string(window):
    '''
    Return the contents of the system clipboard as a string.
    Wrapper for:
        const char* glfwGetClipboardString(GLFWwindow* window);
    '''
    return _glfw.glfwGetClipboardString(window)
_glfw.glfwGetTime.restype = ctypes.c_double
_glfw.glfwGetTime.argtypes = []
def get_time():
    '''
    Return the current value of the GLFW timer.
    Wrapper for:
        double glfwGetTime(void);
    '''
    return _glfw.glfwGetTime()
_glfw.glfwSetTime.restype = None
_glfw.glfwSetTime.argtypes = [ctypes.c_double]
def set_time(time):
    '''
    Set the GLFW timer to the given value.
    Wrapper for:
        void glfwSetTime(double time);
    '''
    _glfw.glfwSetTime(time)
_glfw.glfwMakeContextCurrent.restype = None
_glfw.glfwMakeContextCurrent.argtypes = [ctypes.POINTER(_GLFWwindow)]
def make_context_current(window):
    '''
    Make the context of the given window current on the calling thread.
    Wrapper for:
        void glfwMakeContextCurrent(GLFWwindow* window);
    '''
    _glfw.glfwMakeContextCurrent(window)
_glfw.glfwGetCurrentContext.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwGetCurrentContext.argtypes = []
def get_current_context():
    '''
    Return the window whose context is current on the calling thread.
    Wrapper for:
        GLFWwindow* glfwGetCurrentContext(void);
    '''
    return _glfw.glfwGetCurrentContext()
_glfw.glfwSwapBuffers.restype = None
_glfw.glfwSwapBuffers.argtypes = [ctypes.POINTER(_GLFWwindow)]
def swap_buffers(window):
    '''
    Swap the front and back buffers of the given window.
    Wrapper for:
        void glfwSwapBuffers(GLFWwindow* window);
    '''
    _glfw.glfwSwapBuffers(window)
_glfw.glfwSwapInterval.restype = None
_glfw.glfwSwapInterval.argtypes = [ctypes.c_int]
def swap_interval(interval):
    '''
    Set the swap interval for the current context.
    Wrapper for:
        void glfwSwapInterval(int interval);
    '''
    _glfw.glfwSwapInterval(interval)
_glfw.glfwExtensionSupported.restype = ctypes.c_int
_glfw.glfwExtensionSupported.argtypes = [ctypes.c_char_p]
def extension_supported(extension):
    '''
    Return whether the named extension is available in the current
    context.
    Wrapper for:
        int glfwExtensionSupported(const char* extension);
    '''
    return _glfw.glfwExtensionSupported(_to_char_p(extension))
_glfw.glfwGetProcAddress.restype = ctypes.c_void_p
_glfw.glfwGetProcAddress.argtypes = [ctypes.c_char_p]
def get_proc_address(procname):
    '''
    Return the address of the named function for the current context.
    Wrapper for:
        GLFWglproc glfwGetProcAddress(const char* procname);
    '''
    return _glfw.glfwGetProcAddress(_to_char_p(procname))
|
def destroy_window(window):
    '''
    Destroys the specified window and its context.
    Wrapper for:
    void glfwDestroyWindow(GLFWwindow* window);
    '''
    _glfw.glfwDestroyWindow(window)
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_ulong)).contents.value
    for callback_repository in _callback_repositories:
        if window_addr in callback_repository:
            del callback_repository[window_addr]
'''
Python bindings for GLFW.
'''
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = 'Florian Rhiem (florian.rhiem@gmail.com)'
__copyright__ = 'Copyright (c) 2013 Florian Rhiem'
__license__ = 'MIT'
__version__ = '1.0.1'
import ast
import ctypes
import glob
import os
import subprocess
import sys
import textwrap
# Python 3 compatibility:
# Python 2/3 compatibility shims.
# os.getcwdu (unicode cwd) exists only on Python 2; fall back to os.getcwd.
try:
    _getcwd = os.getcwdu
except AttributeError:
    _getcwd = os.getcwd
# C string arguments must be bytes under Python 3 but plain str under
# Python 2.
if sys.version_info.major > 2:
    _to_char_p = lambda s: s.encode('utf-8')
else:
    _to_char_p = lambda s: s
def _find_library_candidates(library_names,
                             library_file_extensions,
                             library_search_paths):
    '''
    Return a set of real paths that might be the shared library being
    searched for, matching e.g. 'libNAME.so.3.1' or 'NAME.3.dylib'.
    '''
    candidates = set()
    for name in library_names:
        for directory in library_search_paths:
            pattern = os.path.join(directory, '*' + name + '*')
            for path in glob.iglob(pattern):
                path = os.path.realpath(path)
                if path in candidates:
                    continue
                base = os.path.basename(path)
                # Strip an optional 'lib' prefix plus the library name.
                if base.startswith('lib' + name):
                    suffix = base[len('lib' + name):]
                elif base.startswith(name):
                    suffix = base[len(name):]
                else:
                    continue
                for ext in library_file_extensions:
                    # 'NAME.so' or 'NAME.so.3.1': extension first.
                    if suffix.startswith(ext) and suffix[len(ext):][:1] in ('', '.'):
                        candidates.add(path)
                    # 'NAME.3.dylib': version digits before the extension.
                    if suffix.endswith(ext):
                        middle = suffix[:-len(ext)]
                        if all(c in '0123456789.' for c in middle):
                            candidates.add(path)
    return candidates
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    '''
    Find, load and return the most recent GLFW 3.x version of the
    library, or None if no suitable candidate is found.
    '''
    candidates = _find_library_candidates(library_names,
                                          library_file_extensions,
                                          library_search_paths)
    versioned = [(version_check_callback(filename), filename)
                 for filename in candidates]
    # Only accept candidates that report a version of at least 3.0.0.
    versioned = [(version, filename) for version, filename in versioned
                 if version is not None and version >= (3, 0, 0)]
    if not versioned:
        return None
    # Load the candidate with the highest reported version.
    _, best_filename = max(versioned)
    return ctypes.CDLL(best_filename)
def _glfw_get_version(filename):
    '''
    Queries and returns the library version tuple or None by using a
    subprocess.

    The probe runs in a child interpreter so that loading a broken or
    incompatible shared object cannot crash this process.
    '''
    version_checker_source = """
        import sys
        import ctypes
        def get_version(library_handle):
            '''
            Queries and returns the library version tuple or None.
            '''
            major_value = ctypes.c_int(0)
            major = ctypes.pointer(major_value)
            minor_value = ctypes.c_int(0)
            minor = ctypes.pointer(minor_value)
            rev_value = ctypes.c_int(0)
            rev = ctypes.pointer(rev_value)
            if hasattr(library_handle, 'glfwGetVersion'):
                library_handle.glfwGetVersion(major, minor, rev)
                version = (major_value.value,
                           minor_value.value,
                           rev_value.value)
                return version
            else:
                return None
        try:
            input_func = raw_input
        except NameError:
            input_func = input
        filename = input_func().strip()
        try:
            library_handle = ctypes.CDLL(filename)
        except OSError:
            pass
        else:
            version = get_version(library_handle)
            print(version)
    """
    args = [sys.executable, '-c', textwrap.dedent(version_checker_source)]
    process = subprocess.Popen(args, universal_newlines=True,
                               stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    # universal_newlines=True makes the child's pipes text-mode, so the
    # filename must be passed as str; encoding it via _to_char_p raised
    # TypeError under Python 3.
    out = process.communicate(filename)[0]
    out = out.strip()
    if out:
        # The child prints a tuple literal such as (3, 0, 3) or None;
        # literal_eval parses it without evaluating arbitrary code.
        return ast.literal_eval(out)
    return None
# Locate and load the newest GLFW shared library (version >= 3.0.0, as
# enforced by _load_library) found on the standard search paths.
_glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib', '.dll'],
                      ['', '/usr/lib', '/usr/local/lib'], _glfw_get_version)
if _glfw is None:
    raise ImportError("Failed to load GLFW3 shared library.")
# Each callback setter appends its per-window repository dict here so
# all of a window's entries can be located and removed in one place.
_callback_repositories = []
class _GLFWwindow(ctypes.Structure):
    '''
    Opaque window handle.
    Wrapper for:
        typedef struct GLFWwindow GLFWwindow;
    '''
    # GLFWwindow is opaque in the C API; the dummy field only gives the
    # Structure a nonzero size so POINTER(_GLFWwindow) is usable.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWmonitor(ctypes.Structure):
    '''
    Opaque monitor handle.
    Wrapper for:
        typedef struct GLFWmonitor GLFWmonitor;
    '''
    # GLFWmonitor is opaque in the C API; the dummy field only gives the
    # Structure a nonzero size so POINTER(_GLFWmonitor) is usable.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWvidmode(ctypes.Structure):
    '''
    Wrapper for:
        typedef struct GLFWvidmode GLFWvidmode;
    '''
    _fields_ = [("width", ctypes.c_int),
                ("height", ctypes.c_int),
                ("red_bits", ctypes.c_int),
                ("green_bits", ctypes.c_int),
                ("blue_bits", ctypes.c_int),
                ("refresh_rate", ctypes.c_uint)]

    def __init__(self):
        ctypes.Structure.__init__(self)
        self.width = self.height = 0
        self.red_bits = self.green_bits = self.blue_bits = 0
        self.refresh_rate = 0

    def wrap(self, video_mode):
        '''
        Populate the structure from a nested
        ((width, height), (red, green, blue), refresh_rate) sequence.
        '''
        (self.width, self.height), bits, self.refresh_rate = video_mode
        self.red_bits, self.green_bits, self.blue_bits = bits

    def unwrap(self):
        '''
        Return the structure as a nested
        ((width, height), (red, green, blue), refresh_rate) tuple.
        '''
        return ((self.width, self.height),
                (self.red_bits, self.green_bits, self.blue_bits),
                self.refresh_rate)
class _GLFWgammaramp(ctypes.Structure):
    '''
    Wrapper for:
        typedef struct GLFWgammaramp GLFWgammaramp;
    '''
    _fields_ = [("red", ctypes.POINTER(ctypes.c_ushort)),
                ("green", ctypes.POINTER(ctypes.c_ushort)),
                ("blue", ctypes.POINTER(ctypes.c_ushort)),
                ("size", ctypes.c_uint)]

    def __init__(self):
        ctypes.Structure.__init__(self)
        # The *_array attributes keep the ctypes arrays referenced for
        # as long as the structure's pointers point into them.
        self.red = None
        self.red_array = None
        self.green = None
        self.green_array = None
        self.blue = None
        self.blue_array = None
        self.size = 0

    def wrap(self, gammaramp):
        '''
        Fill the structure from a (red, green, blue) triple of float
        sequences with values in [0.0, 1.0]; channels are truncated to
        the length of the shortest one.
        '''
        red, green, blue = gammaramp
        size = min(len(red), len(green), len(blue))
        array_type = ctypes.c_ushort * size
        self.size = ctypes.c_uint(size)
        self.red_array = array_type(*[int(v * 65535) for v in red[:size]])
        self.green_array = array_type(*[int(v * 65535) for v in green[:size]])
        self.blue_array = array_type(*[int(v * 65535) for v in blue[:size]])
        ushort_p = ctypes.POINTER(ctypes.c_ushort)
        self.red = ctypes.cast(self.red_array, ushort_p)
        self.green = ctypes.cast(self.green_array, ushort_p)
        self.blue = ctypes.cast(self.blue_array, ushort_p)

    def unwrap(self):
        '''
        Return the ramp as a (red, green, blue) triple of lists of
        floats in [0.0, 1.0].
        '''
        channels = []
        for pointer in (self.red, self.green, self.blue):
            channels.append([pointer[i] / 65535.0 for i in range(self.size)])
        return tuple(channels)
VERSION_MAJOR = 3
VERSION_MINOR = 0
VERSION_REVISION = 3
RELEASE = 0
PRESS = 1
REPEAT = 2
KEY_UNKNOWN = -1
KEY_SPACE = 32
KEY_APOSTROPHE = 39
KEY_COMMA = 44
KEY_MINUS = 45
KEY_PERIOD = 46
KEY_SLASH = 47
KEY_0 = 48
KEY_1 = 49
KEY_2 = 50
KEY_3 = 51
KEY_4 = 52
KEY_5 = 53
KEY_6 = 54
KEY_7 = 55
KEY_8 = 56
KEY_9 = 57
KEY_SEMICOLON = 59
KEY_EQUAL = 61
KEY_A = 65
KEY_B = 66
KEY_C = 67
KEY_D = 68
KEY_E = 69
KEY_F = 70
KEY_G = 71
KEY_H = 72
KEY_I = 73
KEY_J = 74
KEY_K = 75
KEY_L = 76
KEY_M = 77
KEY_N = 78
KEY_O = 79
KEY_P = 80
KEY_Q = 81
KEY_R = 82
KEY_S = 83
KEY_T = 84
KEY_U = 85
KEY_V = 86
KEY_W = 87
KEY_X = 88
KEY_Y = 89
KEY_Z = 90
KEY_LEFT_BRACKET = 91
KEY_BACKSLASH = 92
KEY_RIGHT_BRACKET = 93
KEY_GRAVE_ACCENT = 96
KEY_WORLD_1 = 161
KEY_WORLD_2 = 162
KEY_ESCAPE = 256
KEY_ENTER = 257
KEY_TAB = 258
KEY_BACKSPACE = 259
KEY_INSERT = 260
KEY_DELETE = 261
KEY_RIGHT = 262
KEY_LEFT = 263
KEY_DOWN = 264
KEY_UP = 265
KEY_PAGE_UP = 266
KEY_PAGE_DOWN = 267
KEY_HOME = 268
KEY_END = 269
KEY_CAPS_LOCK = 280
KEY_SCROLL_LOCK = 281
KEY_NUM_LOCK = 282
KEY_PRINT_SCREEN = 283
KEY_PAUSE = 284
KEY_F1 = 290
KEY_F2 = 291
KEY_F3 = 292
KEY_F4 = 293
KEY_F5 = 294
KEY_F6 = 295
KEY_F7 = 296
KEY_F8 = 297
KEY_F9 = 298
KEY_F10 = 299
KEY_F11 = 300
KEY_F12 = 301
KEY_F13 = 302
KEY_F14 = 303
KEY_F15 = 304
KEY_F16 = 305
KEY_F17 = 306
KEY_F18 = 307
KEY_F19 = 308
KEY_F20 = 309
KEY_F21 = 310
KEY_F22 = 311
KEY_F23 = 312
KEY_F24 = 313
KEY_F25 = 314
KEY_KP_0 = 320
KEY_KP_1 = 321
KEY_KP_2 = 322
KEY_KP_3 = 323
KEY_KP_4 = 324
KEY_KP_5 = 325
KEY_KP_6 = 326
KEY_KP_7 = 327
KEY_KP_8 = 328
KEY_KP_9 = 329
KEY_KP_DECIMAL = 330
KEY_KP_DIVIDE = 331
KEY_KP_MULTIPLY = 332
KEY_KP_SUBTRACT = 333
KEY_KP_ADD = 334
KEY_KP_ENTER = 335
KEY_KP_EQUAL = 336
KEY_LEFT_SHIFT = 340
KEY_LEFT_CONTROL = 341
KEY_LEFT_ALT = 342
KEY_LEFT_SUPER = 343
KEY_RIGHT_SHIFT = 344
KEY_RIGHT_CONTROL = 345
KEY_RIGHT_ALT = 346
KEY_RIGHT_SUPER = 347
KEY_MENU = 348
KEY_LAST = KEY_MENU
MOD_SHIFT = 0x0001
MOD_CONTROL = 0x0002
MOD_ALT = 0x0004
MOD_SUPER = 0x0008
MOUSE_BUTTON_1 = 0
MOUSE_BUTTON_2 = 1
MOUSE_BUTTON_3 = 2
MOUSE_BUTTON_4 = 3
MOUSE_BUTTON_5 = 4
MOUSE_BUTTON_6 = 5
MOUSE_BUTTON_7 = 6
MOUSE_BUTTON_8 = 7
MOUSE_BUTTON_LAST = MOUSE_BUTTON_8
MOUSE_BUTTON_LEFT = MOUSE_BUTTON_1
MOUSE_BUTTON_RIGHT = MOUSE_BUTTON_2
MOUSE_BUTTON_MIDDLE = MOUSE_BUTTON_3
JOYSTICK_1 = 0
JOYSTICK_2 = 1
JOYSTICK_3 = 2
JOYSTICK_4 = 3
JOYSTICK_5 = 4
JOYSTICK_6 = 5
JOYSTICK_7 = 6
JOYSTICK_8 = 7
JOYSTICK_9 = 8
JOYSTICK_10 = 9
JOYSTICK_11 = 10
JOYSTICK_12 = 11
JOYSTICK_13 = 12
JOYSTICK_14 = 13
JOYSTICK_15 = 14
JOYSTICK_16 = 15
JOYSTICK_LAST = JOYSTICK_16
NOT_INITIALIZED = 0x00010001
NO_CURRENT_CONTEXT = 0x00010002
INVALID_ENUM = 0x00010003
INVALID_VALUE = 0x00010004
OUT_OF_MEMORY = 0x00010005
API_UNAVAILABLE = 0x00010006
VERSION_UNAVAILABLE = 0x00010007
PLATFORM_ERROR = 0x00010008
FORMAT_UNAVAILABLE = 0x00010009
FOCUSED = 0x00020001
ICONIFIED = 0x00020002
RESIZABLE = 0x00020003
VISIBLE = 0x00020004
DECORATED = 0x00020005
RED_BITS = 0x00021001
GREEN_BITS = 0x00021002
BLUE_BITS = 0x00021003
ALPHA_BITS = 0x00021004
DEPTH_BITS = 0x00021005
STENCIL_BITS = 0x00021006
ACCUM_RED_BITS = 0x00021007
ACCUM_GREEN_BITS = 0x00021008
ACCUM_BLUE_BITS = 0x00021009
ACCUM_ALPHA_BITS = 0x0002100A
AUX_BUFFERS = 0x0002100B
STEREO = 0x0002100C
SAMPLES = 0x0002100D
SRGB_CAPABLE = 0x0002100E
REFRESH_RATE = 0x0002100F
CLIENT_API = 0x00022001
CONTEXT_VERSION_MAJOR = 0x00022002
CONTEXT_VERSION_MINOR = 0x00022003
CONTEXT_REVISION = 0x00022004
CONTEXT_ROBUSTNESS = 0x00022005
OPENGL_FORWARD_COMPAT = 0x00022006
OPENGL_DEBUG_CONTEXT = 0x00022007
OPENGL_PROFILE = 0x00022008
OPENGL_API = 0x00030001
OPENGL_ES_API = 0x00030002
NO_ROBUSTNESS = 0
NO_RESET_NOTIFICATION = 0x00031001
LOSE_CONTEXT_ON_RESET = 0x00031002
OPENGL_ANY_PROFILE = 0
OPENGL_CORE_PROFILE = 0x00032001
OPENGL_COMPAT_PROFILE = 0x00032002
CURSOR = 0x00033001
STICKY_KEYS = 0x00033002
STICKY_MOUSE_BUTTONS = 0x00033003
CURSOR_NORMAL = 0x00034001
CURSOR_HIDDEN = 0x00034002
CURSOR_DISABLED = 0x00034003
CONNECTED = 0x00040001
DISCONNECTED = 0x00040002
_GLFWerrorfun = ctypes.CFUNCTYPE(None,
ctypes.c_int,
ctypes.c_char_p)
_GLFWwindowposfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int)
_GLFWwindowsizefun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int)
_GLFWwindowclosefun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow))
_GLFWwindowrefreshfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow))
_GLFWwindowfocusfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int)
_GLFWwindowiconifyfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int)
_GLFWframebuffersizefun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int)
_GLFWmousebuttonfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int,
ctypes.c_int)
_GLFWcursorposfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_double,
ctypes.c_double)
_GLFWcursorenterfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int)
_GLFWscrollfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_double,
ctypes.c_double)
_GLFWkeyfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int,
ctypes.c_int,
ctypes.c_int,
ctypes.c_int)
_GLFWcharfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWwindow),
ctypes.c_int)
_GLFWmonitorfun = ctypes.CFUNCTYPE(None,
ctypes.POINTER(_GLFWmonitor),
ctypes.c_int)
_glfw.glfwInit.restype = ctypes.c_int
_glfw.glfwInit.argtypes = []
def init():
    '''
    Initializes the GLFW library.

    Returns nonzero on success, zero on failure.
    Wrapper for:
    int glfwInit(void);
    '''
    # glfwInit may change the process working directory (NOTE(review):
    # presumably the documented macOS bundle behavior — confirm); save
    # and restore it around the call.
    cwd = _getcwd()
    res = _glfw.glfwInit()
    os.chdir(cwd)
    return res
_glfw.glfwTerminate.restype = None
_glfw.glfwTerminate.argtypes = []
def terminate():
    '''
    Terminates the GLFW library.
    Wrapper for:
    void glfwTerminate(void);
    '''
    _glfw.glfwTerminate()
_glfw.glfwGetVersion.restype = None
_glfw.glfwGetVersion.argtypes = [ctypes.POINTER(ctypes.c_int),
                                 ctypes.POINTER(ctypes.c_int),
                                 ctypes.POINTER(ctypes.c_int)]
def get_version():
    '''
    Return the loaded GLFW library version as a (major, minor, rev)
    tuple.
    Wrapper for:
        void glfwGetVersion(int* major, int* minor, int* rev);
    '''
    major = ctypes.c_int(0)
    minor = ctypes.c_int(0)
    rev = ctypes.c_int(0)
    _glfw.glfwGetVersion(ctypes.byref(major), ctypes.byref(minor),
                         ctypes.byref(rev))
    return major.value, minor.value, rev.value
_glfw.glfwGetVersionString.restype = ctypes.c_char_p
_glfw.glfwGetVersionString.argtypes = []
def get_version_string():
    '''
    Return a string describing the library's compile-time configuration.
    Wrapper for:
        const char* glfwGetVersionString(void);
    '''
    return _glfw.glfwGetVersionString()
_error_callback = None
_glfw.glfwSetErrorCallback.restype = _GLFWerrorfun
_glfw.glfwSetErrorCallback.argtypes = [_GLFWerrorfun]
def set_error_callback(cbfun):
    '''
    Sets the error callback.

    Returns the previously set callback, if any.
    Wrapper for:
        GLFWerrorfun glfwSetErrorCallback(GLFWerrorfun cbfun);
    '''
    global _error_callback
    previous_callback = _error_callback
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWerrorfun(cbfun)
    # Keep the Python callable and its ctypes wrapper referenced so the
    # wrapper is not garbage-collected while GLFW still holds it.
    _error_callback = (cbfun, c_cbfun)
    _glfw.glfwSetErrorCallback(c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwGetMonitors.restype = ctypes.POINTER(ctypes.POINTER(_GLFWmonitor))
_glfw.glfwGetMonitors.argtypes = [ctypes.POINTER(ctypes.c_int)]
def get_monitors():
    '''
    Return a list of handles for all currently connected monitors.
    Wrapper for:
        GLFWmonitor** glfwGetMonitors(int* count);
    '''
    count = ctypes.c_int(0)
    result = _glfw.glfwGetMonitors(ctypes.byref(count))
    return [result[i] for i in range(count.value)]
_glfw.glfwGetPrimaryMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetPrimaryMonitor.argtypes = []
def get_primary_monitor():
    '''
    Return a handle for the primary monitor.
    Wrapper for:
        GLFWmonitor* glfwGetPrimaryMonitor(void);
    '''
    return _glfw.glfwGetPrimaryMonitor()
_glfw.glfwGetMonitorPos.restype = None
_glfw.glfwGetMonitorPos.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                    ctypes.POINTER(ctypes.c_int),
                                    ctypes.POINTER(ctypes.c_int)]
def get_monitor_pos(monitor):
    '''
    Return the (x, y) position of the monitor's viewport on the virtual
    screen.
    Wrapper for:
        void glfwGetMonitorPos(GLFWmonitor* monitor, int* xpos, int* ypos);
    '''
    xpos = ctypes.c_int(0)
    ypos = ctypes.c_int(0)
    _glfw.glfwGetMonitorPos(monitor, ctypes.byref(xpos), ctypes.byref(ypos))
    return xpos.value, ypos.value
_glfw.glfwGetMonitorPhysicalSize.restype = None
_glfw.glfwGetMonitorPhysicalSize.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int)]
def get_monitor_physical_size(monitor):
    '''
    Return the monitor's physical size as a (width, height) tuple.
    Wrapper for:
        void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height);
    '''
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    _glfw.glfwGetMonitorPhysicalSize(monitor, ctypes.byref(width),
                                     ctypes.byref(height))
    return width.value, height.value
_glfw.glfwGetMonitorName.restype = ctypes.c_char_p
_glfw.glfwGetMonitorName.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_monitor_name(monitor):
    '''
    Return the name of the specified monitor.
    Wrapper for:
        const char* glfwGetMonitorName(GLFWmonitor* monitor);
    '''
    return _glfw.glfwGetMonitorName(monitor)
_monitor_callback = None
_glfw.glfwSetMonitorCallback.restype = _GLFWmonitorfun
_glfw.glfwSetMonitorCallback.argtypes = [_GLFWmonitorfun]
def set_monitor_callback(cbfun):
    '''
    Sets the monitor configuration callback.

    Returns the previously set callback, if any.
    Wrapper for:
        GLFWmonitorfun glfwSetMonitorCallback(GLFWmonitorfun cbfun);
    '''
    global _monitor_callback
    previous_callback = _monitor_callback
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWmonitorfun(cbfun)
    # Keep the Python callable and its ctypes wrapper referenced so the
    # wrapper is not garbage-collected while GLFW still holds it.
    _monitor_callback = (cbfun, c_cbfun)
    _glfw.glfwSetMonitorCallback(c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwGetVideoModes.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoModes.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                    ctypes.POINTER(ctypes.c_int)]
def get_video_modes(monitor):
    '''
    Return the monitor's available video modes as a list of unwrapped
    ((width, height), (red, green, blue), refresh_rate) tuples.
    Wrapper for:
        const GLFWvidmode* glfwGetVideoModes(GLFWmonitor* monitor, int* count);
    '''
    count = ctypes.c_int(0)
    modes = _glfw.glfwGetVideoModes(monitor, ctypes.byref(count))
    return [modes[i].unwrap() for i in range(count.value)]
_glfw.glfwGetVideoMode.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoMode.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_video_mode(monitor):
    '''
    Return the monitor's current video mode, unwrapped into a
    ((width, height), (red, green, blue), refresh_rate) tuple.
    Wrapper for:
        const GLFWvidmode* glfwGetVideoMode(GLFWmonitor* monitor);
    '''
    return _glfw.glfwGetVideoMode(monitor).contents.unwrap()
_glfw.glfwSetGamma.restype = None
_glfw.glfwSetGamma.argtypes = [ctypes.POINTER(_GLFWmonitor),
                               ctypes.c_float]
def set_gamma(monitor, gamma):
    '''
    Generate a gamma ramp from the given exponent and set it for the
    monitor.
    Wrapper for:
        void glfwSetGamma(GLFWmonitor* monitor, float gamma);
    '''
    _glfw.glfwSetGamma(monitor, gamma)
_glfw.glfwGetGammaRamp.restype = ctypes.POINTER(_GLFWgammaramp)
_glfw.glfwGetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_gamma_ramp(monitor):
    '''
    Return the monitor's current gamma ramp as a (red, green, blue)
    triple of float lists.
    Wrapper for:
        const GLFWgammaramp* glfwGetGammaRamp(GLFWmonitor* monitor);
    '''
    return _glfw.glfwGetGammaRamp(monitor).contents.unwrap()
_glfw.glfwSetGammaRamp.restype = None
_glfw.glfwSetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWgammaramp)]
def set_gamma_ramp(monitor, ramp):
    '''
    Set the monitor's gamma ramp from a (red, green, blue) triple of
    float sequences.
    Wrapper for:
        void glfwSetGammaRamp(GLFWmonitor* monitor, const GLFWgammaramp* ramp);
    '''
    gammaramp = _GLFWgammaramp()
    gammaramp.wrap(ramp)
    _glfw.glfwSetGammaRamp(monitor, ctypes.byref(gammaramp))
_glfw.glfwDefaultWindowHints.restype = None
_glfw.glfwDefaultWindowHints.argtypes = []
def default_window_hints():
    '''
    Reset all window hints to their default values.
    Wrapper for:
        void glfwDefaultWindowHints(void);
    '''
    _glfw.glfwDefaultWindowHints()
_glfw.glfwWindowHint.restype = None
_glfw.glfwWindowHint.argtypes = [ctypes.c_int,
                                 ctypes.c_int]
def window_hint(target, hint):
    '''
    Set the specified window hint to the desired value.
    Wrapper for:
        void glfwWindowHint(int target, int hint);
    '''
    _glfw.glfwWindowHint(target, hint)
_glfw.glfwCreateWindow.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwCreateWindow.argtypes = [ctypes.c_int,
                                   ctypes.c_int,
                                   ctypes.c_char_p,
                                   ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWwindow)]
def create_window(width, height, title, monitor, share):
    '''
    Create a window and its associated context and return its handle.
    Wrapper for:
        GLFWwindow* glfwCreateWindow(int width, int height, const char* title, GLFWmonitor* monitor, GLFWwindow* share);
    '''
    return _glfw.glfwCreateWindow(width, height, _to_char_p(title),
                                  monitor, share)
# Prototype for glfwDestroyWindow; the destroy_window wrapper that uses
# it is defined elsewhere in this module.
_glfw.glfwDestroyWindow.restype = None
_glfw.glfwDestroyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
_glfw.glfwWindowShouldClose.restype = ctypes.c_int
_glfw.glfwWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow)]
def window_should_close(window):
    '''
    Return the close flag of the given window.
    Wrapper for:
        int glfwWindowShouldClose(GLFWwindow* window);
    '''
    return _glfw.glfwWindowShouldClose(window)
_glfw.glfwSetWindowShouldClose.restype = None
_glfw.glfwSetWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_int]
def set_window_should_close(window, value):
    '''
    Set the close flag of the given window.
    Wrapper for:
        void glfwSetWindowShouldClose(GLFWwindow* window, int value);
    '''
    _glfw.glfwSetWindowShouldClose(window, value)
_glfw.glfwSetWindowTitle.restype = None
_glfw.glfwSetWindowTitle.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_char_p]
def set_window_title(window, title):
    '''
    Set the title of the given window.
    Wrapper for:
        void glfwSetWindowTitle(GLFWwindow* window, const char* title);
    '''
    _glfw.glfwSetWindowTitle(window, _to_char_p(title))
_glfw.glfwGetWindowPos.restype = None
_glfw.glfwGetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_int),
                                   ctypes.POINTER(ctypes.c_int)]
def get_window_pos(window):
    '''
    Return the (x, y) position of the client area of the given window.
    Wrapper for:
        void glfwGetWindowPos(GLFWwindow* window, int* xpos, int* ypos);
    '''
    xpos = ctypes.c_int(0)
    ypos = ctypes.c_int(0)
    _glfw.glfwGetWindowPos(window, ctypes.byref(xpos), ctypes.byref(ypos))
    return xpos.value, ypos.value
_glfw.glfwSetWindowPos.restype = None
_glfw.glfwSetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_window_pos(window, xpos, ypos):
    '''
    Set the position of the client area of the given window.
    Wrapper for:
        void glfwSetWindowPos(GLFWwindow* window, int xpos, int ypos);
    '''
    _glfw.glfwSetWindowPos(window, xpos, ypos)
_glfw.glfwGetWindowSize.restype = None
_glfw.glfwGetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.POINTER(ctypes.c_int),
                                    ctypes.POINTER(ctypes.c_int)]
def get_window_size(window):
    '''
    Queries the size of the client area of the given window and returns
    it as a (width, height) tuple of ints (screen coordinates).
    Wrapper for:
        void glfwGetWindowSize(GLFWwindow* window, int* width, int* height);
    '''
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    _glfw.glfwGetWindowSize(window, ctypes.pointer(width),
                            ctypes.pointer(height))
    return width.value, height.value
_glfw.glfwSetWindowSize.restype = None
_glfw.glfwSetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.c_int,
                                    ctypes.c_int]
def set_window_size(window, width, height):
    '''
    Resizes the client area of the given window to width x height.
    Wrapper for:
        void glfwSetWindowSize(GLFWwindow* window, int width, int height);
    '''
    _glfw.glfwSetWindowSize(window, width, height)
_glfw.glfwGetFramebufferSize.restype = None
_glfw.glfwGetFramebufferSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.POINTER(ctypes.c_int),
                                         ctypes.POINTER(ctypes.c_int)]
def get_framebuffer_size(window):
    '''
    Queries the size of the framebuffer of the given window and returns
    it as a (width, height) tuple of ints.
    Note: on high-DPI displays this can differ from get_window_size.
    Wrapper for:
        void glfwGetFramebufferSize(GLFWwindow* window, int* width, int* height);
    '''
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    _glfw.glfwGetFramebufferSize(window, ctypes.pointer(width),
                                 ctypes.pointer(height))
    return width.value, height.value
_glfw.glfwIconifyWindow.restype = None
_glfw.glfwIconifyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def iconify_window(window):
    '''
    Minimizes (iconifies) the given window.
    Wrapper for:
        void glfwIconifyWindow(GLFWwindow* window);
    '''
    _glfw.glfwIconifyWindow(window)
_glfw.glfwRestoreWindow.restype = None
_glfw.glfwRestoreWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def restore_window(window):
    '''
    Restores the given window from an iconified state.
    Wrapper for:
        void glfwRestoreWindow(GLFWwindow* window);
    '''
    _glfw.glfwRestoreWindow(window)
_glfw.glfwShowWindow.restype = None
_glfw.glfwShowWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def show_window(window):
    '''
    Makes the given window visible.
    Wrapper for:
        void glfwShowWindow(GLFWwindow* window);
    '''
    _glfw.glfwShowWindow(window)
_glfw.glfwHideWindow.restype = None
_glfw.glfwHideWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def hide_window(window):
    '''
    Makes the given window invisible.
    Wrapper for:
        void glfwHideWindow(GLFWwindow* window);
    '''
    _glfw.glfwHideWindow(window)
_glfw.glfwGetWindowMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetWindowMonitor.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_monitor(window):
    '''
    Returns the monitor the given window uses for full screen mode (a
    NULL pointer for windowed-mode windows).
    Wrapper for:
        GLFWmonitor* glfwGetWindowMonitor(GLFWwindow* window);
    '''
    return _glfw.glfwGetWindowMonitor(window)
_glfw.glfwGetWindowAttrib.restype = ctypes.c_int
_glfw.glfwGetWindowAttrib.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      ctypes.c_int]
def get_window_attrib(window, attrib):
    '''
    Returns the value of the requested attribute of the given window.
    Wrapper for:
        int glfwGetWindowAttrib(GLFWwindow* window, int attrib);
    '''
    return _glfw.glfwGetWindowAttrib(window, attrib)
_glfw.glfwSetWindowUserPointer.restype = None
_glfw.glfwSetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_void_p]
def set_window_user_pointer(window, pointer):
    '''
    Stores an arbitrary user pointer on the given window.
    Wrapper for:
        void glfwSetWindowUserPointer(GLFWwindow* window, void* pointer);
    '''
    _glfw.glfwSetWindowUserPointer(window, pointer)
_glfw.glfwGetWindowUserPointer.restype = ctypes.c_void_p
_glfw.glfwGetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_user_pointer(window):
    '''
    Returns the user pointer previously stored on the given window.
    Wrapper for:
        void* glfwGetWindowUserPointer(GLFWwindow* window);
    '''
    return _glfw.glfwGetWindowUserPointer(window)
def _install_window_callback(repository, functype, c_setter, window, cbfun):
    '''
    Shared machinery for the window-related callback setters below.
    Registers cbfun for the given window in repository (keeping the
    ctypes function wrapper alive so it is not garbage collected while
    installed), installs it through the C-level setter, and returns the
    previously registered Python callback, if any.
    '''
    # NOTE(review): deriving the dict key by reading the pointer through
    # c_long truncates addresses on LLP64 platforms (64-bit Windows).
    # Kept unchanged because other code in this module (e.g. the window
    # destruction cleanup) derives repository keys the same way and the
    # two schemes must agree.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = functype(cbfun)
    repository[window_addr] = (cbfun, c_cbfun)
    c_setter(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_pos_callback_repository = {}
_callback_repositories.append(_window_pos_callback_repository)
_glfw.glfwSetWindowPosCallback.restype = _GLFWwindowposfun
_glfw.glfwSetWindowPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWwindowposfun]
def set_window_pos_callback(window, cbfun):
    '''
    Sets the position callback for the specified window; returns the
    previously set Python callback, if any.
    Wrapper for:
        GLFWwindowposfun glfwSetWindowPosCallback(GLFWwindow* window, GLFWwindowposfun cbfun);
    '''
    return _install_window_callback(_window_pos_callback_repository,
                                    _GLFWwindowposfun,
                                    _glfw.glfwSetWindowPosCallback,
                                    window, cbfun)
_window_size_callback_repository = {}
_callback_repositories.append(_window_size_callback_repository)
_glfw.glfwSetWindowSizeCallback.restype = _GLFWwindowsizefun
_glfw.glfwSetWindowSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                            _GLFWwindowsizefun]
def set_window_size_callback(window, cbfun):
    '''
    Sets the size callback for the specified window; returns the
    previously set Python callback, if any.
    Wrapper for:
        GLFWwindowsizefun glfwSetWindowSizeCallback(GLFWwindow* window, GLFWwindowsizefun cbfun);
    '''
    return _install_window_callback(_window_size_callback_repository,
                                    _GLFWwindowsizefun,
                                    _glfw.glfwSetWindowSizeCallback,
                                    window, cbfun)
_window_close_callback_repository = {}
_callback_repositories.append(_window_close_callback_repository)
_glfw.glfwSetWindowCloseCallback.restype = _GLFWwindowclosefun
_glfw.glfwSetWindowCloseCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowclosefun]
def set_window_close_callback(window, cbfun):
    '''
    Sets the close callback for the specified window; returns the
    previously set Python callback, if any.
    Wrapper for:
        GLFWwindowclosefun glfwSetWindowCloseCallback(GLFWwindow* window, GLFWwindowclosefun cbfun);
    '''
    return _install_window_callback(_window_close_callback_repository,
                                    _GLFWwindowclosefun,
                                    _glfw.glfwSetWindowCloseCallback,
                                    window, cbfun)
_window_refresh_callback_repository = {}
_callback_repositories.append(_window_refresh_callback_repository)
_glfw.glfwSetWindowRefreshCallback.restype = _GLFWwindowrefreshfun
_glfw.glfwSetWindowRefreshCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowrefreshfun]
def set_window_refresh_callback(window, cbfun):
    '''
    Sets the refresh callback for the specified window; returns the
    previously set Python callback, if any.
    Wrapper for:
        GLFWwindowrefreshfun glfwSetWindowRefreshCallback(GLFWwindow* window, GLFWwindowrefreshfun cbfun);
    '''
    return _install_window_callback(_window_refresh_callback_repository,
                                    _GLFWwindowrefreshfun,
                                    _glfw.glfwSetWindowRefreshCallback,
                                    window, cbfun)
_window_focus_callback_repository = {}
_callback_repositories.append(_window_focus_callback_repository)
_glfw.glfwSetWindowFocusCallback.restype = _GLFWwindowfocusfun
_glfw.glfwSetWindowFocusCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowfocusfun]
def set_window_focus_callback(window, cbfun):
    '''
    Sets the focus callback for the specified window; returns the
    previously set Python callback, if any.
    Wrapper for:
        GLFWwindowfocusfun glfwSetWindowFocusCallback(GLFWwindow* window, GLFWwindowfocusfun cbfun);
    '''
    return _install_window_callback(_window_focus_callback_repository,
                                    _GLFWwindowfocusfun,
                                    _glfw.glfwSetWindowFocusCallback,
                                    window, cbfun)
_window_iconify_callback_repository = {}
_callback_repositories.append(_window_iconify_callback_repository)
_glfw.glfwSetWindowIconifyCallback.restype = _GLFWwindowiconifyfun
_glfw.glfwSetWindowIconifyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowiconifyfun]
def set_window_iconify_callback(window, cbfun):
    '''
    Sets the iconify callback for the specified window; returns the
    previously set Python callback, if any.
    Wrapper for:
        GLFWwindowiconifyfun glfwSetWindowIconifyCallback(GLFWwindow* window, GLFWwindowiconifyfun cbfun);
    '''
    return _install_window_callback(_window_iconify_callback_repository,
                                    _GLFWwindowiconifyfun,
                                    _glfw.glfwSetWindowIconifyCallback,
                                    window, cbfun)
_framebuffer_size_callback_repository = {}
_callback_repositories.append(_framebuffer_size_callback_repository)
_glfw.glfwSetFramebufferSizeCallback.restype = _GLFWframebuffersizefun
_glfw.glfwSetFramebufferSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                                 _GLFWframebuffersizefun]
def set_framebuffer_size_callback(window, cbfun):
    '''
    Sets the framebuffer resize callback for the specified window;
    returns the previously set Python callback, if any.
    Wrapper for:
        GLFWframebuffersizefun glfwSetFramebufferSizeCallback(GLFWwindow* window, GLFWframebuffersizefun cbfun);
    '''
    return _install_window_callback(_framebuffer_size_callback_repository,
                                    _GLFWframebuffersizefun,
                                    _glfw.glfwSetFramebufferSizeCallback,
                                    window, cbfun)
_glfw.glfwPollEvents.restype = None
_glfw.glfwPollEvents.argtypes = []
def poll_events():
    '''
    Processes any pending events and returns immediately.
    Wrapper for:
        void glfwPollEvents(void);
    '''
    _glfw.glfwPollEvents()
_glfw.glfwWaitEvents.restype = None
_glfw.glfwWaitEvents.argtypes = []
def wait_events():
    '''
    Blocks until at least one event is pending, then processes all
    pending events.
    Wrapper for:
        void glfwWaitEvents(void);
    '''
    _glfw.glfwWaitEvents()
_glfw.glfwGetInputMode.restype = ctypes.c_int
_glfw.glfwGetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int]
def get_input_mode(window, mode):
    '''
    Returns the current value of the given input option for the window.
    Wrapper for:
        int glfwGetInputMode(GLFWwindow* window, int mode);
    '''
    return _glfw.glfwGetInputMode(window, mode)
_glfw.glfwSetInputMode.restype = None
_glfw.glfwSetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_input_mode(window, mode, value):
    '''
    Sets an input option for the given window.
    @param[in] window The window whose input mode to set.
    @param[in] mode One of `GLFW_CURSOR`, `GLFW_STICKY_KEYS` or
    `GLFW_STICKY_MOUSE_BUTTONS`.
    @param[in] value The new value of the specified input mode.
    Wrapper for:
        void glfwSetInputMode(GLFWwindow* window, int mode, int value);
    '''
    _glfw.glfwSetInputMode(window, mode, value)
_glfw.glfwGetKey.restype = ctypes.c_int
_glfw.glfwGetKey.argtypes = [ctypes.POINTER(_GLFWwindow),
                             ctypes.c_int]
def get_key(window, key):
    '''
    Returns the last reported state of the given keyboard key for the
    specified window.
    Wrapper for:
        int glfwGetKey(GLFWwindow* window, int key);
    '''
    state = _glfw.glfwGetKey(window, key)
    return state
_glfw.glfwGetMouseButton.restype = ctypes.c_int
_glfw.glfwGetMouseButton.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_int]
def get_mouse_button(window, button):
    '''
    Returns the last reported state of the given mouse button for the
    specified window.
    Wrapper for:
        int glfwGetMouseButton(GLFWwindow* window, int button);
    '''
    state = _glfw.glfwGetMouseButton(window, button)
    return state
_glfw.glfwGetCursorPos.restype = None
_glfw.glfwGetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_double),
                                   ctypes.POINTER(ctypes.c_double)]
def get_cursor_pos(window):
    '''
    Returns the last reported cursor position, relative to the client
    area of the window, as an (xpos, ypos) tuple of floats.
    Wrapper for:
        void glfwGetCursorPos(GLFWwindow* window, double* xpos, double* ypos);
    '''
    xpos = ctypes.c_double(0.0)
    ypos = ctypes.c_double(0.0)
    _glfw.glfwGetCursorPos(window, ctypes.pointer(xpos),
                           ctypes.pointer(ypos))
    return xpos.value, ypos.value
_glfw.glfwSetCursorPos.restype = None
_glfw.glfwSetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_double,
                                   ctypes.c_double]
def set_cursor_pos(window, xpos, ypos):
    '''
    Moves the cursor to (xpos, ypos), relative to the client area of the
    window.
    Wrapper for:
        void glfwSetCursorPos(GLFWwindow* window, double xpos, double ypos);
    '''
    _glfw.glfwSetCursorPos(window, xpos, ypos)
def _install_input_callback(repository, functype, c_setter, window, cbfun):
    '''
    Shared machinery for the input-related callback setters below.
    Registers cbfun for the given window in repository (keeping the
    ctypes function wrapper alive so it is not garbage collected while
    installed), installs it through the C-level setter, and returns the
    previously registered Python callback, if any.
    '''
    # NOTE(review): deriving the dict key by reading the pointer through
    # c_long truncates addresses on LLP64 platforms (64-bit Windows).
    # Kept unchanged because other code in this module derives the
    # repository keys the same way and the two schemes must agree.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = functype(cbfun)
    repository[window_addr] = (cbfun, c_cbfun)
    c_setter(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_key_callback_repository = {}
_callback_repositories.append(_key_callback_repository)
_glfw.glfwSetKeyCallback.restype = _GLFWkeyfun
_glfw.glfwSetKeyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     _GLFWkeyfun]
def set_key_callback(window, cbfun):
    '''
    Sets the key callback; returns the previously set Python callback,
    if any.
    Wrapper for:
        GLFWkeyfun glfwSetKeyCallback(GLFWwindow* window, GLFWkeyfun cbfun);
    '''
    return _install_input_callback(_key_callback_repository,
                                   _GLFWkeyfun,
                                   _glfw.glfwSetKeyCallback,
                                   window, cbfun)
_char_callback_repository = {}
_callback_repositories.append(_char_callback_repository)
_glfw.glfwSetCharCallback.restype = _GLFWcharfun
_glfw.glfwSetCharCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      _GLFWcharfun]
def set_char_callback(window, cbfun):
    '''
    Sets the Unicode character callback; returns the previously set
    Python callback, if any.
    Wrapper for:
        GLFWcharfun glfwSetCharCallback(GLFWwindow* window, GLFWcharfun cbfun);
    '''
    return _install_input_callback(_char_callback_repository,
                                   _GLFWcharfun,
                                   _glfw.glfwSetCharCallback,
                                   window, cbfun)
_mouse_button_callback_repository = {}
_callback_repositories.append(_mouse_button_callback_repository)
_glfw.glfwSetMouseButtonCallback.restype = _GLFWmousebuttonfun
_glfw.glfwSetMouseButtonCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWmousebuttonfun]
def set_mouse_button_callback(window, cbfun):
    '''
    Sets the mouse button callback; returns the previously set Python
    callback, if any.
    Wrapper for:
        GLFWmousebuttonfun glfwSetMouseButtonCallback(GLFWwindow* window, GLFWmousebuttonfun cbfun);
    '''
    return _install_input_callback(_mouse_button_callback_repository,
                                   _GLFWmousebuttonfun,
                                   _glfw.glfwSetMouseButtonCallback,
                                   window, cbfun)
_cursor_pos_callback_repository = {}
_callback_repositories.append(_cursor_pos_callback_repository)
_glfw.glfwSetCursorPosCallback.restype = _GLFWcursorposfun
_glfw.glfwSetCursorPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWcursorposfun]
def set_cursor_pos_callback(window, cbfun):
    '''
    Sets the cursor position callback; returns the previously set
    Python callback, if any.
    Wrapper for:
        GLFWcursorposfun glfwSetCursorPosCallback(GLFWwindow* window, GLFWcursorposfun cbfun);
    '''
    return _install_input_callback(_cursor_pos_callback_repository,
                                   _GLFWcursorposfun,
                                   _glfw.glfwSetCursorPosCallback,
                                   window, cbfun)
_cursor_enter_callback_repository = {}
_callback_repositories.append(_cursor_enter_callback_repository)
_glfw.glfwSetCursorEnterCallback.restype = _GLFWcursorenterfun
_glfw.glfwSetCursorEnterCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWcursorenterfun]
def set_cursor_enter_callback(window, cbfun):
    '''
    Sets the cursor enter/exit callback; returns the previously set
    Python callback, if any.
    Wrapper for:
        GLFWcursorenterfun glfwSetCursorEnterCallback(GLFWwindow* window, GLFWcursorenterfun cbfun);
    '''
    return _install_input_callback(_cursor_enter_callback_repository,
                                   _GLFWcursorenterfun,
                                   _glfw.glfwSetCursorEnterCallback,
                                   window, cbfun)
_scroll_callback_repository = {}
_callback_repositories.append(_scroll_callback_repository)
_glfw.glfwSetScrollCallback.restype = _GLFWscrollfun
_glfw.glfwSetScrollCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                        _GLFWscrollfun]
def set_scroll_callback(window, cbfun):
    '''
    Sets the scroll callback; returns the previously set Python
    callback, if any.
    Wrapper for:
        GLFWscrollfun glfwSetScrollCallback(GLFWwindow* window, GLFWscrollfun cbfun);
    '''
    return _install_input_callback(_scroll_callback_repository,
                                   _GLFWscrollfun,
                                   _glfw.glfwSetScrollCallback,
                                   window, cbfun)
_glfw.glfwJoystickPresent.restype = ctypes.c_int
_glfw.glfwJoystickPresent.argtypes = [ctypes.c_int]
def joystick_present(joy):
    '''
    Returns non-zero if the given joystick is present.
    Wrapper for:
        int glfwJoystickPresent(int joy);
    '''
    return _glfw.glfwJoystickPresent(joy)
_glfw.glfwGetJoystickAxes.restype = ctypes.POINTER(ctypes.c_float)
_glfw.glfwGetJoystickAxes.argtypes = [ctypes.c_int,
                                      ctypes.POINTER(ctypes.c_int)]
def get_joystick_axes(joy):
    '''
    Returns a (float-pointer, count) pair describing the values of all
    axes of the given joystick.
    Wrapper for:
        const float* glfwGetJoystickAxes(int joy, int* count);
    '''
    count = ctypes.c_int(0)
    axes = _glfw.glfwGetJoystickAxes(joy, ctypes.pointer(count))
    return axes, count.value
_glfw.glfwGetJoystickButtons.restype = ctypes.POINTER(ctypes.c_ubyte)
_glfw.glfwGetJoystickButtons.argtypes = [ctypes.c_int,
                                         ctypes.POINTER(ctypes.c_int)]
def get_joystick_buttons(joy):
    '''
    Returns a (byte-pointer, count) pair describing the state of all
    buttons of the given joystick.
    Wrapper for:
        const unsigned char* glfwGetJoystickButtons(int joy, int* count);
    '''
    count = ctypes.c_int(0)
    buttons = _glfw.glfwGetJoystickButtons(joy, ctypes.pointer(count))
    return buttons, count.value
_glfw.glfwGetJoystickName.restype = ctypes.c_char_p
_glfw.glfwGetJoystickName.argtypes = [ctypes.c_int]
def get_joystick_name(joy):
    '''
    Returns the name of the given joystick.
    Wrapper for:
        const char* glfwGetJoystickName(int joy);
    '''
    return _glfw.glfwGetJoystickName(joy)
_glfw.glfwSetClipboardString.restype = None
_glfw.glfwSetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.c_char_p]
def set_clipboard_string(window, string):
    '''
    Copies the given string to the system clipboard; string is converted
    to a C string with _to_char_p before being handed to the library.
    Wrapper for:
        void glfwSetClipboardString(GLFWwindow* window, const char* string);
    '''
    c_string = _to_char_p(string)
    _glfw.glfwSetClipboardString(window, c_string)
_glfw.glfwGetClipboardString.restype = ctypes.c_char_p
_glfw.glfwGetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_clipboard_string(window):
    '''
    Returns the contents of the system clipboard.
    Wrapper for:
        const char* glfwGetClipboardString(GLFWwindow* window);
    '''
    return _glfw.glfwGetClipboardString(window)
_glfw.glfwGetTime.restype = ctypes.c_double
_glfw.glfwGetTime.argtypes = []
def get_time():
    '''
    Returns the current value of the GLFW timer as a float.
    Wrapper for:
        double glfwGetTime(void);
    '''
    return _glfw.glfwGetTime()
_glfw.glfwSetTime.restype = None
_glfw.glfwSetTime.argtypes = [ctypes.c_double]
def set_time(time):
    '''
    Resets the GLFW timer to the given value.
    Wrapper for:
        void glfwSetTime(double time);
    '''
    _glfw.glfwSetTime(time)
_glfw.glfwMakeContextCurrent.restype = None
_glfw.glfwMakeContextCurrent.argtypes = [ctypes.POINTER(_GLFWwindow)]
def make_context_current(window):
    '''
    Makes the context of the given window current on the calling
    thread.
    Wrapper for:
        void glfwMakeContextCurrent(GLFWwindow* window);
    '''
    _glfw.glfwMakeContextCurrent(window)
_glfw.glfwGetCurrentContext.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwGetCurrentContext.argtypes = []
def get_current_context():
    '''
    Returns the window whose context is current on the calling thread.
    Wrapper for:
        GLFWwindow* glfwGetCurrentContext(void);
    '''
    return _glfw.glfwGetCurrentContext()
_glfw.glfwSwapBuffers.restype = None
_glfw.glfwSwapBuffers.argtypes = [ctypes.POINTER(_GLFWwindow)]
def swap_buffers(window):
    '''
    Swaps the front and back buffers of the given window.
    Wrapper for:
        void glfwSwapBuffers(GLFWwindow* window);
    '''
    _glfw.glfwSwapBuffers(window)
_glfw.glfwSwapInterval.restype = None
_glfw.glfwSwapInterval.argtypes = [ctypes.c_int]
def swap_interval(interval):
    '''
    Sets the swap interval (number of vertical blanks to wait between
    buffer swaps) for the current context.
    Wrapper for:
        void glfwSwapInterval(int interval);
    '''
    _glfw.glfwSwapInterval(interval)
_glfw.glfwExtensionSupported.restype = ctypes.c_int
_glfw.glfwExtensionSupported.argtypes = [ctypes.c_char_p]
def extension_supported(extension):
    '''
    Returns non-zero if the named extension is available.
    Wrapper for:
        int glfwExtensionSupported(const char* extension);
    '''
    c_extension = _to_char_p(extension)
    return _glfw.glfwExtensionSupported(c_extension)
_glfw.glfwGetProcAddress.restype = ctypes.c_void_p
_glfw.glfwGetProcAddress.argtypes = [ctypes.c_char_p]
def get_proc_address(procname):
    '''
    Returns the address of the named function for the current context
    (None for a NULL result, per ctypes c_void_p handling).
    Wrapper for:
        GLFWglproc glfwGetProcAddress(const char* procname);
    '''
    c_procname = _to_char_p(procname)
    return _glfw.glfwGetProcAddress(c_procname)
fogleman/pg | pg/glfw.py | _GLFWvidmode.unwrap | python | def unwrap(self):
'''
Returns a nested python sequence.
'''
size = self.width, self.height
bits = self.red_bits, self.green_bits, self.blue_bits
return size, bits, self.refresh_rate | Returns a nested python sequence. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/glfw.py#L190-L196 | null | class _GLFWvidmode(ctypes.Structure):
'''
Wrapper for:
typedef struct GLFWvidmode GLFWvidmode;
'''
_fields_ = [("width", ctypes.c_int),
("height", ctypes.c_int),
("red_bits", ctypes.c_int),
("green_bits", ctypes.c_int),
("blue_bits", ctypes.c_int),
("refresh_rate", ctypes.c_uint)]
def __init__(self):
ctypes.Structure.__init__(self)
self.width = 0
self.height = 0
self.red_bits = 0
self.green_bits = 0
self.blue_bits = 0
self.refresh_rate = 0
def wrap(self, video_mode):
'''
Wraps a nested python sequence.
'''
size, bits, self.refresh_rate = video_mode
self.width, self.height = size
self.red_bits, self.green_bits, self.blue_bits = bits
|
fogleman/pg | pg/glfw.py | _GLFWgammaramp.wrap | python | def wrap(self, gammaramp):
'''
Wraps a nested python sequence.
'''
red, green, blue = gammaramp
size = min(len(red), len(green), len(blue))
array_type = ctypes.c_ushort*size
self.size = ctypes.c_uint(size)
self.red_array = array_type()
self.green_array = array_type()
self.blue_array = array_type()
for i in range(self.size):
self.red_array[i] = int(red[i]*65535)
self.green_array[i] = int(green[i]*65535)
self.blue_array[i] = int(blue[i]*65535)
pointer_type = ctypes.POINTER(ctypes.c_ushort)
self.red = ctypes.cast(self.red_array, pointer_type)
self.green = ctypes.cast(self.green_array, pointer_type)
self.blue = ctypes.cast(self.blue_array, pointer_type) | Wraps a nested python sequence. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/glfw.py#L219-L237 | null | class _GLFWgammaramp(ctypes.Structure):
'''
Wrapper for:
typedef struct GLFWgammaramp GLFWgammaramp;
'''
_fields_ = [("red", ctypes.POINTER(ctypes.c_ushort)),
("green", ctypes.POINTER(ctypes.c_ushort)),
("blue", ctypes.POINTER(ctypes.c_ushort)),
("size", ctypes.c_uint)]
def __init__(self):
ctypes.Structure.__init__(self)
self.red = None
self.red_array = None
self.green = None
self.green_array = None
self.blue = None
self.blue_array = None
self.size = 0
def unwrap(self):
'''
Returns a nested python sequence.
'''
red = [self.red[i]/65535.0 for i in range(self.size)]
green = [self.green[i]/65535.0 for i in range(self.size)]
blue = [self.blue[i]/65535.0 for i in range(self.size)]
return red, green, blue
|
fogleman/pg | pg/glfw.py | _GLFWgammaramp.unwrap | python | def unwrap(self):
'''
Returns a nested python sequence.
'''
red = [self.red[i]/65535.0 for i in range(self.size)]
green = [self.green[i]/65535.0 for i in range(self.size)]
blue = [self.blue[i]/65535.0 for i in range(self.size)]
return red, green, blue | Returns a nested python sequence. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/glfw.py#L239-L246 | null | class _GLFWgammaramp(ctypes.Structure):
'''
Wrapper for:
typedef struct GLFWgammaramp GLFWgammaramp;
'''
_fields_ = [("red", ctypes.POINTER(ctypes.c_ushort)),
("green", ctypes.POINTER(ctypes.c_ushort)),
("blue", ctypes.POINTER(ctypes.c_ushort)),
("size", ctypes.c_uint)]
def __init__(self):
ctypes.Structure.__init__(self)
self.red = None
self.red_array = None
self.green = None
self.green_array = None
self.blue = None
self.blue_array = None
self.size = 0
def wrap(self, gammaramp):
'''
Wraps a nested python sequence.
'''
red, green, blue = gammaramp
size = min(len(red), len(green), len(blue))
array_type = ctypes.c_ushort*size
self.size = ctypes.c_uint(size)
self.red_array = array_type()
self.green_array = array_type()
self.blue_array = array_type()
for i in range(self.size):
self.red_array[i] = int(red[i]*65535)
self.green_array[i] = int(green[i]*65535)
self.blue_array[i] = int(blue[i]*65535)
pointer_type = ctypes.POINTER(ctypes.c_ushort)
self.red = ctypes.cast(self.red_array, pointer_type)
self.green = ctypes.cast(self.green_array, pointer_type)
self.blue = ctypes.cast(self.blue_array, pointer_type)
|
fogleman/pg | pg/util.py | hex_color | python | def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b) | Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L5-L12 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None
def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | normalize | python | def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector) | Normalizes the `vector` so that its length is 1. `vector` can have
any number of components. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L14-L19 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None
def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | distance | python | def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5 | Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L21-L25 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None
def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | cross | python | def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
) | Computes the cross product of two vectors. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L27-L34 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None
def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | dot | python | def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2 | Computes the dot product of two vectors. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L36-L41 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None
def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | add | python | def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2)) | Adds two vectors. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L43-L46 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None
def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | sub | python | def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2)) | Subtracts two vectors. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L48-L51 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None
def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | interpolate | python | def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t)) | Interpolate from one vector to another. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L63-L66 | [
"def add(v1, v2):\n '''Adds two vectors.\n '''\n return tuple(a + b for a, b in zip(v1, v2))\n",
"def mul(v, s):\n '''Multiplies a vector and a scalar.\n '''\n return tuple(a * s for a in v)\n",
"def sub(v1, v2):\n '''Subtracts two vectors.\n '''\n return tuple(a - b for a, b in zip(v... | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
    '''Computes and returns the distance between two points, `p1` and `p2`.
    The points can have any number of components.
    '''
    total = 0
    for a, b in zip(p1, p2):
        delta = a - b
        total += delta * delta
    return total ** 0.5
def cross(v1, v2):
    '''Computes the cross product of two vectors.
    '''
    x1, y1, z1 = v1[0], v1[1], v1[2]
    x2, y2, z2 = v2[0], v2[1], v2[2]
    return (
        y1 * z2 - z1 * y2,
        z1 * x2 - x1 * z2,
        x1 * y2 - y1 * x2,
    )
def dot(v1, v2):
    '''Computes the dot product of two vectors.

    Generalized to accept vectors with any number of components, matching
    the conventions of `add`, `sub`, `distance` and `normalize`; the old
    implementation only supported 3-component vectors. For 3-component
    input the result is bit-identical to before.
    '''
    return sum(a * b for a, b in zip(v1, v2))
def add(v1, v2):
    '''Adds two vectors.
    '''
    components = []
    for a, b in zip(v1, v2):
        components.append(a + b)
    return tuple(components)
def sub(v1, v2):
    '''Subtracts two vectors.
    '''
    components = []
    for a, b in zip(v1, v2):
        components.append(a - b)
    return tuple(components)
def mul(v, s):
    '''Multiplies a vector and a scalar.
    '''
    scaled = []
    for component in v:
        scaled.append(component * s)
    return tuple(scaled)
def neg(vector):
    '''Negates a vector.
    '''
    negated = []
    for component in vector:
        negated.append(-component)
    return tuple(negated)
def normal_from_points(a, b, c):
    '''Computes a normal vector given three points.

    The result is the unit normal of the plane through `a`, `b`, `c`,
    oriented by the winding order of the points.
    '''
    ax, ay, az = a
    bx, by, bz = b
    cx, cy, cz = c
    # Edge vectors from `a`; their cross product is perpendicular to the plane.
    ab = (bx - ax, by - ay, bz - az)
    ac = (cx - ax, cy - ay, cz - az)
    x, y, z = cross(ab, ac)
    length = (x * x + y * y + z * z) ** 0.5
    return (x / length, y / length, z / length)
def smooth_normals(positions, normals):
    '''Assigns an averaged normal to each position based on all of the normals
    originally used for the position.
    '''
    # Group every normal by the position it was paired with.
    lookup = defaultdict(list)
    for position, normal in zip(positions, normals):
        lookup[position].append(normal)
    smoothed = []
    for position in positions:
        sx = sy = sz = 0
        for nx, ny, nz in lookup[position]:
            sx += nx
            sy += ny
            sz += nz
        # Re-normalize the summed normal so the average has unit length.
        length = (sx * sx + sy * sy + sz * sz) ** 0.5
        smoothed.append((sx / length, sy / length, sz / length))
    return smoothed
def bounding_box(positions):
    '''Computes the bounding box for a list of 3-dimensional points.
    '''
    lo = hi = positions[0]
    for point in positions:
        x, y, z = point
        lo = (min(lo[0], x), min(lo[1], y), min(lo[2], z))
        hi = (max(hi[0], x), max(hi[1], y), max(hi[2], z))
    return lo, hi
def recenter(positions):
    '''Returns a list of new positions centered around the origin.
    '''
    lo, hi = bounding_box(positions)
    # The offset is the midpoint of the bounding box along each axis.
    dx = hi[0] - (hi[0] - lo[0]) / 2.0
    dy = hi[1] - (hi[1] - lo[1]) / 2.0
    dz = hi[2] - (hi[2] - lo[2]) / 2.0
    return [(x - dx, y - dy, z - dz) for x, y, z in positions]
def interleave(*args):
    '''Interleaves the elements of the provided arrays.

    >>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
    >>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
    >>> interleave(a, b)
    [(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]

    This is useful for combining multiple vertex attributes into a single
    vertex buffer. The shader attributes can be assigned a slice of the
    vertex buffer.
    '''
    return [tuple(flatten(row)) for row in zip(*args)]
def flatten(array):
    '''Flattens the elements of the provided array, `data`.

    >>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
    >>> flatten(a)
    [0, 0, 1, 0, 2, 0, 3, 0]

    The flattening process is not recursive, it is only one level deep.
    '''
    return [element for group in array for element in group]
def distinct(iterable, keyfunc=None):
    '''Yields distinct items from `iterable` in the order that they appear.
    '''
    emitted = set()
    for element in iterable:
        marker = keyfunc(element) if keyfunc is not None else element
        if marker in emitted:
            continue
        emitted.add(marker)
        yield element
def ray_triangle_intersection(v1, v2, v3, o, d):
    '''Computes the distance from a point to a triangle given a ray.

    Moller-Trumbore intersection: `v1`, `v2`, `v3` are the triangle
    vertices, `o` is the ray origin and `d` is the ray direction (all
    3-component sequences). Returns the distance along the ray to the
    intersection point, or None when the ray is parallel to the triangle,
    misses it, or the hit lies behind the origin.
    '''
    eps = 1e-6
    # Triangle edge vectors (math inlined so the routine is self-contained).
    e1 = (v2[0] - v1[0], v2[1] - v1[1], v2[2] - v1[2])
    e2 = (v3[0] - v1[0], v3[1] - v1[1], v3[2] - v1[2])
    # p = d x e2; a near-zero determinant means the ray is parallel.
    p = (d[1] * e2[2] - d[2] * e2[1],
         d[2] * e2[0] - d[0] * e2[2],
         d[0] * e2[1] - d[1] * e2[0])
    det = e1[0] * p[0] + e1[1] * p[1] + e1[2] * p[2]
    if abs(det) < eps:
        return None
    inv = 1.0 / det
    t = (o[0] - v1[0], o[1] - v1[1], o[2] - v1[2])
    # First barycentric coordinate.
    u = (t[0] * p[0] + t[1] * p[1] + t[2] * p[2]) * inv
    if u < 0 or u > 1:
        return None
    # q = t x e1; second barycentric coordinate.
    q = (t[1] * e1[2] - t[2] * e1[1],
         t[2] * e1[0] - t[0] * e1[2],
         t[0] * e1[1] - t[1] * e1[0])
    v = (d[0] * q[0] + d[1] * q[1] + d[2] * q[2]) * inv
    # BUG FIX: the inside-triangle condition is u + v <= 1, not v <= 1;
    # the original accepted points beyond the edge opposite v1.
    if v < 0 or u + v > 1:
        return None
    dist = (e2[0] * q[0] + e2[1] * q[1] + e2[2] * q[2]) * inv
    if dist > eps:
        return dist
    return None
def pack_list(fmt, data):
    '''Convert a Python list into a ctypes buffer.

    `fmt` is a `struct` format string applied to each element of `data`;
    the packed values are concatenated into a single ctypes string buffer.
    This appears to be faster than the typical method of creating a ctypes
    array, e.g. (c_float * len(data))(*data)
    '''
    func = struct.Struct(fmt).pack
    # BUG FIX: struct.pack returns bytes, so join with b'' rather than ''
    # (joining bytes with a str separator raises TypeError on Python 3).
    return create_string_buffer(b''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | normal_from_points | python | def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d) | Computes a normal vector given three points. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L68-L78 | [
"def cross(v1, v2):\n '''Computes the cross product of two vectors.\n '''\n return (\n v1[1] * v2[2] - v1[2] * v2[1],\n v1[2] * v2[0] - v1[0] * v2[2],\n v1[0] * v2[1] - v1[1] * v2[0],\n )\n"
] | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
    '''Computes the distance from a point to a triangle given a ray.

    Moller-Trumbore intersection: `v1`, `v2`, `v3` are the triangle
    vertices, `o` is the ray origin and `d` is the ray direction (all
    3-component sequences). Returns the distance along the ray to the
    intersection point, or None when the ray is parallel to the triangle,
    misses it, or the hit lies behind the origin.
    '''
    eps = 1e-6
    # Triangle edge vectors (math inlined so the routine is self-contained).
    e1 = (v2[0] - v1[0], v2[1] - v1[1], v2[2] - v1[2])
    e2 = (v3[0] - v1[0], v3[1] - v1[1], v3[2] - v1[2])
    # p = d x e2; a near-zero determinant means the ray is parallel.
    p = (d[1] * e2[2] - d[2] * e2[1],
         d[2] * e2[0] - d[0] * e2[2],
         d[0] * e2[1] - d[1] * e2[0])
    det = e1[0] * p[0] + e1[1] * p[1] + e1[2] * p[2]
    if abs(det) < eps:
        return None
    inv = 1.0 / det
    t = (o[0] - v1[0], o[1] - v1[1], o[2] - v1[2])
    # First barycentric coordinate.
    u = (t[0] * p[0] + t[1] * p[1] + t[2] * p[2]) * inv
    if u < 0 or u > 1:
        return None
    # q = t x e1; second barycentric coordinate.
    q = (t[1] * e1[2] - t[2] * e1[1],
         t[2] * e1[0] - t[0] * e1[2],
         t[0] * e1[1] - t[1] * e1[0])
    v = (d[0] * q[0] + d[1] * q[1] + d[2] * q[2]) * inv
    # BUG FIX: the inside-triangle condition is u + v <= 1, not v <= 1;
    # the original accepted points beyond the edge opposite v1.
    if v < 0 or u + v > 1:
        return None
    dist = (e2[0] * q[0] + e2[1] * q[1] + e2[2] * q[2]) * inv
    if dist > eps:
        return dist
    return None
def pack_list(fmt, data):
    '''Convert a Python list into a ctypes buffer.

    `fmt` is a `struct` format string applied to each element of `data`;
    the packed values are concatenated into a single ctypes string buffer.
    This appears to be faster than the typical method of creating a ctypes
    array, e.g. (c_float * len(data))(*data)
    '''
    func = struct.Struct(fmt).pack
    # BUG FIX: struct.pack returns bytes, so join with b'' rather than ''
    # (joining bytes with a str separator raises TypeError on Python 3).
    return create_string_buffer(b''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | smooth_normals | python | def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result | Assigns an averaged normal to each position based on all of the normals
originally used for the position. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L80-L96 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
    '''Computes the distance from a point to a triangle given a ray.

    Moller-Trumbore intersection: `v1`, `v2`, `v3` are the triangle
    vertices, `o` is the ray origin and `d` is the ray direction (all
    3-component sequences). Returns the distance along the ray to the
    intersection point, or None when the ray is parallel to the triangle,
    misses it, or the hit lies behind the origin.
    '''
    eps = 1e-6
    # Triangle edge vectors (math inlined so the routine is self-contained).
    e1 = (v2[0] - v1[0], v2[1] - v1[1], v2[2] - v1[2])
    e2 = (v3[0] - v1[0], v3[1] - v1[1], v3[2] - v1[2])
    # p = d x e2; a near-zero determinant means the ray is parallel.
    p = (d[1] * e2[2] - d[2] * e2[1],
         d[2] * e2[0] - d[0] * e2[2],
         d[0] * e2[1] - d[1] * e2[0])
    det = e1[0] * p[0] + e1[1] * p[1] + e1[2] * p[2]
    if abs(det) < eps:
        return None
    inv = 1.0 / det
    t = (o[0] - v1[0], o[1] - v1[1], o[2] - v1[2])
    # First barycentric coordinate.
    u = (t[0] * p[0] + t[1] * p[1] + t[2] * p[2]) * inv
    if u < 0 or u > 1:
        return None
    # q = t x e1; second barycentric coordinate.
    q = (t[1] * e1[2] - t[2] * e1[1],
         t[2] * e1[0] - t[0] * e1[2],
         t[0] * e1[1] - t[1] * e1[0])
    v = (d[0] * q[0] + d[1] * q[1] + d[2] * q[2]) * inv
    # BUG FIX: the inside-triangle condition is u + v <= 1, not v <= 1;
    # the original accepted points beyond the edge opposite v1.
    if v < 0 or u + v > 1:
        return None
    dist = (e2[0] * q[0] + e2[1] * q[1] + e2[2] * q[2]) * inv
    if dist > eps:
        return dist
    return None
def pack_list(fmt, data):
    '''Convert a Python list into a ctypes buffer.

    `fmt` is a `struct` format string applied to each element of `data`;
    the packed values are concatenated into a single ctypes string buffer.
    This appears to be faster than the typical method of creating a ctypes
    array, e.g. (c_float * len(data))(*data)
    '''
    func = struct.Struct(fmt).pack
    # BUG FIX: struct.pack returns bytes, so join with b'' rather than ''
    # (joining bytes with a str separator raises TypeError on Python 3).
    return create_string_buffer(b''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | bounding_box | python | def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1) | Computes the bounding box for a list of 3-dimensional points. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L98-L109 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
    '''Computes the distance from a point to a triangle given a ray.

    Moller-Trumbore intersection: `v1`, `v2`, `v3` are the triangle
    vertices, `o` is the ray origin and `d` is the ray direction (all
    3-component sequences). Returns the distance along the ray to the
    intersection point, or None when the ray is parallel to the triangle,
    misses it, or the hit lies behind the origin.
    '''
    eps = 1e-6
    # Triangle edge vectors (math inlined so the routine is self-contained).
    e1 = (v2[0] - v1[0], v2[1] - v1[1], v2[2] - v1[2])
    e2 = (v3[0] - v1[0], v3[1] - v1[1], v3[2] - v1[2])
    # p = d x e2; a near-zero determinant means the ray is parallel.
    p = (d[1] * e2[2] - d[2] * e2[1],
         d[2] * e2[0] - d[0] * e2[2],
         d[0] * e2[1] - d[1] * e2[0])
    det = e1[0] * p[0] + e1[1] * p[1] + e1[2] * p[2]
    if abs(det) < eps:
        return None
    inv = 1.0 / det
    t = (o[0] - v1[0], o[1] - v1[1], o[2] - v1[2])
    # First barycentric coordinate.
    u = (t[0] * p[0] + t[1] * p[1] + t[2] * p[2]) * inv
    if u < 0 or u > 1:
        return None
    # q = t x e1; second barycentric coordinate.
    q = (t[1] * e1[2] - t[2] * e1[1],
         t[2] * e1[0] - t[0] * e1[2],
         t[0] * e1[1] - t[1] * e1[0])
    v = (d[0] * q[0] + d[1] * q[1] + d[2] * q[2]) * inv
    # BUG FIX: the inside-triangle condition is u + v <= 1, not v <= 1;
    # the original accepted points beyond the edge opposite v1.
    if v < 0 or u + v > 1:
        return None
    dist = (e2[0] * q[0] + e2[1] * q[1] + e2[2] * q[2]) * inv
    if dist > eps:
        return dist
    return None
def pack_list(fmt, data):
    '''Convert a Python list into a ctypes buffer.

    `fmt` is a `struct` format string applied to each element of `data`;
    the packed values are concatenated into a single ctypes string buffer.
    This appears to be faster than the typical method of creating a ctypes
    array, e.g. (c_float * len(data))(*data)
    '''
    func = struct.Struct(fmt).pack
    # BUG FIX: struct.pack returns bytes, so join with b'' rather than ''
    # (joining bytes with a str separator raises TypeError on Python 3).
    return create_string_buffer(b''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | recenter | python | def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result | Returns a list of new positions centered around the origin. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L111-L121 | [
"def bounding_box(positions):\n '''Computes the bounding box for a list of 3-dimensional points.\n '''\n (x0, y0, z0) = (x1, y1, z1) = positions[0]\n for x, y, z in positions:\n x0 = min(x0, x)\n y0 = min(y0, y)\n z0 = min(z0, z)\n x1 = max(x1, x)\n y1 = max(y1, y)\n ... | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
    '''Computes the distance from a point to a triangle given a ray.

    Moller-Trumbore intersection: `v1`, `v2`, `v3` are the triangle
    vertices, `o` is the ray origin and `d` is the ray direction (all
    3-component sequences). Returns the distance along the ray to the
    intersection point, or None when the ray is parallel to the triangle,
    misses it, or the hit lies behind the origin.
    '''
    eps = 1e-6
    # Triangle edge vectors (math inlined so the routine is self-contained).
    e1 = (v2[0] - v1[0], v2[1] - v1[1], v2[2] - v1[2])
    e2 = (v3[0] - v1[0], v3[1] - v1[1], v3[2] - v1[2])
    # p = d x e2; a near-zero determinant means the ray is parallel.
    p = (d[1] * e2[2] - d[2] * e2[1],
         d[2] * e2[0] - d[0] * e2[2],
         d[0] * e2[1] - d[1] * e2[0])
    det = e1[0] * p[0] + e1[1] * p[1] + e1[2] * p[2]
    if abs(det) < eps:
        return None
    inv = 1.0 / det
    t = (o[0] - v1[0], o[1] - v1[1], o[2] - v1[2])
    # First barycentric coordinate.
    u = (t[0] * p[0] + t[1] * p[1] + t[2] * p[2]) * inv
    if u < 0 or u > 1:
        return None
    # q = t x e1; second barycentric coordinate.
    q = (t[1] * e1[2] - t[2] * e1[1],
         t[2] * e1[0] - t[0] * e1[2],
         t[0] * e1[1] - t[1] * e1[0])
    v = (d[0] * q[0] + d[1] * q[1] + d[2] * q[2]) * inv
    # BUG FIX: the inside-triangle condition is u + v <= 1, not v <= 1;
    # the original accepted points beyond the edge opposite v1.
    if v < 0 or u + v > 1:
        return None
    dist = (e2[0] * q[0] + e2[1] * q[1] + e2[2] * q[2]) * inv
    if dist > eps:
        return dist
    return None
def pack_list(fmt, data):
    '''Convert a Python list into a ctypes buffer.

    `fmt` is a `struct` format string applied to each element of `data`;
    the packed values are concatenated into a single ctypes string buffer.
    This appears to be faster than the typical method of creating a ctypes
    array, e.g. (c_float * len(data))(*data)
    '''
    func = struct.Struct(fmt).pack
    # BUG FIX: struct.pack returns bytes, so join with b'' rather than ''
    # (joining bytes with a str separator raises TypeError on Python 3).
    return create_string_buffer(b''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | interleave | python | def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result | Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L123-L138 | [
"def flatten(array):\n '''Flattens the elements of the provided array, `data`.\n\n >>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]\n >>> flatten(a)\n [0, 0, 1, 0, 2, 0, 3, 0]\n\n The flattening process is not recursive, it is only one level deep.\n '''\n result = []\n for value in array... | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
    '''Computes the distance from a point to a triangle given a ray.

    Moller-Trumbore intersection: `v1`, `v2`, `v3` are the triangle
    vertices, `o` is the ray origin and `d` is the ray direction (all
    3-component sequences). Returns the distance along the ray to the
    intersection point, or None when the ray is parallel to the triangle,
    misses it, or the hit lies behind the origin.
    '''
    eps = 1e-6
    # Triangle edge vectors (math inlined so the routine is self-contained).
    e1 = (v2[0] - v1[0], v2[1] - v1[1], v2[2] - v1[2])
    e2 = (v3[0] - v1[0], v3[1] - v1[1], v3[2] - v1[2])
    # p = d x e2; a near-zero determinant means the ray is parallel.
    p = (d[1] * e2[2] - d[2] * e2[1],
         d[2] * e2[0] - d[0] * e2[2],
         d[0] * e2[1] - d[1] * e2[0])
    det = e1[0] * p[0] + e1[1] * p[1] + e1[2] * p[2]
    if abs(det) < eps:
        return None
    inv = 1.0 / det
    t = (o[0] - v1[0], o[1] - v1[1], o[2] - v1[2])
    # First barycentric coordinate.
    u = (t[0] * p[0] + t[1] * p[1] + t[2] * p[2]) * inv
    if u < 0 or u > 1:
        return None
    # q = t x e1; second barycentric coordinate.
    q = (t[1] * e1[2] - t[2] * e1[1],
         t[2] * e1[0] - t[0] * e1[2],
         t[0] * e1[1] - t[1] * e1[0])
    v = (d[0] * q[0] + d[1] * q[1] + d[2] * q[2]) * inv
    # BUG FIX: the inside-triangle condition is u + v <= 1, not v <= 1;
    # the original accepted points beyond the edge opposite v1.
    if v < 0 or u + v > 1:
        return None
    dist = (e2[0] * q[0] + e2[1] * q[1] + e2[2] * q[2]) * inv
    if dist > eps:
        return dist
    return None
def pack_list(fmt, data):
    '''Convert a Python list into a ctypes buffer.

    `fmt` is a `struct` format string applied to each element of `data`;
    the packed values are concatenated into a single ctypes string buffer.
    This appears to be faster than the typical method of creating a ctypes
    array, e.g. (c_float * len(data))(*data)
    '''
    func = struct.Struct(fmt).pack
    # BUG FIX: struct.pack returns bytes, so join with b'' rather than ''
    # (joining bytes with a str separator raises TypeError on Python 3).
    return create_string_buffer(b''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | distinct | python | def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item | Yields distinct items from `iterable` in the order that they appear. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L154-L162 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None
def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | ray_triangle_intersection | python | def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None | Computes the distance from a point to a triangle given a ray. | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L164-L186 | [
"def sub(v1, v2):\n '''Subtracts two vectors.\n '''\n return tuple(a - b for a, b in zip(v1, v2))\n",
"def cross(v1, v2):\n '''Computes the cross product of two vectors.\n '''\n return (\n v1[1] * v2[2] - v1[2] * v2[1],\n v1[2] * v2[0] - v1[0] * v2[2],\n v1[0] * v2[1] - v1[1... | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data]))
|
fogleman/pg | pg/util.py | pack_list | python | def pack_list(fmt, data):
'''Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data)
'''
func = struct.Struct(fmt).pack
return create_string_buffer(''.join([func(x) for x in data])) | Convert a Python list into a ctypes buffer.
This appears to be faster than the typical method of creating a ctypes
array, e.g. (c_float * len(data))(*data) | train | https://github.com/fogleman/pg/blob/124ea3803c788b2c98c4f3a428e5d26842a67b58/pg/util.py#L188-L195 | null | from collections import defaultdict
from ctypes import create_string_buffer
import struct
def hex_color(value):
'''Accepts a hexadecimal color `value` in the format ``0xrrggbb`` and
returns an (r, g, b) tuple where 0.0 <= r, g, b <= 1.0.
'''
r = ((value >> (8 * 2)) & 255) / 255.0
g = ((value >> (8 * 1)) & 255) / 255.0
b = ((value >> (8 * 0)) & 255) / 255.0
return (r, g, b)
def normalize(vector):
'''Normalizes the `vector` so that its length is 1. `vector` can have
any number of components.
'''
d = sum(x * x for x in vector) ** 0.5
return tuple(x / d for x in vector)
def distance(p1, p2):
'''Computes and returns the distance between two points, `p1` and `p2`.
The points can have any number of components.
'''
return sum((a - b) ** 2 for a, b in zip(p1, p2)) ** 0.5
def cross(v1, v2):
'''Computes the cross product of two vectors.
'''
return (
v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0],
)
def dot(v1, v2):
'''Computes the dot product of two vectors.
'''
x1, y1, z1 = v1
x2, y2, z2 = v2
return x1 * x2 + y1 * y2 + z1 * z2
def add(v1, v2):
'''Adds two vectors.
'''
return tuple(a + b for a, b in zip(v1, v2))
def sub(v1, v2):
'''Subtracts two vectors.
'''
return tuple(a - b for a, b in zip(v1, v2))
def mul(v, s):
'''Multiplies a vector and a scalar.
'''
return tuple(a * s for a in v)
def neg(vector):
'''Negates a vector.
'''
return tuple(-x for x in vector)
def interpolate(v1, v2, t):
'''Interpolate from one vector to another.
'''
return add(v1, mul(sub(v2, v1), t))
def normal_from_points(a, b, c):
'''Computes a normal vector given three points.
'''
x1, y1, z1 = a
x2, y2, z2 = b
x3, y3, z3 = c
ab = (x2 - x1, y2 - y1, z2 - z1)
ac = (x3 - x1, y3 - y1, z3 - z1)
x, y, z = cross(ab, ac)
d = (x * x + y * y + z * z) ** 0.5
return (x / d, y / d, z / d)
def smooth_normals(positions, normals):
'''Assigns an averaged normal to each position based on all of the normals
originally used for the position.
'''
lookup = defaultdict(list)
for position, normal in zip(positions, normals):
lookup[position].append(normal)
result = []
for position in positions:
tx = ty = tz = 0
for x, y, z in lookup[position]:
tx += x
ty += y
tz += z
d = (tx * tx + ty * ty + tz * tz) ** 0.5
result.append((tx / d, ty / d, tz / d))
return result
def bounding_box(positions):
'''Computes the bounding box for a list of 3-dimensional points.
'''
(x0, y0, z0) = (x1, y1, z1) = positions[0]
for x, y, z in positions:
x0 = min(x0, x)
y0 = min(y0, y)
z0 = min(z0, z)
x1 = max(x1, x)
y1 = max(y1, y)
z1 = max(z1, z)
return (x0, y0, z0), (x1, y1, z1)
def recenter(positions):
'''Returns a list of new positions centered around the origin.
'''
(x0, y0, z0), (x1, y1, z1) = bounding_box(positions)
dx = x1 - (x1 - x0) / 2.0
dy = y1 - (y1 - y0) / 2.0
dz = z1 - (z1 - z0) / 2.0
result = []
for x, y, z in positions:
result.append((x - dx, y - dy, z - dz))
return result
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
vertex buffer. The shader attributes can be assigned a slice of the
vertex buffer.
'''
result = []
for array in zip(*args):
result.append(tuple(flatten(array)))
return result
def flatten(array):
'''Flattens the elements of the provided array, `data`.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> flatten(a)
[0, 0, 1, 0, 2, 0, 3, 0]
The flattening process is not recursive, it is only one level deep.
'''
result = []
for value in array:
result.extend(value)
return result
def distinct(iterable, keyfunc=None):
'''Yields distinct items from `iterable` in the order that they appear.
'''
seen = set()
for item in iterable:
key = item if keyfunc is None else keyfunc(item)
if key not in seen:
seen.add(key)
yield item
def ray_triangle_intersection(v1, v2, v3, o, d):
'''Computes the distance from a point to a triangle given a ray.
'''
eps = 1e-6
e1 = sub(v2, v1)
e2 = sub(v3, v1)
p = cross(d, e2)
det = dot(e1, p)
if abs(det) < eps:
return None
inv = 1.0 / det
t = sub(o, v1)
u = dot(t, p) * inv
if u < 0 or u > 1:
return None
q = cross(t, e1)
v = dot(d, q) * inv
if v < 0 or v > 1:
return None
t = dot(e2, q) * inv
if t > eps:
return t
return None
|
remind101/stacker_blueprints | stacker_blueprints/sns.py | Topics.create_sqs_policy | python | def create_sqs_policy(self, topic_name, topic_arn, topic_subs):
t = self.template
arn_endpoints = []
url_endpoints = []
for sub in topic_subs:
arn_endpoints.append(sub["Endpoint"])
split_endpoint = sub["Endpoint"].split(":")
queue_url = "https://%s.%s.amazonaws.com/%s/%s" % (
split_endpoint[2], # literally "sqs"
split_endpoint[3], # AWS region
split_endpoint[4], # AWS ID
split_endpoint[5], # Queue name
)
url_endpoints.append(queue_url)
policy_doc = queue_policy(topic_arn, arn_endpoints)
t.add_resource(
sqs.QueuePolicy(
topic_name + "SubPolicy",
PolicyDocument=policy_doc,
Queues=url_endpoints,
)
) | This method creates the SQS policy needed for an SNS subscription. It
also takes the ARN of the SQS queue and converts it to the URL needed
for the subscription, as that takes a URL rather than the ARN. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/sns.py#L81-L110 | [
"def queue_policy(sns_arn, sqs_arns):\n stmts = []\n for arn in sqs_arns:\n stmts.append(\n Statement(\n Effect=\"Allow\",\n Principal=Principal(\"*\"),\n Action=[awacs.sqs.SendMessage],\n Resource=[arn],\n Condition=... | class Topics(Blueprint):
"""
Manages the creation of SNS topics.
"""
VARIABLES = {
"Topics": {
"type": dict,
"description": "Dictionary of SNS Topic definitions",
"validator": validate_topics,
}
}
def create_template(self):
variables = self.get_variables()
for topic_name, topic_config in variables["Topics"].iteritems():
self.create_topic(topic_name, topic_config)
def create_topic(self, topic_name, topic_config):
"""
Creates the SNS topic, along with any subscriptions requested.
"""
topic_subs = []
t = self.template
if "Subscription" in topic_config:
topic_subs = topic_config["Subscription"]
t.add_resource(
sns.Topic.from_dict(
topic_name,
topic_config
)
)
topic_arn = Ref(topic_name)
t.add_output(
Output(topic_name + "Name", Value=GetAtt(topic_name, "TopicName"))
)
t.add_output(Output(topic_name + "Arn", Value=topic_arn))
sqs_subs = [sub for sub in topic_subs if sub["Protocol"] == "sqs"]
if sqs_subs:
self.create_sqs_policy(topic_name, topic_arn, sqs_subs)
|
remind101/stacker_blueprints | stacker_blueprints/sns.py | Topics.create_topic | python | def create_topic(self, topic_name, topic_config):
topic_subs = []
t = self.template
if "Subscription" in topic_config:
topic_subs = topic_config["Subscription"]
t.add_resource(
sns.Topic.from_dict(
topic_name,
topic_config
)
)
topic_arn = Ref(topic_name)
t.add_output(
Output(topic_name + "Name", Value=GetAtt(topic_name, "TopicName"))
)
t.add_output(Output(topic_name + "Arn", Value=topic_arn))
sqs_subs = [sub for sub in topic_subs if sub["Protocol"] == "sqs"]
if sqs_subs:
self.create_sqs_policy(topic_name, topic_arn, sqs_subs) | Creates the SNS topic, along with any subscriptions requested. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/sns.py#L112-L138 | null | class Topics(Blueprint):
"""
Manages the creation of SNS topics.
"""
VARIABLES = {
"Topics": {
"type": dict,
"description": "Dictionary of SNS Topic definitions",
"validator": validate_topics,
}
}
def create_template(self):
variables = self.get_variables()
for topic_name, topic_config in variables["Topics"].iteritems():
self.create_topic(topic_name, topic_config)
def create_sqs_policy(self, topic_name, topic_arn, topic_subs):
"""
This method creates the SQS policy needed for an SNS subscription. It
also takes the ARN of the SQS queue and converts it to the URL needed
for the subscription, as that takes a URL rather than the ARN.
"""
t = self.template
arn_endpoints = []
url_endpoints = []
for sub in topic_subs:
arn_endpoints.append(sub["Endpoint"])
split_endpoint = sub["Endpoint"].split(":")
queue_url = "https://%s.%s.amazonaws.com/%s/%s" % (
split_endpoint[2], # literally "sqs"
split_endpoint[3], # AWS region
split_endpoint[4], # AWS ID
split_endpoint[5], # Queue name
)
url_endpoints.append(queue_url)
policy_doc = queue_policy(topic_arn, arn_endpoints)
t.add_resource(
sqs.QueuePolicy(
topic_name + "SubPolicy",
PolicyDocument=policy_doc,
Queues=url_endpoints,
)
)
|
remind101/stacker_blueprints | stacker_blueprints/aws_lambda.py | get_stream_action_type | python | def get_stream_action_type(stream_arn):
stream_type_map = {
"kinesis": awacs.kinesis.Action,
"dynamodb": awacs.dynamodb.Action,
}
stream_type = stream_arn.split(":")[2]
try:
return stream_type_map[stream_type]
except KeyError:
raise ValueError(
"Invalid stream type '%s' in arn '%s'" % (stream_type, stream_arn)
) | Returns the awacs Action for a stream type given an arn
Args:
stream_arn (str): The Arn of the stream.
Returns:
:class:`awacs.aws.Action`: The appropriate stream type awacs Action
class
Raises:
ValueError: If the stream type doesn't match kinesis or dynamodb. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/aws_lambda.py#L37-L62 | null | import logging
from stacker.blueprints.base import Blueprint
from stacker.blueprints.variables.types import TroposphereType
from stacker.util import cf_safe_name
from troposphere import (
NoValue,
Output,
Ref,
Sub,
iam,
)
from troposphere import awslambda
from troposphere import events
import awacs.logs
import awacs.kinesis
import awacs.dynamodb
from awacs.aws import Statement, Allow, Policy
from awacs.helpers.trust import get_lambda_assumerole_policy
from .policies import (
lambda_basic_execution_statements,
lambda_vpc_execution_statements,
)
logger = logging.getLogger(name=__name__)
def stream_reader_statements(stream_arn):
"""Returns statements to allow Lambda to read from a stream.
Handles both DynamoDB & Kinesis streams. Automatically figures out the
type of stream, and provides the correct actions from the supplied Arn.
Arg:
stream_arn (str): A kinesis or dynamodb stream arn.
Returns:
list: A list of statements.
"""
action_type = get_stream_action_type(stream_arn)
arn_parts = stream_arn.split("/")
# Cut off the last bit and replace it with a wildcard
wildcard_arn_parts = arn_parts[:-1]
wildcard_arn_parts.append("*")
wildcard_arn = "/".join(wildcard_arn_parts)
return [
Statement(
Effect=Allow,
Resource=[stream_arn],
Action=[
action_type("DescribeStream"),
action_type("GetRecords"),
action_type("GetShardIterator"),
]
),
Statement(
Effect=Allow,
Resource=[wildcard_arn],
Action=[action_type("ListStreams")]
)
]
class Function(Blueprint):
VARIABLES = {
"Code": {
"type": awslambda.Code,
"description": "The troposphere.awslambda.Code object "
"returned by the aws lambda hook.",
},
"DeadLetterArn": {
"type": str,
"description": "Dead Letter Queue (DLQ) Arn (SQS, SNS, etc) "
"that AWS Lambda (Lambda) sends events to when it "
"can't process them.",
"default": "",
},
"Description": {
"type": str,
"description": "Description of the function.",
"default": "",
},
"Environment": {
"type": dict,
"description": "Key-value pairs that Lambda caches and makes "
"available for your Lambda functions.",
"default": {},
},
"Handler": {
"type": str,
"description": "The name of the function (within your source "
"code) that Lambda calls to start running your "
"code.",
"default": "handler",
},
"KmsKeyArn": {
"type": str,
"description": "The Amazon Resource Name (ARN) of an AWS Key "
"Management Service (AWS KMS) key that Lambda "
"uses to encrypt and decrypt environment variable "
"values.",
"default": "",
},
"MemorySize": {
"type": int,
"description": "The amount of memory, in MB, that is allocated "
"to your Lambda function. Default: 128",
"default": 128,
},
"Runtime": {
"type": str,
"description": "The runtime environment for the Lambda function "
"that you are uploading.",
},
"Timeout": {
"type": int,
"description": "The function execution time (in seconds) after "
"which Lambda terminates the function. Default: 3",
"default": 3,
},
"VpcConfig": {
"type": dict,
"description": "If the Lambda function requires access to "
"resources in a VPC, specify a VPC configuration "
"that Lambda uses to set up an elastic network "
"interface (ENI). Valid keys are: "
"SecurityGroupIds (a list of Ids), and SubnetIds "
"(a list of Ids). We automatically add an inline "
"policy to allow the lambda to create ENIs.",
"default": {},
},
"Role": {
"type": str,
"description": "Arn of the Role to create the function as - if "
"not specified, a role will be created with the "
"basic permissions necessary for Lambda to run.",
"default": "",
},
"AliasName": {
"type": str,
"description": "The name of an optional alias.",
"default": "",
},
"AliasVersion": {
"type": str,
"description": "The version string for the alias without the "
"function Arn prepended.",
"default": "$LATEST",
},
"EventSourceMapping": {
"type": dict,
"description": "An optional event source mapping config.",
"default": {},
},
}
def code(self):
return self.get_variables()["Code"]
def dead_letter_config(self):
arn = self.get_variables()["DeadLetterArn"]
dlc = NoValue
if arn:
dlc = awslambda.DeadLetterConfig(TargetArn=arn)
return dlc
def environment(self):
environment = self.get_variables()["Environment"]
env = NoValue
if environment:
env = awslambda.Environment(Variables=environment)
return env
def vpc_config(self):
vpc_config = self.get_variables()["VpcConfig"]
config = NoValue
if vpc_config:
if isinstance(vpc_config['SubnetIds'], str):
vpc_config['SubnetIds'] = vpc_config['SubnetIds'].split(',')
config = awslambda.VPCConfig(**vpc_config)
return config
def add_policy_statements(self, statements):
"""Adds statements to the policy.
Args:
statements (:class:`awacs.aws.Statement` or list): Either a single
Statment, or a list of statements.
"""
if isinstance(statements, Statement):
statements = [statements]
self._policy_statements.extend(statements)
def extended_policy_statements(self):
"""Override this and add statements to add them to the lambda policy
Returns:
list: A list of :class:`awacs.aws.Statement` objects.
"""
return []
def generate_policy_statements(self):
"""Generates the policy statements for the role used by the function.
To add additional statements you can either override the
`extended_policy_statements` method to return a list of Statements
to be added to the policy, or override this method itself if you
need more control.
Returns:
list: A list of :class:`awacs.aws.Statement` objects.
"""
statements = self._policy_statements
statements.extend(
lambda_basic_execution_statements(
self.function.Ref()
)
)
extended_statements = self.extended_policy_statements()
if extended_statements:
statements.extend(extended_statements)
return statements
def create_policy(self):
t = self.template
self.policy = t.add_resource(
iam.PolicyType(
"Policy",
PolicyName=Sub("${AWS::StackName}-policy"),
PolicyDocument=Policy(
Statement=self.generate_policy_statements()
),
Roles=[self.role.Ref()],
)
)
t.add_output(
Output("PolicyName", Value=Ref(self.policy))
)
def create_role(self):
t = self.template
self.role = t.add_resource(
iam.Role(
"Role",
AssumeRolePolicyDocument=get_lambda_assumerole_policy()
)
)
if self.get_variables()["VpcConfig"]:
# allow this Lambda to modify ENIs to allow it to run in our VPC.
self.role.Policies = [
iam.Policy(
PolicyName=Sub("${AWS::StackName}-vpc-policy"),
PolicyDocument=Policy(
Statement=lambda_vpc_execution_statements()
),
)
]
t.add_output(
Output("RoleName", Value=Ref(self.role))
)
role_arn = self.role.GetAtt("Arn")
self.role_arn = role_arn
t.add_output(
Output("RoleArn", Value=role_arn)
)
def create_function(self):
t = self.template
variables = self.get_variables()
self.function = t.add_resource(
awslambda.Function(
"Function",
Code=self.code(),
DeadLetterConfig=self.dead_letter_config(),
Description=variables["Description"] or NoValue,
Environment=self.environment(),
Handler=variables["Handler"],
KmsKeyArn=variables["KmsKeyArn"] or NoValue,
MemorySize=variables["MemorySize"],
Role=self.role_arn,
Runtime=variables["Runtime"],
Timeout=variables["Timeout"],
VpcConfig=self.vpc_config(),
)
)
t.add_output(
Output("FunctionName", Value=self.function.Ref())
)
t.add_output(
Output("FunctionArn", Value=self.function.GetAtt("Arn"))
)
self.function_version = t.add_resource(
awslambda.Version(
"LatestVersion",
FunctionName=self.function.Ref()
)
)
t.add_output(
Output("LatestVersion",
Value=self.function_version.GetAtt("Version"))
)
t.add_output(
Output("LatestVersionArn",
Value=self.function_version.Ref())
)
alias_name = variables["AliasName"]
if alias_name:
self.alias = t.add_resource(
awslambda.Alias(
"Alias",
Name=alias_name,
FunctionName=self.function.Ref(),
FunctionVersion=variables["AliasVersion"] or "$LATEST",
)
)
t.add_output(Output("AliasArn", Value=self.alias.Ref()))
def create_event_source_mapping(self):
t = self.template
variables = self.get_variables()
mapping = variables["EventSourceMapping"]
if mapping:
if "FunctionName" in mapping:
logger.warn(
Sub("FunctionName defined in EventSourceMapping in "
"${AWS::StackName}. Overriding.")
)
mapping["FunctionName"] = self.function.GetAtt("Arn")
resource = t.add_resource(
awslambda.EventSourceMapping.from_dict(
"EventSourceMapping", mapping
)
)
if not variables["Role"]:
self.add_policy_statements(
stream_reader_statements(
mapping["EventSourceArn"]
)
)
t.add_output(
Output("EventSourceMappingId", Value=resource.Ref())
)
def create_template(self):
variables = self.get_variables()
self._policy_statements = []
role_arn = variables["Role"]
# Set here - used in `create_role` to determine if an external role
# was passed in. If an external role is passed in, no new role is
# created, and no policies are generated/added to the external
# role.
self.role_arn = role_arn
if not role_arn:
self.create_role()
self.create_function()
self.create_event_source_mapping()
# We don't use self.role_arn here because it is set internally if a
# role is created
if not role_arn:
self.create_policy()
class FunctionScheduler(Blueprint):
VARIABLES = {
"CloudwatchEventsRule": {
"type": TroposphereType(events.Rule),
"description": "The troposphere.events.Rule object params.",
},
}
def create_scheduler(self):
variables = self.get_variables()
troposphere_events_rule = variables["CloudwatchEventsRule"]
aws_lambda_arns = {}
# iterate over targets in the event Rule & gather aws_lambda_arns.
for target in getattr(troposphere_events_rule, "Targets", []):
if target.Arn.startswith("arn:aws:lambda:"):
safe_id = cf_safe_name(target.Id)
aws_lambda_arns[safe_id] = target.Arn
# schedule a Cloudwatch event rule to invoke the Targets.
rule = self.template.add_resource(troposphere_events_rule)
# allow cloudwatch to invoke on any of the given lambda targets.
for event_rule_target_id, aws_lambda_arn in aws_lambda_arns.items():
self.template.add_resource(
awslambda.Permission(
"PermToInvokeFunctionFor{}".format(event_rule_target_id),
Principal="events.amazonaws.com",
Action="lambda:InvokeFunction",
FunctionName=aws_lambda_arn,
SourceArn=rule.GetAtt("Arn")
)
)
def create_template(self):
self.create_scheduler()
|
remind101/stacker_blueprints | stacker_blueprints/aws_lambda.py | stream_reader_statements | python | def stream_reader_statements(stream_arn):
action_type = get_stream_action_type(stream_arn)
arn_parts = stream_arn.split("/")
# Cut off the last bit and replace it with a wildcard
wildcard_arn_parts = arn_parts[:-1]
wildcard_arn_parts.append("*")
wildcard_arn = "/".join(wildcard_arn_parts)
return [
Statement(
Effect=Allow,
Resource=[stream_arn],
Action=[
action_type("DescribeStream"),
action_type("GetRecords"),
action_type("GetShardIterator"),
]
),
Statement(
Effect=Allow,
Resource=[wildcard_arn],
Action=[action_type("ListStreams")]
)
] | Returns statements to allow Lambda to read from a stream.
Handles both DynamoDB & Kinesis streams. Automatically figures out the
type of stream, and provides the correct actions from the supplied Arn.
Arg:
stream_arn (str): A kinesis or dynamodb stream arn.
Returns:
list: A list of statements. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/aws_lambda.py#L65-L99 | [
"def get_stream_action_type(stream_arn):\n \"\"\"Returns the awacs Action for a stream type given an arn\n\n Args:\n stream_arn (str): The Arn of the stream.\n\n Returns:\n :class:`awacs.aws.Action`: The appropriate stream type awacs Action\n class\n\n Raises:\n ValueErro... | import logging
from stacker.blueprints.base import Blueprint
from stacker.blueprints.variables.types import TroposphereType
from stacker.util import cf_safe_name
from troposphere import (
NoValue,
Output,
Ref,
Sub,
iam,
)
from troposphere import awslambda
from troposphere import events
import awacs.logs
import awacs.kinesis
import awacs.dynamodb
from awacs.aws import Statement, Allow, Policy
from awacs.helpers.trust import get_lambda_assumerole_policy
from .policies import (
lambda_basic_execution_statements,
lambda_vpc_execution_statements,
)
logger = logging.getLogger(name=__name__)
def get_stream_action_type(stream_arn):
"""Returns the awacs Action for a stream type given an arn
Args:
stream_arn (str): The Arn of the stream.
Returns:
:class:`awacs.aws.Action`: The appropriate stream type awacs Action
class
Raises:
ValueError: If the stream type doesn't match kinesis or dynamodb.
"""
stream_type_map = {
"kinesis": awacs.kinesis.Action,
"dynamodb": awacs.dynamodb.Action,
}
stream_type = stream_arn.split(":")[2]
try:
return stream_type_map[stream_type]
except KeyError:
raise ValueError(
"Invalid stream type '%s' in arn '%s'" % (stream_type, stream_arn)
)
class Function(Blueprint):
VARIABLES = {
"Code": {
"type": awslambda.Code,
"description": "The troposphere.awslambda.Code object "
"returned by the aws lambda hook.",
},
"DeadLetterArn": {
"type": str,
"description": "Dead Letter Queue (DLQ) Arn (SQS, SNS, etc) "
"that AWS Lambda (Lambda) sends events to when it "
"can't process them.",
"default": "",
},
"Description": {
"type": str,
"description": "Description of the function.",
"default": "",
},
"Environment": {
"type": dict,
"description": "Key-value pairs that Lambda caches and makes "
"available for your Lambda functions.",
"default": {},
},
"Handler": {
"type": str,
"description": "The name of the function (within your source "
"code) that Lambda calls to start running your "
"code.",
"default": "handler",
},
"KmsKeyArn": {
"type": str,
"description": "The Amazon Resource Name (ARN) of an AWS Key "
"Management Service (AWS KMS) key that Lambda "
"uses to encrypt and decrypt environment variable "
"values.",
"default": "",
},
"MemorySize": {
"type": int,
"description": "The amount of memory, in MB, that is allocated "
"to your Lambda function. Default: 128",
"default": 128,
},
"Runtime": {
"type": str,
"description": "The runtime environment for the Lambda function "
"that you are uploading.",
},
"Timeout": {
"type": int,
"description": "The function execution time (in seconds) after "
"which Lambda terminates the function. Default: 3",
"default": 3,
},
"VpcConfig": {
"type": dict,
"description": "If the Lambda function requires access to "
"resources in a VPC, specify a VPC configuration "
"that Lambda uses to set up an elastic network "
"interface (ENI). Valid keys are: "
"SecurityGroupIds (a list of Ids), and SubnetIds "
"(a list of Ids). We automatically add an inline "
"policy to allow the lambda to create ENIs.",
"default": {},
},
"Role": {
"type": str,
"description": "Arn of the Role to create the function as - if "
"not specified, a role will be created with the "
"basic permissions necessary for Lambda to run.",
"default": "",
},
"AliasName": {
"type": str,
"description": "The name of an optional alias.",
"default": "",
},
"AliasVersion": {
"type": str,
"description": "The version string for the alias without the "
"function Arn prepended.",
"default": "$LATEST",
},
"EventSourceMapping": {
"type": dict,
"description": "An optional event source mapping config.",
"default": {},
},
}
def code(self):
return self.get_variables()["Code"]
def dead_letter_config(self):
arn = self.get_variables()["DeadLetterArn"]
dlc = NoValue
if arn:
dlc = awslambda.DeadLetterConfig(TargetArn=arn)
return dlc
def environment(self):
environment = self.get_variables()["Environment"]
env = NoValue
if environment:
env = awslambda.Environment(Variables=environment)
return env
def vpc_config(self):
vpc_config = self.get_variables()["VpcConfig"]
config = NoValue
if vpc_config:
if isinstance(vpc_config['SubnetIds'], str):
vpc_config['SubnetIds'] = vpc_config['SubnetIds'].split(',')
config = awslambda.VPCConfig(**vpc_config)
return config
def add_policy_statements(self, statements):
"""Adds statements to the policy.
Args:
statements (:class:`awacs.aws.Statement` or list): Either a single
Statment, or a list of statements.
"""
if isinstance(statements, Statement):
statements = [statements]
self._policy_statements.extend(statements)
def extended_policy_statements(self):
"""Override this and add statements to add them to the lambda policy
Returns:
list: A list of :class:`awacs.aws.Statement` objects.
"""
return []
def generate_policy_statements(self):
"""Generates the policy statements for the role used by the function.
To add additional statements you can either override the
`extended_policy_statements` method to return a list of Statements
to be added to the policy, or override this method itself if you
need more control.
Returns:
list: A list of :class:`awacs.aws.Statement` objects.
"""
statements = self._policy_statements
statements.extend(
lambda_basic_execution_statements(
self.function.Ref()
)
)
extended_statements = self.extended_policy_statements()
if extended_statements:
statements.extend(extended_statements)
return statements
def create_policy(self):
t = self.template
self.policy = t.add_resource(
iam.PolicyType(
"Policy",
PolicyName=Sub("${AWS::StackName}-policy"),
PolicyDocument=Policy(
Statement=self.generate_policy_statements()
),
Roles=[self.role.Ref()],
)
)
t.add_output(
Output("PolicyName", Value=Ref(self.policy))
)
def create_role(self):
t = self.template
self.role = t.add_resource(
iam.Role(
"Role",
AssumeRolePolicyDocument=get_lambda_assumerole_policy()
)
)
if self.get_variables()["VpcConfig"]:
# allow this Lambda to modify ENIs to allow it to run in our VPC.
self.role.Policies = [
iam.Policy(
PolicyName=Sub("${AWS::StackName}-vpc-policy"),
PolicyDocument=Policy(
Statement=lambda_vpc_execution_statements()
),
)
]
t.add_output(
Output("RoleName", Value=Ref(self.role))
)
role_arn = self.role.GetAtt("Arn")
self.role_arn = role_arn
t.add_output(
Output("RoleArn", Value=role_arn)
)
def create_function(self):
t = self.template
variables = self.get_variables()
self.function = t.add_resource(
awslambda.Function(
"Function",
Code=self.code(),
DeadLetterConfig=self.dead_letter_config(),
Description=variables["Description"] or NoValue,
Environment=self.environment(),
Handler=variables["Handler"],
KmsKeyArn=variables["KmsKeyArn"] or NoValue,
MemorySize=variables["MemorySize"],
Role=self.role_arn,
Runtime=variables["Runtime"],
Timeout=variables["Timeout"],
VpcConfig=self.vpc_config(),
)
)
t.add_output(
Output("FunctionName", Value=self.function.Ref())
)
t.add_output(
Output("FunctionArn", Value=self.function.GetAtt("Arn"))
)
self.function_version = t.add_resource(
awslambda.Version(
"LatestVersion",
FunctionName=self.function.Ref()
)
)
t.add_output(
Output("LatestVersion",
Value=self.function_version.GetAtt("Version"))
)
t.add_output(
Output("LatestVersionArn",
Value=self.function_version.Ref())
)
alias_name = variables["AliasName"]
if alias_name:
self.alias = t.add_resource(
awslambda.Alias(
"Alias",
Name=alias_name,
FunctionName=self.function.Ref(),
FunctionVersion=variables["AliasVersion"] or "$LATEST",
)
)
t.add_output(Output("AliasArn", Value=self.alias.Ref()))
def create_event_source_mapping(self):
t = self.template
variables = self.get_variables()
mapping = variables["EventSourceMapping"]
if mapping:
if "FunctionName" in mapping:
logger.warn(
Sub("FunctionName defined in EventSourceMapping in "
"${AWS::StackName}. Overriding.")
)
mapping["FunctionName"] = self.function.GetAtt("Arn")
resource = t.add_resource(
awslambda.EventSourceMapping.from_dict(
"EventSourceMapping", mapping
)
)
if not variables["Role"]:
self.add_policy_statements(
stream_reader_statements(
mapping["EventSourceArn"]
)
)
t.add_output(
Output("EventSourceMappingId", Value=resource.Ref())
)
def create_template(self):
variables = self.get_variables()
self._policy_statements = []
role_arn = variables["Role"]
# Set here - used in `create_role` to determine if an external role
# was passed in. If an external role is passed in, no new role is
# created, and no policies are generated/added to the external
# role.
self.role_arn = role_arn
if not role_arn:
self.create_role()
self.create_function()
self.create_event_source_mapping()
# We don't use self.role_arn here because it is set internally if a
# role is created
if not role_arn:
self.create_policy()
class FunctionScheduler(Blueprint):
VARIABLES = {
"CloudwatchEventsRule": {
"type": TroposphereType(events.Rule),
"description": "The troposphere.events.Rule object params.",
},
}
def create_scheduler(self):
variables = self.get_variables()
troposphere_events_rule = variables["CloudwatchEventsRule"]
aws_lambda_arns = {}
# iterate over targets in the event Rule & gather aws_lambda_arns.
for target in getattr(troposphere_events_rule, "Targets", []):
if target.Arn.startswith("arn:aws:lambda:"):
safe_id = cf_safe_name(target.Id)
aws_lambda_arns[safe_id] = target.Arn
# schedule a Cloudwatch event rule to invoke the Targets.
rule = self.template.add_resource(troposphere_events_rule)
# allow cloudwatch to invoke on any of the given lambda targets.
for event_rule_target_id, aws_lambda_arn in aws_lambda_arns.items():
self.template.add_resource(
awslambda.Permission(
"PermToInvokeFunctionFor{}".format(event_rule_target_id),
Principal="events.amazonaws.com",
Action="lambda:InvokeFunction",
FunctionName=aws_lambda_arn,
SourceArn=rule.GetAtt("Arn")
)
)
def create_template(self):
self.create_scheduler()
|
remind101/stacker_blueprints | stacker_blueprints/aws_lambda.py | Function.add_policy_statements | python | def add_policy_statements(self, statements):
if isinstance(statements, Statement):
statements = [statements]
self._policy_statements.extend(statements) | Adds statements to the policy.
Args:
statements (:class:`awacs.aws.Statement` or list): Either a single
Statment, or a list of statements. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/aws_lambda.py#L221-L230 | null | class Function(Blueprint):
VARIABLES = {
"Code": {
"type": awslambda.Code,
"description": "The troposphere.awslambda.Code object "
"returned by the aws lambda hook.",
},
"DeadLetterArn": {
"type": str,
"description": "Dead Letter Queue (DLQ) Arn (SQS, SNS, etc) "
"that AWS Lambda (Lambda) sends events to when it "
"can't process them.",
"default": "",
},
"Description": {
"type": str,
"description": "Description of the function.",
"default": "",
},
"Environment": {
"type": dict,
"description": "Key-value pairs that Lambda caches and makes "
"available for your Lambda functions.",
"default": {},
},
"Handler": {
"type": str,
"description": "The name of the function (within your source "
"code) that Lambda calls to start running your "
"code.",
"default": "handler",
},
"KmsKeyArn": {
"type": str,
"description": "The Amazon Resource Name (ARN) of an AWS Key "
"Management Service (AWS KMS) key that Lambda "
"uses to encrypt and decrypt environment variable "
"values.",
"default": "",
},
"MemorySize": {
"type": int,
"description": "The amount of memory, in MB, that is allocated "
"to your Lambda function. Default: 128",
"default": 128,
},
"Runtime": {
"type": str,
"description": "The runtime environment for the Lambda function "
"that you are uploading.",
},
"Timeout": {
"type": int,
"description": "The function execution time (in seconds) after "
"which Lambda terminates the function. Default: 3",
"default": 3,
},
"VpcConfig": {
"type": dict,
"description": "If the Lambda function requires access to "
"resources in a VPC, specify a VPC configuration "
"that Lambda uses to set up an elastic network "
"interface (ENI). Valid keys are: "
"SecurityGroupIds (a list of Ids), and SubnetIds "
"(a list of Ids). We automatically add an inline "
"policy to allow the lambda to create ENIs.",
"default": {},
},
"Role": {
"type": str,
"description": "Arn of the Role to create the function as - if "
"not specified, a role will be created with the "
"basic permissions necessary for Lambda to run.",
"default": "",
},
"AliasName": {
"type": str,
"description": "The name of an optional alias.",
"default": "",
},
"AliasVersion": {
"type": str,
"description": "The version string for the alias without the "
"function Arn prepended.",
"default": "$LATEST",
},
"EventSourceMapping": {
"type": dict,
"description": "An optional event source mapping config.",
"default": {},
},
}
def code(self):
return self.get_variables()["Code"]
def dead_letter_config(self):
arn = self.get_variables()["DeadLetterArn"]
dlc = NoValue
if arn:
dlc = awslambda.DeadLetterConfig(TargetArn=arn)
return dlc
def environment(self):
environment = self.get_variables()["Environment"]
env = NoValue
if environment:
env = awslambda.Environment(Variables=environment)
return env
def vpc_config(self):
vpc_config = self.get_variables()["VpcConfig"]
config = NoValue
if vpc_config:
if isinstance(vpc_config['SubnetIds'], str):
vpc_config['SubnetIds'] = vpc_config['SubnetIds'].split(',')
config = awslambda.VPCConfig(**vpc_config)
return config
def extended_policy_statements(self):
"""Override this and add statements to add them to the lambda policy
Returns:
list: A list of :class:`awacs.aws.Statement` objects.
"""
return []
def generate_policy_statements(self):
"""Generates the policy statements for the role used by the function.
To add additional statements you can either override the
`extended_policy_statements` method to return a list of Statements
to be added to the policy, or override this method itself if you
need more control.
Returns:
list: A list of :class:`awacs.aws.Statement` objects.
"""
statements = self._policy_statements
statements.extend(
lambda_basic_execution_statements(
self.function.Ref()
)
)
extended_statements = self.extended_policy_statements()
if extended_statements:
statements.extend(extended_statements)
return statements
def create_policy(self):
t = self.template
self.policy = t.add_resource(
iam.PolicyType(
"Policy",
PolicyName=Sub("${AWS::StackName}-policy"),
PolicyDocument=Policy(
Statement=self.generate_policy_statements()
),
Roles=[self.role.Ref()],
)
)
t.add_output(
Output("PolicyName", Value=Ref(self.policy))
)
def create_role(self):
t = self.template
self.role = t.add_resource(
iam.Role(
"Role",
AssumeRolePolicyDocument=get_lambda_assumerole_policy()
)
)
if self.get_variables()["VpcConfig"]:
# allow this Lambda to modify ENIs to allow it to run in our VPC.
self.role.Policies = [
iam.Policy(
PolicyName=Sub("${AWS::StackName}-vpc-policy"),
PolicyDocument=Policy(
Statement=lambda_vpc_execution_statements()
),
)
]
t.add_output(
Output("RoleName", Value=Ref(self.role))
)
role_arn = self.role.GetAtt("Arn")
self.role_arn = role_arn
t.add_output(
Output("RoleArn", Value=role_arn)
)
def create_function(self):
t = self.template
variables = self.get_variables()
self.function = t.add_resource(
awslambda.Function(
"Function",
Code=self.code(),
DeadLetterConfig=self.dead_letter_config(),
Description=variables["Description"] or NoValue,
Environment=self.environment(),
Handler=variables["Handler"],
KmsKeyArn=variables["KmsKeyArn"] or NoValue,
MemorySize=variables["MemorySize"],
Role=self.role_arn,
Runtime=variables["Runtime"],
Timeout=variables["Timeout"],
VpcConfig=self.vpc_config(),
)
)
t.add_output(
Output("FunctionName", Value=self.function.Ref())
)
t.add_output(
Output("FunctionArn", Value=self.function.GetAtt("Arn"))
)
self.function_version = t.add_resource(
awslambda.Version(
"LatestVersion",
FunctionName=self.function.Ref()
)
)
t.add_output(
Output("LatestVersion",
Value=self.function_version.GetAtt("Version"))
)
t.add_output(
Output("LatestVersionArn",
Value=self.function_version.Ref())
)
alias_name = variables["AliasName"]
if alias_name:
self.alias = t.add_resource(
awslambda.Alias(
"Alias",
Name=alias_name,
FunctionName=self.function.Ref(),
FunctionVersion=variables["AliasVersion"] or "$LATEST",
)
)
t.add_output(Output("AliasArn", Value=self.alias.Ref()))
def create_event_source_mapping(self):
t = self.template
variables = self.get_variables()
mapping = variables["EventSourceMapping"]
if mapping:
if "FunctionName" in mapping:
logger.warn(
Sub("FunctionName defined in EventSourceMapping in "
"${AWS::StackName}. Overriding.")
)
mapping["FunctionName"] = self.function.GetAtt("Arn")
resource = t.add_resource(
awslambda.EventSourceMapping.from_dict(
"EventSourceMapping", mapping
)
)
if not variables["Role"]:
self.add_policy_statements(
stream_reader_statements(
mapping["EventSourceArn"]
)
)
t.add_output(
Output("EventSourceMappingId", Value=resource.Ref())
)
def create_template(self):
variables = self.get_variables()
self._policy_statements = []
role_arn = variables["Role"]
# Set here - used in `create_role` to determine if an external role
# was passed in. If an external role is passed in, no new role is
# created, and no policies are generated/added to the external
# role.
self.role_arn = role_arn
if not role_arn:
self.create_role()
self.create_function()
self.create_event_source_mapping()
# We don't use self.role_arn here because it is set internally if a
# role is created
if not role_arn:
self.create_policy()
|
remind101/stacker_blueprints | stacker_blueprints/aws_lambda.py | Function.generate_policy_statements | python | def generate_policy_statements(self):
statements = self._policy_statements
statements.extend(
lambda_basic_execution_statements(
self.function.Ref()
)
)
extended_statements = self.extended_policy_statements()
if extended_statements:
statements.extend(extended_statements)
return statements | Generates the policy statements for the role used by the function.
To add additional statements you can either override the
`extended_policy_statements` method to return a list of Statements
to be added to the policy, or override this method itself if you
need more control.
Returns:
list: A list of :class:`awacs.aws.Statement` objects. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/aws_lambda.py#L240-L260 | [
"def lambda_basic_execution_statements(function_name):\n log_group = Join(\"/\", [\"/aws/lambda\", function_name])\n return cloudwatch_logs_write_statements(log_group)\n"
] | class Function(Blueprint):
VARIABLES = {
"Code": {
"type": awslambda.Code,
"description": "The troposphere.awslambda.Code object "
"returned by the aws lambda hook.",
},
"DeadLetterArn": {
"type": str,
"description": "Dead Letter Queue (DLQ) Arn (SQS, SNS, etc) "
"that AWS Lambda (Lambda) sends events to when it "
"can't process them.",
"default": "",
},
"Description": {
"type": str,
"description": "Description of the function.",
"default": "",
},
"Environment": {
"type": dict,
"description": "Key-value pairs that Lambda caches and makes "
"available for your Lambda functions.",
"default": {},
},
"Handler": {
"type": str,
"description": "The name of the function (within your source "
"code) that Lambda calls to start running your "
"code.",
"default": "handler",
},
"KmsKeyArn": {
"type": str,
"description": "The Amazon Resource Name (ARN) of an AWS Key "
"Management Service (AWS KMS) key that Lambda "
"uses to encrypt and decrypt environment variable "
"values.",
"default": "",
},
"MemorySize": {
"type": int,
"description": "The amount of memory, in MB, that is allocated "
"to your Lambda function. Default: 128",
"default": 128,
},
"Runtime": {
"type": str,
"description": "The runtime environment for the Lambda function "
"that you are uploading.",
},
"Timeout": {
"type": int,
"description": "The function execution time (in seconds) after "
"which Lambda terminates the function. Default: 3",
"default": 3,
},
"VpcConfig": {
"type": dict,
"description": "If the Lambda function requires access to "
"resources in a VPC, specify a VPC configuration "
"that Lambda uses to set up an elastic network "
"interface (ENI). Valid keys are: "
"SecurityGroupIds (a list of Ids), and SubnetIds "
"(a list of Ids). We automatically add an inline "
"policy to allow the lambda to create ENIs.",
"default": {},
},
"Role": {
"type": str,
"description": "Arn of the Role to create the function as - if "
"not specified, a role will be created with the "
"basic permissions necessary for Lambda to run.",
"default": "",
},
"AliasName": {
"type": str,
"description": "The name of an optional alias.",
"default": "",
},
"AliasVersion": {
"type": str,
"description": "The version string for the alias without the "
"function Arn prepended.",
"default": "$LATEST",
},
"EventSourceMapping": {
"type": dict,
"description": "An optional event source mapping config.",
"default": {},
},
}
def code(self):
return self.get_variables()["Code"]
def dead_letter_config(self):
arn = self.get_variables()["DeadLetterArn"]
dlc = NoValue
if arn:
dlc = awslambda.DeadLetterConfig(TargetArn=arn)
return dlc
def environment(self):
environment = self.get_variables()["Environment"]
env = NoValue
if environment:
env = awslambda.Environment(Variables=environment)
return env
def vpc_config(self):
vpc_config = self.get_variables()["VpcConfig"]
config = NoValue
if vpc_config:
if isinstance(vpc_config['SubnetIds'], str):
vpc_config['SubnetIds'] = vpc_config['SubnetIds'].split(',')
config = awslambda.VPCConfig(**vpc_config)
return config
def add_policy_statements(self, statements):
"""Adds statements to the policy.
Args:
statements (:class:`awacs.aws.Statement` or list): Either a single
Statment, or a list of statements.
"""
if isinstance(statements, Statement):
statements = [statements]
self._policy_statements.extend(statements)
def extended_policy_statements(self):
"""Override this and add statements to add them to the lambda policy
Returns:
list: A list of :class:`awacs.aws.Statement` objects.
"""
return []
def create_policy(self):
t = self.template
self.policy = t.add_resource(
iam.PolicyType(
"Policy",
PolicyName=Sub("${AWS::StackName}-policy"),
PolicyDocument=Policy(
Statement=self.generate_policy_statements()
),
Roles=[self.role.Ref()],
)
)
t.add_output(
Output("PolicyName", Value=Ref(self.policy))
)
def create_role(self):
t = self.template
self.role = t.add_resource(
iam.Role(
"Role",
AssumeRolePolicyDocument=get_lambda_assumerole_policy()
)
)
if self.get_variables()["VpcConfig"]:
# allow this Lambda to modify ENIs to allow it to run in our VPC.
self.role.Policies = [
iam.Policy(
PolicyName=Sub("${AWS::StackName}-vpc-policy"),
PolicyDocument=Policy(
Statement=lambda_vpc_execution_statements()
),
)
]
t.add_output(
Output("RoleName", Value=Ref(self.role))
)
role_arn = self.role.GetAtt("Arn")
self.role_arn = role_arn
t.add_output(
Output("RoleArn", Value=role_arn)
)
def create_function(self):
t = self.template
variables = self.get_variables()
self.function = t.add_resource(
awslambda.Function(
"Function",
Code=self.code(),
DeadLetterConfig=self.dead_letter_config(),
Description=variables["Description"] or NoValue,
Environment=self.environment(),
Handler=variables["Handler"],
KmsKeyArn=variables["KmsKeyArn"] or NoValue,
MemorySize=variables["MemorySize"],
Role=self.role_arn,
Runtime=variables["Runtime"],
Timeout=variables["Timeout"],
VpcConfig=self.vpc_config(),
)
)
t.add_output(
Output("FunctionName", Value=self.function.Ref())
)
t.add_output(
Output("FunctionArn", Value=self.function.GetAtt("Arn"))
)
self.function_version = t.add_resource(
awslambda.Version(
"LatestVersion",
FunctionName=self.function.Ref()
)
)
t.add_output(
Output("LatestVersion",
Value=self.function_version.GetAtt("Version"))
)
t.add_output(
Output("LatestVersionArn",
Value=self.function_version.Ref())
)
alias_name = variables["AliasName"]
if alias_name:
self.alias = t.add_resource(
awslambda.Alias(
"Alias",
Name=alias_name,
FunctionName=self.function.Ref(),
FunctionVersion=variables["AliasVersion"] or "$LATEST",
)
)
t.add_output(Output("AliasArn", Value=self.alias.Ref()))
def create_event_source_mapping(self):
t = self.template
variables = self.get_variables()
mapping = variables["EventSourceMapping"]
if mapping:
if "FunctionName" in mapping:
logger.warn(
Sub("FunctionName defined in EventSourceMapping in "
"${AWS::StackName}. Overriding.")
)
mapping["FunctionName"] = self.function.GetAtt("Arn")
resource = t.add_resource(
awslambda.EventSourceMapping.from_dict(
"EventSourceMapping", mapping
)
)
if not variables["Role"]:
self.add_policy_statements(
stream_reader_statements(
mapping["EventSourceArn"]
)
)
t.add_output(
Output("EventSourceMappingId", Value=resource.Ref())
)
def create_template(self):
variables = self.get_variables()
self._policy_statements = []
role_arn = variables["Role"]
# Set here - used in `create_role` to determine if an external role
# was passed in. If an external role is passed in, no new role is
# created, and no policies are generated/added to the external
# role.
self.role_arn = role_arn
if not role_arn:
self.create_role()
self.create_function()
self.create_event_source_mapping()
# We don't use self.role_arn here because it is set internally if a
# role is created
if not role_arn:
self.create_policy()
|
remind101/stacker_blueprints | stacker_blueprints/dynamodb.py | snake_to_camel_case | python | def snake_to_camel_case(name):
name = name.replace("-", "_")
return "".join(word.capitalize() for word in name.split("_")) | Accept a snake_case string and return a CamelCase string.
For example::
>>> snake_to_camel_case('cidr_block')
'CidrBlock' | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/dynamodb.py#L39-L47 | null | from stacker.blueprints.base import Blueprint
from stacker.blueprints.variables.types import TroposphereType
from troposphere import (
iam,
applicationautoscaling as aas,
dynamodb,
Ref,
GetAtt,
Output,
Sub,
)
from .policies import (
dynamodb_autoscaling_policy,
)
# TODO: Factor out the below two functions, once this PR is merged:
# https://github.com/cloudtools/awacs/pull/93
# from awacs.helpers.trust import get_application_autoscaling_assumerole_policy
from awacs.helpers.trust import make_simple_assume_policy
def make_service_domain_name(service, region=''):
"""Helper function for creating proper service domain names."""
tld = ".com.cn" if region == "cn-north-1" else ".com"
return "{}.amazonaws{}".format(service, tld)
def get_application_autoscaling_assumerole_policy(region=''):
""" Helper function for building the AWS Lambda AssumeRole Policy"""
service = make_service_domain_name('application-autoscaling', region)
return make_simple_assume_policy(service)
# end of TODO.
class DynamoDB(Blueprint):
"""Manages the creation of DynamoDB tables.
Example::
- name: users
class_path: stacker_blueprints.dynamodb.DynamoDB
variables:
Tables:
UserTable:
TableName: prod-user-table
KeySchema:
- AttributeName: id
KeyType: HASH
- AttributeName: name
KeyType: RANGE
AttributeDefinitions:
- AttributeName: id
AttributeType: S
- AttributeName: name
AttributeType: S
ProvisionedThroughput:
ReadCapacityUnits: 5
WriteCapacityUnits: 5
StreamSpecification:
StreamViewType: ALL
"""
VARIABLES = {
"Tables": {
"type": TroposphereType(dynamodb.Table, many=True),
"description": "DynamoDB tables to create.",
}
}
def create_template(self):
t = self.template
variables = self.get_variables()
for table in variables["Tables"]:
t.add_resource(table)
stream_enabled = table.properties.get("StreamSpecification")
if stream_enabled:
t.add_output(Output("{}StreamArn".format(table.title),
Value=GetAtt(table, "StreamArn")))
t.add_output(Output("{}Name".format(table.title),
Value=Ref(table)))
class AutoScaling(Blueprint):
"""Manages the AutoScaling of DynamoDB tables.
Ref: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dynamodb-table.html#cfn-dynamodb-table-examples-application-autoscaling # noqa
Example::
- name: dynamodb-autoscaling
class_path: stacker_blueprints.dynamodb.AutoScaling
variables:
AutoScalingConfigs:
- table: test-user-table
read:
min: 5
max: 100
target: 75.0
write:
min: 5
max: 50
target: 80.0
- table: test-group-table
read:
min: 10
max: 50
scale-in-cooldown: 180
scale-out-cooldown: 180
write:
max: 25
"""
VARIABLES = {
"AutoScalingConfigs": {
"type": list,
"description": "A list of dicts, each of which represent "
"a DynamoDB AutoScaling Configuration.",
}
}
# reference: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dynamodb-table.html#cfn-dynamodb-table-examples-application-autoscaling # noqa
def create_scaling_iam_role(self):
assumerole_policy = get_application_autoscaling_assumerole_policy()
return self.template.add_resource(
iam.Role(
"Role",
Policies=[
iam.Policy(
PolicyName=Sub(
"${AWS::StackName}-dynamodb-autoscaling"
),
PolicyDocument=dynamodb_autoscaling_policy(self.tables)
)
],
AssumeRolePolicyDocument=assumerole_policy
)
)
def create_scalable_target_and_scaling_policy(self, table, asc, capacity_type="read"): # noqa
capacity_type = capacity_type.title()
if capacity_type not in ("Read", "Write"):
raise Exception("capacity_type must be either `read` or `write`.")
dimension = "dynamodb:table:{}CapacityUnits".format(capacity_type)
camel_table = snake_to_camel_case(table)
scalable_target_name = "{}{}ScalableTarget".format(
camel_table,
capacity_type,
)
scalable_target = self.template.add_resource(
aas.ScalableTarget(
scalable_target_name,
MinCapacity=asc.get("min", 1),
MaxCapacity=asc.get("max", 1000),
ResourceId="table/{}".format(table),
RoleARN=self.iam_role_arn,
ScalableDimension=dimension,
ServiceNamespace="dynamodb"
)
)
# https://docs.aws.amazon.com/autoscaling/application/APIReference/API_PredefinedMetricSpecification.html # noqa
predefined_metric_spec = aas.PredefinedMetricSpecification(
PredefinedMetricType="DynamoDB{}CapacityUtilization".format(
capacity_type
)
)
ttspc = aas.TargetTrackingScalingPolicyConfiguration(
TargetValue=asc.get("target", 50.0),
ScaleInCooldown=asc.get("scale-in-cooldown", 60),
ScaleOutCooldown=asc.get("scale-out-cooldown", 60),
PredefinedMetricSpecification=predefined_metric_spec,
)
scaling_policy_name = "{}{}ScalablePolicy".format(
camel_table,
capacity_type,
)
# dynamodb only supports TargetTrackingScaling polcy type.
# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-applicationautoscaling-scalingpolicy.html#cfn-applicationautoscaling-scalingpolicy-policytype # noqa
self.template.add_resource(
aas.ScalingPolicy(
scaling_policy_name,
PolicyName=scaling_policy_name,
PolicyType="TargetTrackingScaling",
ScalingTargetId=scalable_target.ref(),
TargetTrackingScalingPolicyConfiguration=ttspc,
)
)
def create_template(self):
variables = self.get_variables()
self.auto_scaling_configs = variables["AutoScalingConfigs"]
self.tables = [config["table"] for config in self.auto_scaling_configs]
self.iam_role = self.create_scaling_iam_role()
self.iam_role_arn = GetAtt(self.iam_role, "Arn")
for table_asc in self.auto_scaling_configs:
self.create_scalable_target_and_scaling_policy(
table_asc["table"], table_asc["read"], "read"
)
self.create_scalable_target_and_scaling_policy(
table_asc["table"], table_asc["write"], "write"
)
|
remind101/stacker_blueprints | stacker_blueprints/generic.py | GenericResourceCreator.setup_resource | python | def setup_resource(self):
template = self.template
variables = self.get_variables()
tclass = variables['Class']
tprops = variables['Properties']
output = variables['Output']
klass = load_object_from_string('troposphere.' + tclass)
instance = klass.from_dict('ResourceRefName', tprops)
template.add_resource(instance)
template.add_output(Output(
output,
Description="A reference to the object created in this blueprint",
Value=Ref(instance)
)) | Setting Up Resource | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/generic.py#L57-L75 | null | class GenericResourceCreator(Blueprint):
""" Generic Blueprint for creating a resource
Example config - this would create a stack with a single resource in it,
an ec2.Volume resource:
- name: generic-resource-volume
class_path: blueprints.generic.GenericResourceCreator
variables:
Class: ec2.Volume
Output: VolumeId
Properties:
VolumeType: gp2
Size: 5
Encrypted: true
AvailabilityZone: us-east-1b
"""
VARIABLES = {
'Class':
{'type': str,
'description': 'The troposphere class to create, '
'e.g.: ec2.Volume'},
'Output':
{'type': str,
'description': 'The output field that should be created, '
'e.g.: VolumeId'},
'Properties':
{'type': dict,
'description': 'The list of properties to use for the '
'Troposphere class'},
}
def add_cfn_description(self):
""" Boilerplate for CFN Template
*** NOTE *** Template Version Reminder
Make Sure you bump up the template version number above if submitting
updates to the repo. This is the only way we can tell which version of
a template is in place on a running resouce.
"""
template = self.template
template.add_version('2010-09-09')
template.add_description('Generic Resource Creator - 1.0.0')
def create_template(self):
""" Create the CFN template """
self.add_cfn_description()
self.setup_resource()
|
remind101/stacker_blueprints | stacker_blueprints/kms.py | kms_key_policy | python | def kms_key_policy():
statements = []
statements.extend(kms_key_root_statements())
return Policy(
Version="2012-10-17",
Id="root-account-access",
Statement=statements
) | Creates a key policy for use of a KMS Key. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/kms.py#L40-L50 | [
"def kms_key_root_statements():\n root_arn = Join(\":\", [\"arn:aws:iam:\", Ref(\"AWS::AccountId\"), \"root\"])\n\n return [\n Statement(\n Sid=\"Enable IAM User Permissions\",\n Effect=Allow,\n Principal=AWSPrincipal(root_arn),\n Action=[\n aw... | import logging
from awacs.aws import (
Allow,
AWSPrincipal,
Policy,
Statement,
)
import awacs.kms
from troposphere import (
Join,
Output,
Ref,
kms,
)
from stacker.blueprints.base import Blueprint
logger = logging.getLogger(__name__)
def kms_key_root_statements():
root_arn = Join(":", ["arn:aws:iam:", Ref("AWS::AccountId"), "root"])
return [
Statement(
Sid="Enable IAM User Permissions",
Effect=Allow,
Principal=AWSPrincipal(root_arn),
Action=[
awacs.kms.Action("*"),
],
Resource=["*"]
)
]
class Key(Blueprint):
VARIABLES = {
"KeyAlias": {
"type": str,
"description": "The alias to give the key.",
"default": "",
},
"Properties": {
"type": dict,
"description": "A dictionary of KMS key attributes which should "
"match the attributes for AWS::KMS::Key "
"Cloudformation resource. Note: You should "
"not supply a `KeyPolicy` attribute.",
"default": {},
},
"Attributes": {
"type": dict,
"description": "Deprecated. Use Properties instead.",
"default": {},
}
}
def generate_key_policy(self):
return kms_key_policy()
def create_template(self):
t = self.template
variables = self.get_variables()
key_policy = self.generate_key_policy()
props = variables["Properties"]
if variables["Attributes"]:
raise DeprecationWarning(
"Attributes was deprecated, use Properties instead.")
if "KeyPolicy" in props:
logger.warning("KeyPolicy provided, but not used. To write "
"your own policy you need to subclass this "
"blueprint and override `generate_key_policy`.")
props["KeyPolicy"] = key_policy
key = t.add_resource(
kms.Key.from_dict("Key", props)
)
key_arn = Join(
"",
[
"arn:aws:kms:",
Ref("AWS::Region"),
":",
Ref("AWS::AccountId"),
":key/",
Ref(key)
]
)
t.add_output(Output("KeyArn", Value=key_arn))
t.add_output(Output("KeyId", Value=Ref(key)))
key_alias = variables["KeyAlias"]
if key_alias:
if not key_alias.startswith("alias/"):
key_alias = "alias/%s" % key_alias
alias = t.add_resource(
kms.Alias(
"Alias",
AliasName="%s" % key_alias,
TargetKeyId=Ref(key)
)
)
t.add_output(Output("KeyAlias", Value=Ref(alias)))
|
remind101/stacker_blueprints | stacker_blueprints/empire/policies.py | logstream_policy | python | def logstream_policy():
p = Policy(
Statement=[
Statement(
Effect=Allow,
Resource=["*"],
Action=[
kinesis.CreateStream, kinesis.DescribeStream,
Action(kinesis.prefix, "AddTagsToStream"),
Action(kinesis.prefix, "PutRecords")
])])
return p | Policy needed for logspout -> kinesis log streaming. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/empire/policies.py#L245-L257 | null | import logging
from awacs import (
awslambda,
ecs,
ec2,
events,
iam,
route53,
kinesis,
sns,
logs,
sqs,
s3,
cloudformation,
elasticloadbalancing as elb,
ecr,
)
from awacs.aws import (
Statement,
Allow,
Policy,
Action,
Principal,
Condition,
SourceArn,
ArnEquals,
)
from troposphere import (
Ref,
Join,
)
logger = logging.getLogger(__name__)
def ecs_agent_policy():
    """Return an awacs Policy for the ECS agent on cluster instances.

    Grants the cluster bookkeeping/telemetry actions the agent calls plus
    read-only ECR actions so instances can pull container images.
    """
    p = Policy(
        Statement=[
            # ECS agent bookkeeping: register/deregister this instance,
            # discover the poll endpoint, submit state and telemetry.
            Statement(
                Effect=Allow,
                Resource=["*"],
                Action=[
                    ecs.CreateCluster,
                    ecs.RegisterContainerInstance,
                    ecs.DeregisterContainerInstance,
                    ecs.DiscoverPollEndpoint,
                    ecs.Action("Submit*"),
                    ecs.Poll,
                    ecs.Action("StartTelemetrySession")]),
            # Read-only ECR access for pulling images.
            Statement(
                Effect=Allow,
                Action=[
                    ecr.GetAuthorizationToken,
                    ecr.BatchCheckLayerAvailability,
                    ecr.GetDownloadUrlForLayer,
                    ecr.BatchGetImage,
                ],
                Resource=["*"],
            ),
        ]
    )
    return p
def service_role_policy():
    """Return an awacs Policy for the ECS service role.

    Lets ECS register/deregister instances and targets with classic and
    application load balancers and describe EC2/ELB resources.
    """
    p = Policy(
        Statement=[
            Statement(
                Effect=Allow,
                Resource=["*"],
                Action=[
                    ec2.AuthorizeSecurityGroupIngress,
                    Action("ec2", "Describe*"),
                    elb.DeregisterInstancesFromLoadBalancer,
                    Action("elasticloadbalancing", "Describe*"),
                    elb.RegisterInstancesWithLoadBalancer,
                    # ALB/NLB target actions, built with Action() —
                    # presumably not yet exposed as awacs constants; verify.
                    elb.Action("RegisterTargets"),
                    elb.Action("DeregisterTargets"),
                ]
            )
        ]
    )
    return p
def empire_policy(resources):
    """Return the IAM policy for the Empire daemon itself.

    Args:
        resources (dict): mapping with the ARNs/names Empire operates on.
            Keys read here: "CustomResourcesTopic", "CustomResourcesQueue",
            "TemplateBucket", "Environment".
    """
    p = Policy(
        Statement=[
            # Publish custom-resource events to SNS.
            Statement(
                Effect=Allow,
                Resource=[resources['CustomResourcesTopic']],
                Action=[sns.Publish]),
            # Consume custom-resource responses from SQS.
            Statement(
                Effect=Allow,
                Resource=[resources['CustomResourcesQueue']],
                Action=[
                    sqs.ReceiveMessage,
                    sqs.DeleteMessage,
                    sqs.ChangeMessageVisibility
                ]),
            # Read/write CloudFormation templates in the template bucket.
            Statement(
                Effect=Allow,
                Resource=[resources['TemplateBucket']],
                Action=[
                    s3.PutObject,
                    s3.PutObjectAcl,
                    s3.PutObjectVersionAcl,
                    s3.GetObject,
                    s3.GetObjectVersion,
                    s3.GetObjectAcl,
                    s3.GetObjectVersionAcl]),
            # Manage Lambda functions (e.g. for custom resources).
            Statement(
                Effect=Allow,
                Resource=["*"],
                Action=[
                    awslambda.CreateFunction,
                    awslambda.DeleteFunction,
                    awslambda.UpdateFunctionCode,
                    awslambda.GetFunctionConfiguration,
                    awslambda.AddPermission,
                    awslambda.RemovePermission]),
            # Manage CloudWatch Events rules and targets.
            Statement(
                Effect=Allow,
                Resource=["*"],
                Action=[
                    events.PutRule,
                    events.DeleteRule,
                    events.DescribeRule,
                    events.EnableRule,
                    events.DisableRule,
                    events.PutTargets,
                    events.RemoveTargets]),
            # Full lifecycle on CloudFormation stacks, but only those whose
            # names start with "<Environment>-".
            Statement(
                Effect=Allow,
                Resource=[
                    Join('', [
                        'arn:aws:cloudformation:', Ref('AWS::Region'), ':',
                        Ref('AWS::AccountId'), ':stack/',
                        resources['Environment'], '-*'])],
                Action=[
                    cloudformation.CreateStack,
                    cloudformation.UpdateStack,
                    cloudformation.DeleteStack,
                    cloudformation.ListStackResources,
                    cloudformation.DescribeStackResource,
                    cloudformation.DescribeStacks]),
            # ValidateTemplate does not support resource-level restrictions.
            Statement(
                Effect=Allow,
                Resource=['*'],
                Action=[cloudformation.ValidateTemplate]),
            # Manage ECS services, task definitions and tasks.
            Statement(
                Effect=Allow,
                Resource=["*"],
                Action=[ecs.CreateService, ecs.DeleteService,
                        ecs.DeregisterTaskDefinition,
                        ecs.Action("Describe*"), ecs.Action("List*"),
                        ecs.RegisterTaskDefinition, ecs.RunTask,
                        ecs.StartTask, ecs.StopTask, ecs.SubmitTaskStateChange,
                        ecs.UpdateService]),
            # Manage classic and application/network load balancers.
            Statement(
                Effect=Allow,
                # TODO: Limit to specific ELB?
                Resource=["*"],
                Action=[
                    elb.Action("Describe*"),
                    elb.AddTags,
                    elb.CreateLoadBalancer,
                    elb.CreateLoadBalancerListeners,
                    elb.DescribeTags,
                    elb.DeleteLoadBalancer,
                    elb.ConfigureHealthCheck,
                    elb.ModifyLoadBalancerAttributes,
                    elb.SetLoadBalancerListenerSSLCertificate,
                    elb.SetLoadBalancerPoliciesOfListener,
                    elb.Action("CreateTargetGroup"),
                    elb.Action("CreateListener"),
                    elb.Action("DeleteListener"),
                    elb.Action("DeleteTargetGroup"),
                    elb.Action("ModifyTargetGroup"),
                    elb.Action("ModifyTargetGroupAttributes"),
                ]
            ),
            Statement(
                Effect=Allow,
                Resource=["*"],
                Action=[ec2.DescribeSubnets, ec2.DescribeSecurityGroups]
            ),
            # Server certificates for HTTPS listeners; PassRole so Empire can
            # hand roles to the resources it creates.
            Statement(
                Effect=Allow,
                Action=[iam.GetServerCertificate, iam.UploadServerCertificate,
                        iam.DeleteServerCertificate, iam.PassRole],
                Resource=["*"]
            ),
            # DNS management in Route53.
            Statement(
                Effect=Allow,
                Action=[
                    Action("route53", "ListHostedZonesByName"),
                    route53.ChangeResourceRecordSets,
                    route53.ListHostedZones,
                    route53.GetHostedZone,
                    route53.GetChange,
                ],
                # TODO: Limit to specific zones
                Resource=["*"]
            ),
            # Read/write Kinesis streams (log streaming).
            Statement(
                Effect=Allow,
                Action=[
                    kinesis.DescribeStream,
                    Action(kinesis.prefix, "Get*"),
                    Action(kinesis.prefix, "List*"),
                    kinesis.PutRecord,
                ],
                Resource=["*"]
            ),
            # Read-only ECR access for pulling images.
            Statement(
                Effect=Allow,
                Action=[
                    ecr.GetAuthorizationToken,
                    ecr.BatchCheckLayerAvailability,
                    ecr.GetDownloadUrlForLayer,
                    ecr.BatchGetImage,
                ],
                Resource=["*"],
            ),
        ]
    )
    return p
def sns_events_policy(topic_arn):
    """Return a Policy allowing publishing to the given SNS topic.

    Args:
        topic_arn: ARN of the SNS topic that may be published to.
    """
    p = Policy(
        Statement=[
            Statement(
                Effect=Allow,
                Action=[sns.Publish],
                Resource=[topic_arn],
            )])
    return p
def runlogs_policy(log_group_ref):
    """Policy needed for Empire -> Cloudwatch logs to record run output.

    Args:
        log_group_ref: name (or Ref) of the CloudWatch Logs log group; access
            is scoped to the log streams inside that group.
    """
    p = Policy(
        Statement=[
            Statement(
                Effect=Allow,
                Resource=[
                    # Any stream in the given log group, in any region/account.
                    Join('', [
                        'arn:aws:logs:*:*:log-group:',
                        log_group_ref,
                        ':log-stream:*'])],
                Action=[
                    logs.CreateLogStream,
                    logs.PutLogEvents,
                ])])
    return p
def sns_to_sqs_policy(topic):
    """Return a queue policy letting an SNS topic deliver to the queue.

    Allows any principal to SendMessage, but the ArnEquals condition on
    aws:SourceArn restricts delivery to messages originating from *topic*.
    """
    p = Policy(
        Statement=[
            Statement(
                Effect=Allow,
                Principal=Principal('*'),
                Action=[sqs.SendMessage],
                Resource=["*"],
                Condition=Condition(ArnEquals(SourceArn, topic)))])
    return p
|
remind101/stacker_blueprints | stacker_blueprints/empire/policies.py | runlogs_policy | python | def runlogs_policy(log_group_ref):
p = Policy(
Statement=[
Statement(
Effect=Allow,
Resource=[
Join('', [
'arn:aws:logs:*:*:log-group:',
log_group_ref,
':log-stream:*'])],
Action=[
logs.CreateLogStream,
logs.PutLogEvents,
])])
return p | Policy needed for Empire -> Cloudwatch logs to record run output. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/empire/policies.py#L260-L275 | null | import logging
from awacs import (
awslambda,
ecs,
ec2,
events,
iam,
route53,
kinesis,
sns,
logs,
sqs,
s3,
cloudformation,
elasticloadbalancing as elb,
ecr,
)
from awacs.aws import (
Statement,
Allow,
Policy,
Action,
Principal,
Condition,
SourceArn,
ArnEquals,
)
from troposphere import (
Ref,
Join,
)
logger = logging.getLogger(__name__)
def ecs_agent_policy():
p = Policy(
Statement=[
Statement(
Effect=Allow,
Resource=["*"],
Action=[
ecs.CreateCluster,
ecs.RegisterContainerInstance,
ecs.DeregisterContainerInstance,
ecs.DiscoverPollEndpoint,
ecs.Action("Submit*"),
ecs.Poll,
ecs.Action("StartTelemetrySession")]),
Statement(
Effect=Allow,
Action=[
ecr.GetAuthorizationToken,
ecr.BatchCheckLayerAvailability,
ecr.GetDownloadUrlForLayer,
ecr.BatchGetImage,
],
Resource=["*"],
),
]
)
return p
def service_role_policy():
p = Policy(
Statement=[
Statement(
Effect=Allow,
Resource=["*"],
Action=[
ec2.AuthorizeSecurityGroupIngress,
Action("ec2", "Describe*"),
elb.DeregisterInstancesFromLoadBalancer,
Action("elasticloadbalancing", "Describe*"),
elb.RegisterInstancesWithLoadBalancer,
elb.Action("RegisterTargets"),
elb.Action("DeregisterTargets"),
]
)
]
)
return p
def empire_policy(resources):
p = Policy(
Statement=[
Statement(
Effect=Allow,
Resource=[resources['CustomResourcesTopic']],
Action=[sns.Publish]),
Statement(
Effect=Allow,
Resource=[resources['CustomResourcesQueue']],
Action=[
sqs.ReceiveMessage,
sqs.DeleteMessage,
sqs.ChangeMessageVisibility
]),
Statement(
Effect=Allow,
Resource=[resources['TemplateBucket']],
Action=[
s3.PutObject,
s3.PutObjectAcl,
s3.PutObjectVersionAcl,
s3.GetObject,
s3.GetObjectVersion,
s3.GetObjectAcl,
s3.GetObjectVersionAcl]),
Statement(
Effect=Allow,
Resource=["*"],
Action=[
awslambda.CreateFunction,
awslambda.DeleteFunction,
awslambda.UpdateFunctionCode,
awslambda.GetFunctionConfiguration,
awslambda.AddPermission,
awslambda.RemovePermission]),
Statement(
Effect=Allow,
Resource=["*"],
Action=[
events.PutRule,
events.DeleteRule,
events.DescribeRule,
events.EnableRule,
events.DisableRule,
events.PutTargets,
events.RemoveTargets]),
Statement(
Effect=Allow,
Resource=[
Join('', [
'arn:aws:cloudformation:', Ref('AWS::Region'), ':',
Ref('AWS::AccountId'), ':stack/',
resources['Environment'], '-*'])],
Action=[
cloudformation.CreateStack,
cloudformation.UpdateStack,
cloudformation.DeleteStack,
cloudformation.ListStackResources,
cloudformation.DescribeStackResource,
cloudformation.DescribeStacks]),
Statement(
Effect=Allow,
Resource=['*'],
Action=[cloudformation.ValidateTemplate]),
Statement(
Effect=Allow,
Resource=["*"],
Action=[ecs.CreateService, ecs.DeleteService,
ecs.DeregisterTaskDefinition,
ecs.Action("Describe*"), ecs.Action("List*"),
ecs.RegisterTaskDefinition, ecs.RunTask,
ecs.StartTask, ecs.StopTask, ecs.SubmitTaskStateChange,
ecs.UpdateService]),
Statement(
Effect=Allow,
# TODO: Limit to specific ELB?
Resource=["*"],
Action=[
elb.Action("Describe*"),
elb.AddTags,
elb.CreateLoadBalancer,
elb.CreateLoadBalancerListeners,
elb.DescribeTags,
elb.DeleteLoadBalancer,
elb.ConfigureHealthCheck,
elb.ModifyLoadBalancerAttributes,
elb.SetLoadBalancerListenerSSLCertificate,
elb.SetLoadBalancerPoliciesOfListener,
elb.Action("CreateTargetGroup"),
elb.Action("CreateListener"),
elb.Action("DeleteListener"),
elb.Action("DeleteTargetGroup"),
elb.Action("ModifyTargetGroup"),
elb.Action("ModifyTargetGroupAttributes"),
]
),
Statement(
Effect=Allow,
Resource=["*"],
Action=[ec2.DescribeSubnets, ec2.DescribeSecurityGroups]
),
Statement(
Effect=Allow,
Action=[iam.GetServerCertificate, iam.UploadServerCertificate,
iam.DeleteServerCertificate, iam.PassRole],
Resource=["*"]
),
Statement(
Effect=Allow,
Action=[
Action("route53", "ListHostedZonesByName"),
route53.ChangeResourceRecordSets,
route53.ListHostedZones,
route53.GetHostedZone,
route53.GetChange,
],
# TODO: Limit to specific zones
Resource=["*"]
),
Statement(
Effect=Allow,
Action=[
kinesis.DescribeStream,
Action(kinesis.prefix, "Get*"),
Action(kinesis.prefix, "List*"),
kinesis.PutRecord,
],
Resource=["*"]
),
Statement(
Effect=Allow,
Action=[
ecr.GetAuthorizationToken,
ecr.BatchCheckLayerAvailability,
ecr.GetDownloadUrlForLayer,
ecr.BatchGetImage,
],
Resource=["*"],
),
]
)
return p
def sns_events_policy(topic_arn):
p = Policy(
Statement=[
Statement(
Effect=Allow,
Action=[sns.Publish],
Resource=[topic_arn],
)])
return p
def logstream_policy():
    """Policy needed for logspout -> kinesis log streaming."""
    p = Policy(
        Statement=[
            Statement(
                Effect=Allow,
                Resource=["*"],
                Action=[
                    kinesis.CreateStream, kinesis.DescribeStream,
                    # Built via Action() — presumably these constants were
                    # missing from awacs.kinesis at the time; verify.
                    Action(kinesis.prefix, "AddTagsToStream"),
                    Action(kinesis.prefix, "PutRecords")
                ])])
    return p
def sns_to_sqs_policy(topic):
p = Policy(
Statement=[
Statement(
Effect=Allow,
Principal=Principal('*'),
Action=[sqs.SendMessage],
Resource=["*"],
Condition=Condition(ArnEquals(SourceArn, topic)))])
return p
|
remind101/stacker_blueprints | stacker_blueprints/util.py | check_properties | python | def check_properties(properties, allowed_properties, resource):
for key in properties.keys():
if key not in allowed_properties:
raise ValueError(
"%s is not a valid property of %s" % (key, resource)
) | Checks the list of properties in the properties variable against the
property list provided by the allowed_properties variable. If any property
does not match the properties in allowed_properties, a ValueError is
raised to prevent unexpected behavior when creating resources.
properties: The config (as dict) provided by the configuration file
allowed_properties: A list of strings representing the available params
for a resource.
resource: A string naming the resource in question for the error
message. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/util.py#L6-L22 | null | from collections import Mapping
from troposphere import Tags
def _tags_to_dict(tag_list):
return dict((tag['Key'], tag['Value']) for tag in tag_list)
def merge_tags(left, right, factory=Tags):
    """
    Merge two sets of tags into a new troposphere object

    Args:
        left (Union[dict, troposphere.Tags]): dictionary or Tags object to be
            merged with lower priority
        right (Union[dict, troposphere.Tags]): dictionary or Tags object to be
            merged with higher priority
        factory (type): Type of object to create. Defaults to the troposphere
            Tags class.

    Returns:
        An instance of *factory* built from the merged key/value pairs.
    """
    if isinstance(left, Mapping):
        tags = dict(left)
    elif hasattr(left, 'tags'):
        tags = _tags_to_dict(left.tags)
    else:
        tags = _tags_to_dict(left)

    if isinstance(right, Mapping):
        tags.update(right)
    # BUG FIX: this branch previously tested hasattr(left, 'tags') but then
    # read right.tags, raising AttributeError (or mis-treating a raw tag
    # list) whenever left and right had different shapes.
    elif hasattr(right, 'tags'):
        tags.update(_tags_to_dict(right.tags))
    else:
        tags.update(_tags_to_dict(right))

    return factory(**tags)
|
remind101/stacker_blueprints | stacker_blueprints/util.py | merge_tags | python | def merge_tags(left, right, factory=Tags):
if isinstance(left, Mapping):
tags = dict(left)
elif hasattr(left, 'tags'):
tags = _tags_to_dict(left.tags)
else:
tags = _tags_to_dict(left)
if isinstance(right, Mapping):
tags.update(right)
elif hasattr(left, 'tags'):
tags.update(_tags_to_dict(right.tags))
else:
tags.update(_tags_to_dict(right))
return factory(**tags) | Merge two sets of tags into a new troposphere object
Args:
left (Union[dict, troposphere.Tags]): dictionary or Tags object to be
merged with lower priority
right (Union[dict, troposphere.Tags]): dictionary or Tags object to be
merged with higher priority
factory (type): Type of object to create. Defaults to the troposphere
Tags class. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/util.py#L29-L56 | null | from collections import Mapping
from troposphere import Tags
def check_properties(properties, allowed_properties, resource):
    """Validate that every key of *properties* is an allowed property.

    Guards against typos in user configuration: any key of *properties*
    that is not listed in *allowed_properties* raises a ValueError naming
    the offending key and *resource*, so an unexpected setting cannot be
    silently ignored when the resource is created.

    Args:
        properties: config mapping provided by the configuration file.
        allowed_properties: iterable of valid property names.
        resource: resource name used in the error message.

    Raises:
        ValueError: on the first key not present in *allowed_properties*.
    """
    for prop in properties.keys():
        if prop in allowed_properties:
            continue
        raise ValueError(
            "%s is not a valid property of %s" % (prop, resource)
        )
def _tags_to_dict(tag_list):
return dict((tag['Key'], tag['Value']) for tag in tag_list)
|
def get_record_set_md5(rs_name, rs_type):
    """Accept record_set Name and Type. Return MD5 sum of these values.

    The digest is used as a stable CloudFormation logical resource id, so
    the same (name, type) pair must always hash identically.

    Args:
        rs_name: record set name; lowercased so casing never changes the id.
        rs_type: record set type, e.g. "A", "CNAME", "TXT".

    Returns:
        32-character hex MD5 digest (str).
    """
    rs_name = rs_name.lower()
    rs_type = rs_type.upper()
    # Make A and CNAME records hash to same sum to support updates.
    rs_type = "ACNAME" if rs_type in ["A", "CNAME"] else rs_type
    # BUG FIX: hashlib requires bytes on Python 3; encode explicitly
    # (a no-op for the ASCII inputs this sees on Python 2).
    return md5((rs_name + rs_type).encode("utf-8")).hexdigest()
from stacker.blueprints.base import Blueprint
from troposphere import (
Ref,
Output,
GetAtt,
Join,
Region,
route53,
)
import logging
logger = logging.getLogger(__name__)
# reference: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-route53-aliastarget.html # noqa
CLOUDFRONT_ZONE_ID = "Z2FDTNDATAQYW2"
# reference:
# https://docs.aws.amazon.com/general/latest/gr/rande.html#elb_region
ELB_ZONE_IDS = {
'us-east-2': 'Z3AADJGX6KTTL2',
'us-east-1': 'Z35SXDOTRQ7X7K',
'us-west-1': 'Z368ELLRRE2KJ0',
'us-west-2': 'Z1H1FL5HABSF5',
'ca-central-1': 'ZQSVJUPU6J1EY',
'ap-south-1': 'ZP97RAFLXTNZK',
'ap-northeast-2': 'ZWKZPGTI48KDX',
'ap-southeast-1': 'Z1LMS91P8CMLE5',
'ap-southeast-2': 'Z1GM3OXH4ZPM65',
'ap-northeast-1': 'Z14GRHDCWA56QT',
'eu-central-1': 'Z215JYRZR1TBD5',
'eu-west-1': 'Z32O12XQLNTSW2',
'eu-west-2': 'ZHURV8PSTC4K8',
'eu-west-3': 'Z3Q77PNBQS71R4',
'sa-east-1': 'Z2P70J7HTTTPLU',
}
# reference:
# https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
S3_WEBSITE_ZONE_IDS = {
"s3-website.us-east-2.amazonaws.com": "Z2O1EMRO9K5GLX",
"s3-website-us-east-1.amazonaws.com": "Z3AQBSTGFYJSTF",
"s3-website-us-west-1.amazonaws.com": "Z2F56UZL2M1ACD",
"s3-website-us-west-2.amazonaws.com": "Z3BJ6K6RIION7M",
"s3-website.ca-central-1.amazonaws.com": "Z1QDHH18159H29",
"s3-website.ap-south-1.amazonaws.com": "Z11RGJOFQNVJUP",
"s3-website.ap-northeast-2.amazonaws.com": "Z3W03O7B5YMIYP",
"s3-website-ap-southeast-1.amazonaws.com": "Z3O0J2DXBE1FTB",
"s3-website-ap-southeast-2.amazonaws.com": "Z1WCIGYICN2BYD",
"s3-website-ap-northeast-1.amazonaws.com": "Z2M4EHUR26P7ZW",
"s3-website.eu-central-1.amazonaws.com": "Z21DNDUVLTQW6Q",
"s3-website-eu-west-1.amazonaws.com": "Z1BKCTXD74EZPE",
"s3-website.eu-west-2.amazonaws.com": "Z3GKZC51ZF0DB4",
"s3-website.eu-west-3.amazonaws.com": "Z3R1K369G5AVDG",
"s3-website-sa-east-1.amazonaws.com": "Z7KQH4QJS55SO",
}
CF_DOMAIN = ".cloudfront.net."
ELB_DOMAIN = ".elb.amazonaws.com."
S3_WEBSITE_PREFIX = "s3-website"
def add_hosted_zone_id_if_missing(record_set, hosted_zone_id):
    """Ensure a troposphere record_set carries a HostedZoneId.

    When the record set has no truthy HostedZoneId attribute, it is set to
    *hosted_zone_id*. The object is mutated in place and returned.
    """
    existing = getattr(record_set, "HostedZoneId", None)
    if existing:
        return record_set
    record_set.HostedZoneId = hosted_zone_id
    return record_set
class DNSRecords(Blueprint):
    """Stacker blueprint managing a Route53 hosted zone and its records.

    Either adopts an existing zone (HostedZoneId) or creates a new one
    (HostedZoneName, optionally private when VPC is given), then creates
    the configured RecordSets and RecordSetGroups inside it.
    """

    VARIABLES = {
        "VPC": {
            "type": str,
            "default": "",
            "description": "A VPC that you want to associate with "
                           "this hosted zone. When you specify this property, "
                           "AWS CloudFormation creates a private hosted zone.",
        },
        "HostedZoneId": {
            "type": str,
            "description": "The id of an existing HostedZone.",
            "default": "",
        },
        "HostedZoneName": {
            "type": str,
            "description": "The name of a HostedZone to create and manage.",
            "default": "",
        },
        "Comment": {
            "type": str,
            "description": "The comment of a stacker managed HostedZone."
                           "Does nothing when HostedZoneId in variables.",
            "default": "",
        },
        "RecordSets": {
            "type": list,
            "description": "A list of dictionaries representing the attributes"
                           "of a troposphere.route53.RecordSetType object."
                           "Also accepts an optional 'Enabled' boolean.",
            "default": []
        },
        "RecordSetGroups": {
            "type": dict,
            "description": "A list of dictionaries representing the attributes"
                           "of a troposphere.route53.RecordSetGroup object."
                           "Also accepts an optional 'Enabled' boolean.",
            "default": {}
        },
    }

    def add_hosted_zone_id_for_alias_target_if_missing(self, rs):
        """Add proper hosted zone id to record set alias target if missing."""
        alias_target = getattr(rs, "AliasTarget", None)
        if alias_target:
            hosted_zone_id = getattr(alias_target, "HostedZoneId", None)
            if not hosted_zone_id:
                # Infer the correct zone id from the alias DNS name:
                # CloudFront, ELB and S3-website endpoints each have fixed,
                # AWS-published hosted zone ids; anything else is assumed to
                # live in this blueprint's own zone.
                dns_name = alias_target.DNSName
                if dns_name.endswith(CF_DOMAIN):
                    alias_target.HostedZoneId = CLOUDFRONT_ZONE_ID
                elif dns_name.endswith(ELB_DOMAIN):
                    # Region is the 5th-from-last dot-separated label, e.g.
                    # "<name>.<region>.elb.amazonaws.com.".
                    region = dns_name.split('.')[-5]
                    alias_target.HostedZoneId = ELB_ZONE_IDS[region]
                elif dns_name in S3_WEBSITE_ZONE_IDS:
                    alias_target.HostedZoneId = S3_WEBSITE_ZONE_IDS[dns_name]
                else:
                    alias_target.HostedZoneId = self.hosted_zone_id
        return rs

    def create_record_set(self, rs_dict):
        """Accept a record_set dict. Return a Troposphere record_set object."""
        # Logical id is an MD5 of (name, type) so the same record keeps the
        # same CloudFormation id across runs (A and CNAME hash together).
        record_set_md5 = get_record_set_md5(rs_dict["Name"], rs_dict["Type"])
        rs = route53.RecordSetType.from_dict(record_set_md5, rs_dict)
        rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
        rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
        return self.template.add_resource(rs)

    def create_record_set_group(self, name, g_dict):
        """Accept a record_set_group dict. Return a Troposphere object."""
        rs = route53.RecordSetGroup.from_dict(name, g_dict)
        rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
        rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
        return self.template.add_resource(rs)

    def create_record_sets(self, record_set_dicts):
        """Accept list of record_set dicts.
        Return list of record_set objects."""
        record_set_objects = []
        for record_set_dict in record_set_dicts:
            # pop removes the 'Enabled' key and tests if True.
            if record_set_dict.pop('Enabled', True):
                record_set_objects.append(
                    self.create_record_set(record_set_dict)
                )
        return record_set_objects

    def create_record_set_groups(self, record_set_group_dicts):
        """Accept list of record_set_group dicts.
        Return list of record_set_group objects."""
        record_set_groups = []
        for name, group in record_set_group_dicts.iteritems():
            # pop removes the 'Enabled' key and tests if True.
            if group.pop('Enabled', True):
                record_set_groups.append(
                    self.create_record_set_group(name, group)
                )
        return record_set_groups

    def create_template(self):
        """Build the template: zone (created or adopted), records, outputs."""
        variables = self.get_variables()
        hosted_zone_name = variables["HostedZoneName"]
        hosted_zone_id = variables["HostedZoneId"]
        hosted_zone_comment = variables["Comment"]
        if all([hosted_zone_comment, hosted_zone_id]):
            logger.warning(
                "The Comment variable works when HostedZoneName is passed."
                "When HostedZoneId in variables, Comment is ignored."
            )
        # Exactly one of HostedZoneName / HostedZoneId must be provided.
        if all([hosted_zone_name, hosted_zone_id]):
            raise ValueError("Cannot specify both 'HostedZoneName' and "
                             "'HostedZoneId' variables.")
        if not any([hosted_zone_name, hosted_zone_id]):
            raise ValueError("Please specify either a 'HostedZoneName' or "
                             "'HostedZoneId' variable.")
        if hosted_zone_id:
            # Adopt an existing zone; nothing to create.
            self.hosted_zone_id = hosted_zone_id
        else:
            hosted_zone_config = route53.HostedZoneConfiguration(
                "HostedZoneConfiguration",
                Comment=hosted_zone_comment
            )
            hosted_zone = route53.HostedZone(
                "HostedZone",
                Name=hosted_zone_name,
                HostedZoneConfig=hosted_zone_config
            )
            if variables["VPC"]:
                # Associating a VPC makes this a private hosted zone.
                vpc = route53.HostedZoneVPCs(
                    VPCId=variables["VPC"],
                    VPCRegion=Region
                )
                hosted_zone.VPCs = [vpc]
            else:
                # Public zones expose their delegated name servers.
                nameservers = Join(',', GetAtt(hosted_zone, "NameServers"))
                self.template.add_output(
                    Output("NameServers", Value=nameservers)
                )
            self.template.add_resource(hosted_zone)
            self.hosted_zone_id = Ref(hosted_zone)
        self.template.add_output(
            Output("HostedZoneId", Value=self.hosted_zone_id)
        )
        self.create_record_set_groups(variables["RecordSetGroups"])
        return self.create_record_sets(variables["RecordSets"])
|
remind101/stacker_blueprints | stacker_blueprints/route53.py | DNSRecords.add_hosted_zone_id_for_alias_target_if_missing | python | def add_hosted_zone_id_for_alias_target_if_missing(self, rs):
alias_target = getattr(rs, "AliasTarget", None)
if alias_target:
hosted_zone_id = getattr(alias_target, "HostedZoneId", None)
if not hosted_zone_id:
dns_name = alias_target.DNSName
if dns_name.endswith(CF_DOMAIN):
alias_target.HostedZoneId = CLOUDFRONT_ZONE_ID
elif dns_name.endswith(ELB_DOMAIN):
region = dns_name.split('.')[-5]
alias_target.HostedZoneId = ELB_ZONE_IDS[region]
elif dns_name in S3_WEBSITE_ZONE_IDS:
alias_target.HostedZoneId = S3_WEBSITE_ZONE_IDS[dns_name]
else:
alias_target.HostedZoneId = self.hosted_zone_id
return rs | Add proper hosted zone id to record set alias target if missing. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/route53.py#L123-L139 | null | class DNSRecords(Blueprint):
VARIABLES = {
"VPC": {
"type": str,
"default": "",
"description": "A VPC that you want to associate with "
"this hosted zone. When you specify this property, "
"AWS CloudFormation creates a private hosted zone.",
},
"HostedZoneId": {
"type": str,
"description": "The id of an existing HostedZone.",
"default": "",
},
"HostedZoneName": {
"type": str,
"description": "The name of a HostedZone to create and manage.",
"default": "",
},
"Comment": {
"type": str,
"description": "The comment of a stacker managed HostedZone."
"Does nothing when HostedZoneId in variables.",
"default": "",
},
"RecordSets": {
"type": list,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetType object."
"Also accepts an optional 'Enabled' boolean.",
"default": []
},
"RecordSetGroups": {
"type": dict,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetGroup object."
"Also accepts an optional 'Enabled' boolean.",
"default": {}
},
}
def create_record_set(self, rs_dict):
"""Accept a record_set dict. Return a Troposphere record_set object."""
record_set_md5 = get_record_set_md5(rs_dict["Name"], rs_dict["Type"])
rs = route53.RecordSetType.from_dict(record_set_md5, rs_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs)
def create_record_set_group(self, name, g_dict):
"""Accept a record_set dict. Return a Troposphere record_set object."""
rs = route53.RecordSetGroup.from_dict(name, g_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs)
def create_record_sets(self, record_set_dicts):
"""Accept list of record_set dicts.
Return list of record_set objects."""
record_set_objects = []
for record_set_dict in record_set_dicts:
# pop removes the 'Enabled' key and tests if True.
if record_set_dict.pop('Enabled', True):
record_set_objects.append(
self.create_record_set(record_set_dict)
)
return record_set_objects
def create_record_set_groups(self, record_set_group_dicts):
"""Accept list of record_set_group dicts.
Return list of record_set_group objects."""
record_set_groups = []
for name, group in record_set_group_dicts.iteritems():
# pop removes the 'Enabled' key and tests if True.
if group.pop('Enabled', True):
record_set_groups.append(
self.create_record_set_group(name, group)
)
return record_set_groups
def create_template(self):
variables = self.get_variables()
hosted_zone_name = variables["HostedZoneName"]
hosted_zone_id = variables["HostedZoneId"]
hosted_zone_comment = variables["Comment"]
if all([hosted_zone_comment, hosted_zone_id]):
logger.warning(
"The Comment variable works when HostedZoneName is passed."
"When HostedZoneId in variables, Comment is ignored."
)
if all([hosted_zone_name, hosted_zone_id]):
raise ValueError("Cannot specify both 'HostedZoneName' and "
"'HostedZoneId' variables.")
if not any([hosted_zone_name, hosted_zone_id]):
raise ValueError("Please specify either a 'HostedZoneName' or "
"'HostedZoneId' variable.")
if hosted_zone_id:
self.hosted_zone_id = hosted_zone_id
else:
hosted_zone_config = route53.HostedZoneConfiguration(
"HostedZoneConfiguration",
Comment=hosted_zone_comment
)
hosted_zone = route53.HostedZone(
"HostedZone",
Name=hosted_zone_name,
HostedZoneConfig=hosted_zone_config
)
if variables["VPC"]:
vpc = route53.HostedZoneVPCs(
VPCId=variables["VPC"],
VPCRegion=Region
)
hosted_zone.VPCs = [vpc]
else:
nameservers = Join(',', GetAtt(hosted_zone, "NameServers"))
self.template.add_output(
Output("NameServers", Value=nameservers)
)
self.template.add_resource(hosted_zone)
self.hosted_zone_id = Ref(hosted_zone)
self.template.add_output(
Output("HostedZoneId", Value=self.hosted_zone_id)
)
self.create_record_set_groups(variables["RecordSetGroups"])
return self.create_record_sets(variables["RecordSets"])
|
remind101/stacker_blueprints | stacker_blueprints/route53.py | DNSRecords.create_record_set | python | def create_record_set(self, rs_dict):
record_set_md5 = get_record_set_md5(rs_dict["Name"], rs_dict["Type"])
rs = route53.RecordSetType.from_dict(record_set_md5, rs_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs) | Accept a record_set dict. Return a Troposphere record_set object. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/route53.py#L141-L147 | [
"def get_record_set_md5(rs_name, rs_type):\n \"\"\"Accept record_set Name and Type. Return MD5 sum of these values.\"\"\"\n rs_name = rs_name.lower()\n rs_type = rs_type.upper()\n # Make A and CNAME records hash to same sum to support updates.\n rs_type = \"ACNAME\" if rs_type in [\"A\", \"CNAME\"] e... | class DNSRecords(Blueprint):
VARIABLES = {
"VPC": {
"type": str,
"default": "",
"description": "A VPC that you want to associate with "
"this hosted zone. When you specify this property, "
"AWS CloudFormation creates a private hosted zone.",
},
"HostedZoneId": {
"type": str,
"description": "The id of an existing HostedZone.",
"default": "",
},
"HostedZoneName": {
"type": str,
"description": "The name of a HostedZone to create and manage.",
"default": "",
},
"Comment": {
"type": str,
"description": "The comment of a stacker managed HostedZone."
"Does nothing when HostedZoneId in variables.",
"default": "",
},
"RecordSets": {
"type": list,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetType object."
"Also accepts an optional 'Enabled' boolean.",
"default": []
},
"RecordSetGroups": {
"type": dict,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetGroup object."
"Also accepts an optional 'Enabled' boolean.",
"default": {}
},
}
def add_hosted_zone_id_for_alias_target_if_missing(self, rs):
"""Add proper hosted zone id to record set alias target if missing."""
alias_target = getattr(rs, "AliasTarget", None)
if alias_target:
hosted_zone_id = getattr(alias_target, "HostedZoneId", None)
if not hosted_zone_id:
dns_name = alias_target.DNSName
if dns_name.endswith(CF_DOMAIN):
alias_target.HostedZoneId = CLOUDFRONT_ZONE_ID
elif dns_name.endswith(ELB_DOMAIN):
region = dns_name.split('.')[-5]
alias_target.HostedZoneId = ELB_ZONE_IDS[region]
elif dns_name in S3_WEBSITE_ZONE_IDS:
alias_target.HostedZoneId = S3_WEBSITE_ZONE_IDS[dns_name]
else:
alias_target.HostedZoneId = self.hosted_zone_id
return rs
def create_record_set_group(self, name, g_dict):
"""Accept a record_set dict. Return a Troposphere record_set object."""
rs = route53.RecordSetGroup.from_dict(name, g_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs)
def create_record_sets(self, record_set_dicts):
"""Accept list of record_set dicts.
Return list of record_set objects."""
record_set_objects = []
for record_set_dict in record_set_dicts:
# pop removes the 'Enabled' key and tests if True.
if record_set_dict.pop('Enabled', True):
record_set_objects.append(
self.create_record_set(record_set_dict)
)
return record_set_objects
def create_record_set_groups(self, record_set_group_dicts):
"""Accept list of record_set_group dicts.
Return list of record_set_group objects."""
record_set_groups = []
for name, group in record_set_group_dicts.iteritems():
# pop removes the 'Enabled' key and tests if True.
if group.pop('Enabled', True):
record_set_groups.append(
self.create_record_set_group(name, group)
)
return record_set_groups
def create_template(self):
variables = self.get_variables()
hosted_zone_name = variables["HostedZoneName"]
hosted_zone_id = variables["HostedZoneId"]
hosted_zone_comment = variables["Comment"]
if all([hosted_zone_comment, hosted_zone_id]):
logger.warning(
"The Comment variable works when HostedZoneName is passed."
"When HostedZoneId in variables, Comment is ignored."
)
if all([hosted_zone_name, hosted_zone_id]):
raise ValueError("Cannot specify both 'HostedZoneName' and "
"'HostedZoneId' variables.")
if not any([hosted_zone_name, hosted_zone_id]):
raise ValueError("Please specify either a 'HostedZoneName' or "
"'HostedZoneId' variable.")
if hosted_zone_id:
self.hosted_zone_id = hosted_zone_id
else:
hosted_zone_config = route53.HostedZoneConfiguration(
"HostedZoneConfiguration",
Comment=hosted_zone_comment
)
hosted_zone = route53.HostedZone(
"HostedZone",
Name=hosted_zone_name,
HostedZoneConfig=hosted_zone_config
)
if variables["VPC"]:
vpc = route53.HostedZoneVPCs(
VPCId=variables["VPC"],
VPCRegion=Region
)
hosted_zone.VPCs = [vpc]
else:
nameservers = Join(',', GetAtt(hosted_zone, "NameServers"))
self.template.add_output(
Output("NameServers", Value=nameservers)
)
self.template.add_resource(hosted_zone)
self.hosted_zone_id = Ref(hosted_zone)
self.template.add_output(
Output("HostedZoneId", Value=self.hosted_zone_id)
)
self.create_record_set_groups(variables["RecordSetGroups"])
return self.create_record_sets(variables["RecordSets"])
|
remind101/stacker_blueprints | stacker_blueprints/route53.py | DNSRecords.create_record_set_group | python | def create_record_set_group(self, name, g_dict):
rs = route53.RecordSetGroup.from_dict(name, g_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs) | Accept a record_set dict. Return a Troposphere record_set object. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/route53.py#L149-L154 | [
"def add_hosted_zone_id_if_missing(record_set, hosted_zone_id):\n \"\"\"Add HostedZoneId to Trophosphere record_set object if missing.\"\"\"\n if not getattr(record_set, \"HostedZoneId\", None):\n record_set.HostedZoneId = hosted_zone_id\n return record_set\n"
] | class DNSRecords(Blueprint):
VARIABLES = {
"VPC": {
"type": str,
"default": "",
"description": "A VPC that you want to associate with "
"this hosted zone. When you specify this property, "
"AWS CloudFormation creates a private hosted zone.",
},
"HostedZoneId": {
"type": str,
"description": "The id of an existing HostedZone.",
"default": "",
},
"HostedZoneName": {
"type": str,
"description": "The name of a HostedZone to create and manage.",
"default": "",
},
"Comment": {
"type": str,
"description": "The comment of a stacker managed HostedZone."
"Does nothing when HostedZoneId in variables.",
"default": "",
},
"RecordSets": {
"type": list,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetType object."
"Also accepts an optional 'Enabled' boolean.",
"default": []
},
"RecordSetGroups": {
"type": dict,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetGroup object."
"Also accepts an optional 'Enabled' boolean.",
"default": {}
},
}
def add_hosted_zone_id_for_alias_target_if_missing(self, rs):
"""Add proper hosted zone id to record set alias target if missing."""
alias_target = getattr(rs, "AliasTarget", None)
if alias_target:
hosted_zone_id = getattr(alias_target, "HostedZoneId", None)
if not hosted_zone_id:
dns_name = alias_target.DNSName
if dns_name.endswith(CF_DOMAIN):
alias_target.HostedZoneId = CLOUDFRONT_ZONE_ID
elif dns_name.endswith(ELB_DOMAIN):
region = dns_name.split('.')[-5]
alias_target.HostedZoneId = ELB_ZONE_IDS[region]
elif dns_name in S3_WEBSITE_ZONE_IDS:
alias_target.HostedZoneId = S3_WEBSITE_ZONE_IDS[dns_name]
else:
alias_target.HostedZoneId = self.hosted_zone_id
return rs
def create_record_set(self, rs_dict):
"""Accept a record_set dict. Return a Troposphere record_set object."""
record_set_md5 = get_record_set_md5(rs_dict["Name"], rs_dict["Type"])
rs = route53.RecordSetType.from_dict(record_set_md5, rs_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs)
def create_record_sets(self, record_set_dicts):
"""Accept list of record_set dicts.
Return list of record_set objects."""
record_set_objects = []
for record_set_dict in record_set_dicts:
# pop removes the 'Enabled' key and tests if True.
if record_set_dict.pop('Enabled', True):
record_set_objects.append(
self.create_record_set(record_set_dict)
)
return record_set_objects
def create_record_set_groups(self, record_set_group_dicts):
"""Accept list of record_set_group dicts.
Return list of record_set_group objects."""
record_set_groups = []
for name, group in record_set_group_dicts.iteritems():
# pop removes the 'Enabled' key and tests if True.
if group.pop('Enabled', True):
record_set_groups.append(
self.create_record_set_group(name, group)
)
return record_set_groups
def create_template(self):
variables = self.get_variables()
hosted_zone_name = variables["HostedZoneName"]
hosted_zone_id = variables["HostedZoneId"]
hosted_zone_comment = variables["Comment"]
if all([hosted_zone_comment, hosted_zone_id]):
logger.warning(
"The Comment variable works when HostedZoneName is passed."
"When HostedZoneId in variables, Comment is ignored."
)
if all([hosted_zone_name, hosted_zone_id]):
raise ValueError("Cannot specify both 'HostedZoneName' and "
"'HostedZoneId' variables.")
if not any([hosted_zone_name, hosted_zone_id]):
raise ValueError("Please specify either a 'HostedZoneName' or "
"'HostedZoneId' variable.")
if hosted_zone_id:
self.hosted_zone_id = hosted_zone_id
else:
hosted_zone_config = route53.HostedZoneConfiguration(
"HostedZoneConfiguration",
Comment=hosted_zone_comment
)
hosted_zone = route53.HostedZone(
"HostedZone",
Name=hosted_zone_name,
HostedZoneConfig=hosted_zone_config
)
if variables["VPC"]:
vpc = route53.HostedZoneVPCs(
VPCId=variables["VPC"],
VPCRegion=Region
)
hosted_zone.VPCs = [vpc]
else:
nameservers = Join(',', GetAtt(hosted_zone, "NameServers"))
self.template.add_output(
Output("NameServers", Value=nameservers)
)
self.template.add_resource(hosted_zone)
self.hosted_zone_id = Ref(hosted_zone)
self.template.add_output(
Output("HostedZoneId", Value=self.hosted_zone_id)
)
self.create_record_set_groups(variables["RecordSetGroups"])
return self.create_record_sets(variables["RecordSets"])
|
remind101/stacker_blueprints | stacker_blueprints/route53.py | DNSRecords.create_record_sets | python | def create_record_sets(self, record_set_dicts):
record_set_objects = []
for record_set_dict in record_set_dicts:
# pop removes the 'Enabled' key and tests if True.
if record_set_dict.pop('Enabled', True):
record_set_objects.append(
self.create_record_set(record_set_dict)
)
return record_set_objects | Accept list of record_set dicts.
Return list of record_set objects. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/route53.py#L156-L166 | null | class DNSRecords(Blueprint):
VARIABLES = {
"VPC": {
"type": str,
"default": "",
"description": "A VPC that you want to associate with "
"this hosted zone. When you specify this property, "
"AWS CloudFormation creates a private hosted zone.",
},
"HostedZoneId": {
"type": str,
"description": "The id of an existing HostedZone.",
"default": "",
},
"HostedZoneName": {
"type": str,
"description": "The name of a HostedZone to create and manage.",
"default": "",
},
"Comment": {
"type": str,
"description": "The comment of a stacker managed HostedZone."
"Does nothing when HostedZoneId in variables.",
"default": "",
},
"RecordSets": {
"type": list,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetType object."
"Also accepts an optional 'Enabled' boolean.",
"default": []
},
"RecordSetGroups": {
"type": dict,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetGroup object."
"Also accepts an optional 'Enabled' boolean.",
"default": {}
},
}
def add_hosted_zone_id_for_alias_target_if_missing(self, rs):
"""Add proper hosted zone id to record set alias target if missing."""
alias_target = getattr(rs, "AliasTarget", None)
if alias_target:
hosted_zone_id = getattr(alias_target, "HostedZoneId", None)
if not hosted_zone_id:
dns_name = alias_target.DNSName
if dns_name.endswith(CF_DOMAIN):
alias_target.HostedZoneId = CLOUDFRONT_ZONE_ID
elif dns_name.endswith(ELB_DOMAIN):
region = dns_name.split('.')[-5]
alias_target.HostedZoneId = ELB_ZONE_IDS[region]
elif dns_name in S3_WEBSITE_ZONE_IDS:
alias_target.HostedZoneId = S3_WEBSITE_ZONE_IDS[dns_name]
else:
alias_target.HostedZoneId = self.hosted_zone_id
return rs
def create_record_set(self, rs_dict):
"""Accept a record_set dict. Return a Troposphere record_set object."""
record_set_md5 = get_record_set_md5(rs_dict["Name"], rs_dict["Type"])
rs = route53.RecordSetType.from_dict(record_set_md5, rs_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs)
def create_record_set_group(self, name, g_dict):
"""Accept a record_set dict. Return a Troposphere record_set object."""
rs = route53.RecordSetGroup.from_dict(name, g_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs)
def create_record_set_groups(self, record_set_group_dicts):
"""Accept list of record_set_group dicts.
Return list of record_set_group objects."""
record_set_groups = []
for name, group in record_set_group_dicts.iteritems():
# pop removes the 'Enabled' key and tests if True.
if group.pop('Enabled', True):
record_set_groups.append(
self.create_record_set_group(name, group)
)
return record_set_groups
def create_template(self):
variables = self.get_variables()
hosted_zone_name = variables["HostedZoneName"]
hosted_zone_id = variables["HostedZoneId"]
hosted_zone_comment = variables["Comment"]
if all([hosted_zone_comment, hosted_zone_id]):
logger.warning(
"The Comment variable works when HostedZoneName is passed."
"When HostedZoneId in variables, Comment is ignored."
)
if all([hosted_zone_name, hosted_zone_id]):
raise ValueError("Cannot specify both 'HostedZoneName' and "
"'HostedZoneId' variables.")
if not any([hosted_zone_name, hosted_zone_id]):
raise ValueError("Please specify either a 'HostedZoneName' or "
"'HostedZoneId' variable.")
if hosted_zone_id:
self.hosted_zone_id = hosted_zone_id
else:
hosted_zone_config = route53.HostedZoneConfiguration(
"HostedZoneConfiguration",
Comment=hosted_zone_comment
)
hosted_zone = route53.HostedZone(
"HostedZone",
Name=hosted_zone_name,
HostedZoneConfig=hosted_zone_config
)
if variables["VPC"]:
vpc = route53.HostedZoneVPCs(
VPCId=variables["VPC"],
VPCRegion=Region
)
hosted_zone.VPCs = [vpc]
else:
nameservers = Join(',', GetAtt(hosted_zone, "NameServers"))
self.template.add_output(
Output("NameServers", Value=nameservers)
)
self.template.add_resource(hosted_zone)
self.hosted_zone_id = Ref(hosted_zone)
self.template.add_output(
Output("HostedZoneId", Value=self.hosted_zone_id)
)
self.create_record_set_groups(variables["RecordSetGroups"])
return self.create_record_sets(variables["RecordSets"])
|
remind101/stacker_blueprints | stacker_blueprints/route53.py | DNSRecords.create_record_set_groups | python | def create_record_set_groups(self, record_set_group_dicts):
record_set_groups = []
for name, group in record_set_group_dicts.iteritems():
# pop removes the 'Enabled' key and tests if True.
if group.pop('Enabled', True):
record_set_groups.append(
self.create_record_set_group(name, group)
)
return record_set_groups | Accept list of record_set_group dicts.
Return list of record_set_group objects. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/route53.py#L168-L178 | null | class DNSRecords(Blueprint):
VARIABLES = {
"VPC": {
"type": str,
"default": "",
"description": "A VPC that you want to associate with "
"this hosted zone. When you specify this property, "
"AWS CloudFormation creates a private hosted zone.",
},
"HostedZoneId": {
"type": str,
"description": "The id of an existing HostedZone.",
"default": "",
},
"HostedZoneName": {
"type": str,
"description": "The name of a HostedZone to create and manage.",
"default": "",
},
"Comment": {
"type": str,
"description": "The comment of a stacker managed HostedZone."
"Does nothing when HostedZoneId in variables.",
"default": "",
},
"RecordSets": {
"type": list,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetType object."
"Also accepts an optional 'Enabled' boolean.",
"default": []
},
"RecordSetGroups": {
"type": dict,
"description": "A list of dictionaries representing the attributes"
"of a troposphere.route53.RecordSetGroup object."
"Also accepts an optional 'Enabled' boolean.",
"default": {}
},
}
def add_hosted_zone_id_for_alias_target_if_missing(self, rs):
"""Add proper hosted zone id to record set alias target if missing."""
alias_target = getattr(rs, "AliasTarget", None)
if alias_target:
hosted_zone_id = getattr(alias_target, "HostedZoneId", None)
if not hosted_zone_id:
dns_name = alias_target.DNSName
if dns_name.endswith(CF_DOMAIN):
alias_target.HostedZoneId = CLOUDFRONT_ZONE_ID
elif dns_name.endswith(ELB_DOMAIN):
region = dns_name.split('.')[-5]
alias_target.HostedZoneId = ELB_ZONE_IDS[region]
elif dns_name in S3_WEBSITE_ZONE_IDS:
alias_target.HostedZoneId = S3_WEBSITE_ZONE_IDS[dns_name]
else:
alias_target.HostedZoneId = self.hosted_zone_id
return rs
def create_record_set(self, rs_dict):
"""Accept a record_set dict. Return a Troposphere record_set object."""
record_set_md5 = get_record_set_md5(rs_dict["Name"], rs_dict["Type"])
rs = route53.RecordSetType.from_dict(record_set_md5, rs_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs)
def create_record_set_group(self, name, g_dict):
"""Accept a record_set dict. Return a Troposphere record_set object."""
rs = route53.RecordSetGroup.from_dict(name, g_dict)
rs = add_hosted_zone_id_if_missing(rs, self.hosted_zone_id)
rs = self.add_hosted_zone_id_for_alias_target_if_missing(rs)
return self.template.add_resource(rs)
def create_record_sets(self, record_set_dicts):
"""Accept list of record_set dicts.
Return list of record_set objects."""
record_set_objects = []
for record_set_dict in record_set_dicts:
# pop removes the 'Enabled' key and tests if True.
if record_set_dict.pop('Enabled', True):
record_set_objects.append(
self.create_record_set(record_set_dict)
)
return record_set_objects
def create_template(self):
variables = self.get_variables()
hosted_zone_name = variables["HostedZoneName"]
hosted_zone_id = variables["HostedZoneId"]
hosted_zone_comment = variables["Comment"]
if all([hosted_zone_comment, hosted_zone_id]):
logger.warning(
"The Comment variable works when HostedZoneName is passed."
"When HostedZoneId in variables, Comment is ignored."
)
if all([hosted_zone_name, hosted_zone_id]):
raise ValueError("Cannot specify both 'HostedZoneName' and "
"'HostedZoneId' variables.")
if not any([hosted_zone_name, hosted_zone_id]):
raise ValueError("Please specify either a 'HostedZoneName' or "
"'HostedZoneId' variable.")
if hosted_zone_id:
self.hosted_zone_id = hosted_zone_id
else:
hosted_zone_config = route53.HostedZoneConfiguration(
"HostedZoneConfiguration",
Comment=hosted_zone_comment
)
hosted_zone = route53.HostedZone(
"HostedZone",
Name=hosted_zone_name,
HostedZoneConfig=hosted_zone_config
)
if variables["VPC"]:
vpc = route53.HostedZoneVPCs(
VPCId=variables["VPC"],
VPCRegion=Region
)
hosted_zone.VPCs = [vpc]
else:
nameservers = Join(',', GetAtt(hosted_zone, "NameServers"))
self.template.add_output(
Output("NameServers", Value=nameservers)
)
self.template.add_resource(hosted_zone)
self.hosted_zone_id = Ref(hosted_zone)
self.template.add_output(
Output("HostedZoneId", Value=self.hosted_zone_id)
)
self.create_record_set_groups(variables["RecordSetGroups"])
return self.create_record_sets(variables["RecordSets"])
|
remind101/stacker_blueprints | stacker_blueprints/policies.py | read_only_s3_bucket_policy_statements | python | def read_only_s3_bucket_policy_statements(buckets, folder="*"):
list_buckets = [s3_arn(b) for b in buckets]
object_buckets = [s3_objects_arn(b, folder) for b in buckets]
bucket_resources = list_buckets + object_buckets
return [
Statement(
Effect=Allow,
Resource=[s3_arn("*")],
Action=[s3.ListAllMyBuckets]
),
Statement(
Effect=Allow,
Resource=bucket_resources,
Action=[Action('s3', 'Get*'), Action('s3', 'List*')]
)
] | Read only policy an s3 bucket. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/policies.py#L62-L80 | [
"def s3_arn(bucket):\n if isinstance(bucket, AWSHelperFn):\n return Sub('arn:aws:s3:::${Bucket}', Bucket=bucket)\n else:\n return 'arn:aws:s3:::%s' % bucket\n"
] | from awacs.aws import (
Action,
Allow,
Policy,
Principal,
Statement,
)
from troposphere import (
Sub,
Join,
Region,
AccountId,
AWSHelperFn
)
from awacs import (
sts,
s3,
logs,
ec2,
dynamodb,
cloudwatch,
)
def make_simple_assume_statement(*principals):
return Statement(
Principal=Principal('Service', principals),
Effect=Allow,
Action=[sts.AssumeRole])
def make_simple_assume_policy(*principals):
return Policy(
Statement=[
make_simple_assume_statement(*principals)])
def dynamodb_arn(table_name):
return 'arn:aws:dynamodb:::table/{}'.format(table_name)
def dynamodb_arns(table_names):
return [dynamodb_arn(table_name) for table_name in table_names]
def s3_arn(bucket):
if isinstance(bucket, AWSHelperFn):
return Sub('arn:aws:s3:::${Bucket}', Bucket=bucket)
else:
return 'arn:aws:s3:::%s' % bucket
def s3_objects_arn(bucket, folder="*"):
if isinstance(bucket, AWSHelperFn):
return Sub('arn:aws:s3:::${Bucket}/%s' % folder, Bucket=bucket)
else:
return 'arn:aws:s3:::%s/%s' % (bucket, folder)
def read_only_s3_bucket_policy(buckets):
return Policy(Statement=read_only_s3_bucket_policy_statements(buckets))
def read_write_s3_bucket_policy_statements(buckets, folder="*"):
list_buckets = [s3_arn(b) for b in buckets]
object_buckets = [s3_objects_arn(b, folder) for b in buckets]
return [
Statement(
Effect="Allow",
Action=[
s3.GetBucketLocation,
s3.ListAllMyBuckets,
],
Resource=[s3_arn("*")]
),
Statement(
Effect=Allow,
Action=[
s3.ListBucket,
s3.GetBucketVersioning,
],
Resource=list_buckets,
),
Statement(
Effect=Allow,
Action=[
s3.GetObject,
s3.PutObject,
s3.PutObjectAcl,
s3.DeleteObject,
s3.GetObjectVersion,
s3.DeleteObjectVersion,
],
Resource=object_buckets,
),
]
def read_write_s3_bucket_policy(buckets):
return Policy(Statement=read_write_s3_bucket_policy_statements(buckets))
def static_website_bucket_policy(bucket):
"""
Attach this policy directly to an S3 bucket to make it a static website.
This policy grants read access to **all unauthenticated** users.
"""
return Policy(
Statement=[
Statement(
Effect=Allow,
Principal=Principal("*"),
Action=[s3.GetObject],
Resource=[s3_objects_arn(bucket)],
)
]
)
def log_stream_arn(log_group_name, log_stream_name):
return Join(
'',
[
"arn:aws:logs:", Region, ":", AccountId, ":log-group:",
log_group_name, ":log-stream:", log_stream_name
]
)
def write_to_cloudwatch_logs_stream_statements(log_group_name,
log_stream_name):
return [
Statement(
Effect=Allow,
Action=[logs.PutLogEvents],
Resource=[log_stream_arn(log_group_name, log_stream_name)]
)
]
def write_to_cloudwatch_logs_stream_policy(log_group_name, log_stream_name):
return Policy(
Statement=write_to_cloudwatch_logs_stream_statements(log_group_name,
log_stream_name)
)
def cloudwatch_logs_write_statements(log_group=None):
resources = ["arn:aws:logs:*:*:*"]
if log_group:
log_group_parts = ["arn:aws:logs:", Region, ":", AccountId,
":log-group:", log_group]
log_group_arn = Join("", log_group_parts)
log_stream_wild = Join("", log_group_parts + [":*"])
resources = [log_group_arn, log_stream_wild]
return [
Statement(
Effect=Allow,
Resource=resources,
Action=[
logs.CreateLogGroup,
logs.CreateLogStream,
logs.PutLogEvents
]
)
]
def lambda_basic_execution_statements(function_name):
log_group = Join("/", ["/aws/lambda", function_name])
return cloudwatch_logs_write_statements(log_group)
def lambda_basic_execution_policy(function_name):
return Policy(Statement=lambda_basic_execution_statements(function_name))
def lambda_vpc_execution_statements():
"""Allow Lambda to manipuate EC2 ENIs for VPC support."""
return [
Statement(
Effect=Allow,
Resource=['*'],
Action=[
ec2.CreateNetworkInterface,
ec2.DescribeNetworkInterfaces,
ec2.DeleteNetworkInterface,
]
)
]
def flowlogs_assumerole_policy():
return make_simple_assume_policy("vpc-flow-logs.amazonaws.com")
# reference: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dynamodb-table.html#cfn-dynamodb-table-examples-application-autoscaling # noqa
def dynamodb_autoscaling_policy(tables):
"""Policy to allow AutoScaling a list of DynamoDB tables."""
return Policy(
Statement=[
Statement(
Effect=Allow,
Resource=dynamodb_arns(tables),
Action=[
dynamodb.DescribeTable,
dynamodb.UpdateTable,
]
),
Statement(
Effect=Allow,
Resource=['*'],
Action=[
cloudwatch.PutMetricAlarm,
cloudwatch.DescribeAlarms,
cloudwatch.GetMetricStatistics,
cloudwatch.SetAlarmState,
cloudwatch.DeleteAlarms,
]
),
]
)
|
remind101/stacker_blueprints | stacker_blueprints/policies.py | static_website_bucket_policy | python | def static_website_bucket_policy(bucket):
return Policy(
Statement=[
Statement(
Effect=Allow,
Principal=Principal("*"),
Action=[s3.GetObject],
Resource=[s3_objects_arn(bucket)],
)
]
) | Attach this policy directly to an S3 bucket to make it a static website.
This policy grants read access to **all unauthenticated** users. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/policies.py#L126-L140 | [
"def s3_objects_arn(bucket, folder=\"*\"):\n if isinstance(bucket, AWSHelperFn):\n return Sub('arn:aws:s3:::${Bucket}/%s' % folder, Bucket=bucket)\n else:\n return 'arn:aws:s3:::%s/%s' % (bucket, folder)\n"
] | from awacs.aws import (
Action,
Allow,
Policy,
Principal,
Statement,
)
from troposphere import (
Sub,
Join,
Region,
AccountId,
AWSHelperFn
)
from awacs import (
sts,
s3,
logs,
ec2,
dynamodb,
cloudwatch,
)
def make_simple_assume_statement(*principals):
return Statement(
Principal=Principal('Service', principals),
Effect=Allow,
Action=[sts.AssumeRole])
def make_simple_assume_policy(*principals):
return Policy(
Statement=[
make_simple_assume_statement(*principals)])
def dynamodb_arn(table_name):
return 'arn:aws:dynamodb:::table/{}'.format(table_name)
def dynamodb_arns(table_names):
return [dynamodb_arn(table_name) for table_name in table_names]
def s3_arn(bucket):
if isinstance(bucket, AWSHelperFn):
return Sub('arn:aws:s3:::${Bucket}', Bucket=bucket)
else:
return 'arn:aws:s3:::%s' % bucket
def s3_objects_arn(bucket, folder="*"):
if isinstance(bucket, AWSHelperFn):
return Sub('arn:aws:s3:::${Bucket}/%s' % folder, Bucket=bucket)
else:
return 'arn:aws:s3:::%s/%s' % (bucket, folder)
def read_only_s3_bucket_policy_statements(buckets, folder="*"):
""" Read only policy an s3 bucket. """
list_buckets = [s3_arn(b) for b in buckets]
object_buckets = [s3_objects_arn(b, folder) for b in buckets]
bucket_resources = list_buckets + object_buckets
return [
Statement(
Effect=Allow,
Resource=[s3_arn("*")],
Action=[s3.ListAllMyBuckets]
),
Statement(
Effect=Allow,
Resource=bucket_resources,
Action=[Action('s3', 'Get*'), Action('s3', 'List*')]
)
]
def read_only_s3_bucket_policy(buckets):
return Policy(Statement=read_only_s3_bucket_policy_statements(buckets))
def read_write_s3_bucket_policy_statements(buckets, folder="*"):
list_buckets = [s3_arn(b) for b in buckets]
object_buckets = [s3_objects_arn(b, folder) for b in buckets]
return [
Statement(
Effect="Allow",
Action=[
s3.GetBucketLocation,
s3.ListAllMyBuckets,
],
Resource=[s3_arn("*")]
),
Statement(
Effect=Allow,
Action=[
s3.ListBucket,
s3.GetBucketVersioning,
],
Resource=list_buckets,
),
Statement(
Effect=Allow,
Action=[
s3.GetObject,
s3.PutObject,
s3.PutObjectAcl,
s3.DeleteObject,
s3.GetObjectVersion,
s3.DeleteObjectVersion,
],
Resource=object_buckets,
),
]
def read_write_s3_bucket_policy(buckets):
return Policy(Statement=read_write_s3_bucket_policy_statements(buckets))
def log_stream_arn(log_group_name, log_stream_name):
return Join(
'',
[
"arn:aws:logs:", Region, ":", AccountId, ":log-group:",
log_group_name, ":log-stream:", log_stream_name
]
)
def write_to_cloudwatch_logs_stream_statements(log_group_name,
log_stream_name):
return [
Statement(
Effect=Allow,
Action=[logs.PutLogEvents],
Resource=[log_stream_arn(log_group_name, log_stream_name)]
)
]
def write_to_cloudwatch_logs_stream_policy(log_group_name, log_stream_name):
return Policy(
Statement=write_to_cloudwatch_logs_stream_statements(log_group_name,
log_stream_name)
)
def cloudwatch_logs_write_statements(log_group=None):
resources = ["arn:aws:logs:*:*:*"]
if log_group:
log_group_parts = ["arn:aws:logs:", Region, ":", AccountId,
":log-group:", log_group]
log_group_arn = Join("", log_group_parts)
log_stream_wild = Join("", log_group_parts + [":*"])
resources = [log_group_arn, log_stream_wild]
return [
Statement(
Effect=Allow,
Resource=resources,
Action=[
logs.CreateLogGroup,
logs.CreateLogStream,
logs.PutLogEvents
]
)
]
def lambda_basic_execution_statements(function_name):
log_group = Join("/", ["/aws/lambda", function_name])
return cloudwatch_logs_write_statements(log_group)
def lambda_basic_execution_policy(function_name):
return Policy(Statement=lambda_basic_execution_statements(function_name))
def lambda_vpc_execution_statements():
"""Allow Lambda to manipuate EC2 ENIs for VPC support."""
return [
Statement(
Effect=Allow,
Resource=['*'],
Action=[
ec2.CreateNetworkInterface,
ec2.DescribeNetworkInterfaces,
ec2.DeleteNetworkInterface,
]
)
]
def flowlogs_assumerole_policy():
return make_simple_assume_policy("vpc-flow-logs.amazonaws.com")
# reference: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dynamodb-table.html#cfn-dynamodb-table-examples-application-autoscaling # noqa
def dynamodb_autoscaling_policy(tables):
"""Policy to allow AutoScaling a list of DynamoDB tables."""
return Policy(
Statement=[
Statement(
Effect=Allow,
Resource=dynamodb_arns(tables),
Action=[
dynamodb.DescribeTable,
dynamodb.UpdateTable,
]
),
Statement(
Effect=Allow,
Resource=['*'],
Action=[
cloudwatch.PutMetricAlarm,
cloudwatch.DescribeAlarms,
cloudwatch.GetMetricStatistics,
cloudwatch.SetAlarmState,
cloudwatch.DeleteAlarms,
]
),
]
)
|
remind101/stacker_blueprints | stacker_blueprints/policies.py | lambda_vpc_execution_statements | python | def lambda_vpc_execution_statements():
return [
Statement(
Effect=Allow,
Resource=['*'],
Action=[
ec2.CreateNetworkInterface,
ec2.DescribeNetworkInterfaces,
ec2.DeleteNetworkInterface,
]
)
] | Allow Lambda to manipuate EC2 ENIs for VPC support. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/policies.py#L202-L214 | null | from awacs.aws import (
Action,
Allow,
Policy,
Principal,
Statement,
)
from troposphere import (
Sub,
Join,
Region,
AccountId,
AWSHelperFn
)
from awacs import (
sts,
s3,
logs,
ec2,
dynamodb,
cloudwatch,
)
def make_simple_assume_statement(*principals):
return Statement(
Principal=Principal('Service', principals),
Effect=Allow,
Action=[sts.AssumeRole])
def make_simple_assume_policy(*principals):
return Policy(
Statement=[
make_simple_assume_statement(*principals)])
def dynamodb_arn(table_name):
return 'arn:aws:dynamodb:::table/{}'.format(table_name)
def dynamodb_arns(table_names):
return [dynamodb_arn(table_name) for table_name in table_names]
def s3_arn(bucket):
if isinstance(bucket, AWSHelperFn):
return Sub('arn:aws:s3:::${Bucket}', Bucket=bucket)
else:
return 'arn:aws:s3:::%s' % bucket
def s3_objects_arn(bucket, folder="*"):
if isinstance(bucket, AWSHelperFn):
return Sub('arn:aws:s3:::${Bucket}/%s' % folder, Bucket=bucket)
else:
return 'arn:aws:s3:::%s/%s' % (bucket, folder)
def read_only_s3_bucket_policy_statements(buckets, folder="*"):
""" Read only policy an s3 bucket. """
list_buckets = [s3_arn(b) for b in buckets]
object_buckets = [s3_objects_arn(b, folder) for b in buckets]
bucket_resources = list_buckets + object_buckets
return [
Statement(
Effect=Allow,
Resource=[s3_arn("*")],
Action=[s3.ListAllMyBuckets]
),
Statement(
Effect=Allow,
Resource=bucket_resources,
Action=[Action('s3', 'Get*'), Action('s3', 'List*')]
)
]
def read_only_s3_bucket_policy(buckets):
return Policy(Statement=read_only_s3_bucket_policy_statements(buckets))
def read_write_s3_bucket_policy_statements(buckets, folder="*"):
list_buckets = [s3_arn(b) for b in buckets]
object_buckets = [s3_objects_arn(b, folder) for b in buckets]
return [
Statement(
Effect="Allow",
Action=[
s3.GetBucketLocation,
s3.ListAllMyBuckets,
],
Resource=[s3_arn("*")]
),
Statement(
Effect=Allow,
Action=[
s3.ListBucket,
s3.GetBucketVersioning,
],
Resource=list_buckets,
),
Statement(
Effect=Allow,
Action=[
s3.GetObject,
s3.PutObject,
s3.PutObjectAcl,
s3.DeleteObject,
s3.GetObjectVersion,
s3.DeleteObjectVersion,
],
Resource=object_buckets,
),
]
def read_write_s3_bucket_policy(buckets):
return Policy(Statement=read_write_s3_bucket_policy_statements(buckets))
def static_website_bucket_policy(bucket):
"""
Attach this policy directly to an S3 bucket to make it a static website.
This policy grants read access to **all unauthenticated** users.
"""
return Policy(
Statement=[
Statement(
Effect=Allow,
Principal=Principal("*"),
Action=[s3.GetObject],
Resource=[s3_objects_arn(bucket)],
)
]
)
def log_stream_arn(log_group_name, log_stream_name):
return Join(
'',
[
"arn:aws:logs:", Region, ":", AccountId, ":log-group:",
log_group_name, ":log-stream:", log_stream_name
]
)
def write_to_cloudwatch_logs_stream_statements(log_group_name,
log_stream_name):
return [
Statement(
Effect=Allow,
Action=[logs.PutLogEvents],
Resource=[log_stream_arn(log_group_name, log_stream_name)]
)
]
def write_to_cloudwatch_logs_stream_policy(log_group_name, log_stream_name):
return Policy(
Statement=write_to_cloudwatch_logs_stream_statements(log_group_name,
log_stream_name)
)
def cloudwatch_logs_write_statements(log_group=None):
resources = ["arn:aws:logs:*:*:*"]
if log_group:
log_group_parts = ["arn:aws:logs:", Region, ":", AccountId,
":log-group:", log_group]
log_group_arn = Join("", log_group_parts)
log_stream_wild = Join("", log_group_parts + [":*"])
resources = [log_group_arn, log_stream_wild]
return [
Statement(
Effect=Allow,
Resource=resources,
Action=[
logs.CreateLogGroup,
logs.CreateLogStream,
logs.PutLogEvents
]
)
]
def lambda_basic_execution_statements(function_name):
log_group = Join("/", ["/aws/lambda", function_name])
return cloudwatch_logs_write_statements(log_group)
def lambda_basic_execution_policy(function_name):
return Policy(Statement=lambda_basic_execution_statements(function_name))
def flowlogs_assumerole_policy():
return make_simple_assume_policy("vpc-flow-logs.amazonaws.com")
# reference: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dynamodb-table.html#cfn-dynamodb-table-examples-application-autoscaling # noqa
def dynamodb_autoscaling_policy(tables):
"""Policy to allow AutoScaling a list of DynamoDB tables."""
return Policy(
Statement=[
Statement(
Effect=Allow,
Resource=dynamodb_arns(tables),
Action=[
dynamodb.DescribeTable,
dynamodb.UpdateTable,
]
),
Statement(
Effect=Allow,
Resource=['*'],
Action=[
cloudwatch.PutMetricAlarm,
cloudwatch.DescribeAlarms,
cloudwatch.GetMetricStatistics,
cloudwatch.SetAlarmState,
cloudwatch.DeleteAlarms,
]
),
]
)
|
remind101/stacker_blueprints | stacker_blueprints/policies.py | dynamodb_autoscaling_policy | python | def dynamodb_autoscaling_policy(tables):
return Policy(
Statement=[
Statement(
Effect=Allow,
Resource=dynamodb_arns(tables),
Action=[
dynamodb.DescribeTable,
dynamodb.UpdateTable,
]
),
Statement(
Effect=Allow,
Resource=['*'],
Action=[
cloudwatch.PutMetricAlarm,
cloudwatch.DescribeAlarms,
cloudwatch.GetMetricStatistics,
cloudwatch.SetAlarmState,
cloudwatch.DeleteAlarms,
]
),
]
) | Policy to allow AutoScaling a list of DynamoDB tables. | train | https://github.com/remind101/stacker_blueprints/blob/71624f6e1bd4ea794dc98fb621a04235e1931cae/stacker_blueprints/policies.py#L222-L246 | [
"def dynamodb_arns(table_names):\n return [dynamodb_arn(table_name) for table_name in table_names]\n"
] | from awacs.aws import (
Action,
Allow,
Policy,
Principal,
Statement,
)
from troposphere import (
Sub,
Join,
Region,
AccountId,
AWSHelperFn
)
from awacs import (
sts,
s3,
logs,
ec2,
dynamodb,
cloudwatch,
)
def make_simple_assume_statement(*principals):
return Statement(
Principal=Principal('Service', principals),
Effect=Allow,
Action=[sts.AssumeRole])
def make_simple_assume_policy(*principals):
return Policy(
Statement=[
make_simple_assume_statement(*principals)])
def dynamodb_arn(table_name):
return 'arn:aws:dynamodb:::table/{}'.format(table_name)
def dynamodb_arns(table_names):
return [dynamodb_arn(table_name) for table_name in table_names]
def s3_arn(bucket):
if isinstance(bucket, AWSHelperFn):
return Sub('arn:aws:s3:::${Bucket}', Bucket=bucket)
else:
return 'arn:aws:s3:::%s' % bucket
def s3_objects_arn(bucket, folder="*"):
if isinstance(bucket, AWSHelperFn):
return Sub('arn:aws:s3:::${Bucket}/%s' % folder, Bucket=bucket)
else:
return 'arn:aws:s3:::%s/%s' % (bucket, folder)
def read_only_s3_bucket_policy_statements(buckets, folder="*"):
""" Read only policy an s3 bucket. """
list_buckets = [s3_arn(b) for b in buckets]
object_buckets = [s3_objects_arn(b, folder) for b in buckets]
bucket_resources = list_buckets + object_buckets
return [
Statement(
Effect=Allow,
Resource=[s3_arn("*")],
Action=[s3.ListAllMyBuckets]
),
Statement(
Effect=Allow,
Resource=bucket_resources,
Action=[Action('s3', 'Get*'), Action('s3', 'List*')]
)
]
def read_only_s3_bucket_policy(buckets):
return Policy(Statement=read_only_s3_bucket_policy_statements(buckets))
def read_write_s3_bucket_policy_statements(buckets, folder="*"):
list_buckets = [s3_arn(b) for b in buckets]
object_buckets = [s3_objects_arn(b, folder) for b in buckets]
return [
Statement(
Effect="Allow",
Action=[
s3.GetBucketLocation,
s3.ListAllMyBuckets,
],
Resource=[s3_arn("*")]
),
Statement(
Effect=Allow,
Action=[
s3.ListBucket,
s3.GetBucketVersioning,
],
Resource=list_buckets,
),
Statement(
Effect=Allow,
Action=[
s3.GetObject,
s3.PutObject,
s3.PutObjectAcl,
s3.DeleteObject,
s3.GetObjectVersion,
s3.DeleteObjectVersion,
],
Resource=object_buckets,
),
]
def read_write_s3_bucket_policy(buckets):
return Policy(Statement=read_write_s3_bucket_policy_statements(buckets))
def static_website_bucket_policy(bucket):
    """
    Attach this policy directly to an S3 bucket to make it a static website.
    This policy grants read access to **all unauthenticated** users.
    """
    # Public read: any principal may GetObject on every key in the bucket.
    public_read = Statement(
        Effect=Allow,
        Principal=Principal("*"),
        Action=[s3.GetObject],
        Resource=[s3_objects_arn(bucket)],
    )
    return Policy(Statement=[public_read])
def log_stream_arn(log_group_name, log_stream_name):
    """ARN of a CloudWatch Logs stream in the current region/account.

    Region and account are resolved at deploy time via the troposphere
    pseudo-parameters.
    """
    parts = [
        "arn:aws:logs:", Region, ":", AccountId,
        ":log-group:", log_group_name,
        ":log-stream:", log_stream_name,
    ]
    return Join('', parts)
def write_to_cloudwatch_logs_stream_statements(log_group_name,
                                               log_stream_name):
    """Statements allowing PutLogEvents on a single log stream."""
    stream = log_stream_arn(log_group_name, log_stream_name)
    put_events = Statement(
        Effect=Allow,
        Action=[logs.PutLogEvents],
        Resource=[stream]
    )
    return [put_events]
def write_to_cloudwatch_logs_stream_policy(log_group_name, log_stream_name):
    """Policy allowing PutLogEvents on a single CloudWatch Logs stream."""
    statements = write_to_cloudwatch_logs_stream_statements(
        log_group_name, log_stream_name)
    return Policy(Statement=statements)
def cloudwatch_logs_write_statements(log_group=None):
    """Statements allowing log group/stream creation and event writes.

    :param log_group: optional log group name; when given, access is
        scoped to that group and its streams, otherwise all log
        resources (``arn:aws:logs:*:*:*``) are allowed.
    :return: list with a single awacs ``Statement``.
    """
    if log_group:
        prefix = ["arn:aws:logs:", Region, ":", AccountId,
                  ":log-group:", log_group]
        # Both the group itself and every stream under it (":*").
        resources = [Join("", prefix), Join("", prefix + [":*"])]
    else:
        resources = ["arn:aws:logs:*:*:*"]
    actions = [
        logs.CreateLogGroup,
        logs.CreateLogStream,
        logs.PutLogEvents,
    ]
    return [Statement(Effect=Allow, Resource=resources, Action=actions)]
def lambda_basic_execution_statements(function_name):
    """CloudWatch Logs write statements scoped to a Lambda's log group."""
    # Lambda log groups follow the /aws/lambda/<function> convention.
    group = Join("/", ["/aws/lambda", function_name])
    return cloudwatch_logs_write_statements(group)
def lambda_basic_execution_policy(function_name):
    """Policy equivalent of AWSLambdaBasicExecutionRole for one function."""
    statements = lambda_basic_execution_statements(function_name)
    return Policy(Statement=statements)
def lambda_vpc_execution_statements():
    """Allow Lambda to manipuate EC2 ENIs for VPC support."""
    eni_actions = [
        ec2.CreateNetworkInterface,
        ec2.DescribeNetworkInterfaces,
        ec2.DeleteNetworkInterface,
    ]
    # ENI management cannot be resource-scoped here, hence the wildcard.
    return [Statement(Effect=Allow, Resource=['*'], Action=eni_actions)]
def flowlogs_assumerole_policy():
    """Assume-role policy for the VPC Flow Logs service principal."""
    flow_logs_service = "vpc-flow-logs.amazonaws.com"
    return make_simple_assume_policy(flow_logs_service)
# reference: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dynamodb-table.html#cfn-dynamodb-table-examples-application-autoscaling # noqa
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.get_debug_info | python | def get_debug_info():
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d | Return a list of lines with backend info. | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L77-L91 | [
"def get_session_classes(cls):\n return sessions.Session._session_classes\n"
] | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary._register | python | def _register(self, obj):
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session | Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L99-L113 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary._return_handler | python | def _return_handler(self, ret_value, func, arguments):
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value | Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted. | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L115-L163 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
         access_mode=constants.AccessModes.no_lock,
         open_timeout=constants.VI_TMO_IMMEDIATE):
    """Opens a session to the specified resource.

    Corresponds to viOpen function of the VISA library.

    :param session: Resource Manager session (should always be a session
        returned from open_default_resource_manager()).
    :param resource_name: Unique symbolic name of a resource.
    :param access_mode: Specifies the mode by which the resource is to be
        accessed. (constants.AccessModes)
    :param open_timeout: Maximum time period (in milliseconds) that this
        operation waits before returning an error.
    :return: Unique logical identifier reference to a session, return value
        of the library call.
    :rtype: session, VISAStatus
    """
    try:
        open_timeout = int(open_timeout)
    except ValueError:
        raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
    try:
        parsed_name = rname.parse_resource_name(resource_name)
    except rname.InvalidResourceName:
        return 0, StatusCode.error_invalid_resource_name
    # Look up the Session subclass registered for this interface/resource
    # combination and instantiate it.
    session_cls = sessions.Session.get_session_class(
        parsed_name.interface_type_const, parsed_name.resource_class)
    new_session = session_cls(session, resource_name, parsed_name, open_timeout)
    return self._register(new_session), StatusCode.success
def clear(self, session):
    """Clears a device.

    Corresponds to viClear function of the VISA library.

    :param session: Unique logical identifier to a session.
    :return: return value of the library call.
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    if session not in self.sessions:
        return constants.StatusCode.error_invalid_object
    return self.sessions[session].clear()
def gpib_command(self, session, command_byte):
    """Write GPIB command byte on the bus.

    Corresponds to viGpibCommand function of the VISA library.
    See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES

    :param command_byte: command byte to send
    :type command_byte: int, must be [0 255]
    :return: return value of the library call
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    try:
        target = self.sessions[session]
        return target.gpib_command(command_byte)
    except KeyError:
        return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
    """Asserts software or hardware trigger.

    Corresponds to viAssertTrigger function of the VISA library.

    :param session: Unique logical identifier to a session.
    :param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
    :return: return value of the library call.
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    try:
        target = self.sessions[session]
        return target.assert_trigger(protocol)
    except KeyError:
        return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
    """Pulse the interface clear line (IFC) for at least 100 microseconds.

    Corresponds to viGpibSendIFC function of the VISA library.

    :param session: Unique logical identifier to a session.
    :return: return value of the library call.
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    if session not in self.sessions:
        return constants.StatusCode.error_invalid_object
    return self.sessions[session].gpib_send_ifc()
def read_stb(self, session):
    """Reads a status byte of the service request.

    Corresponds to viReadSTB function of the VISA library.

    :param session: Unique logical identifier to a session.
    :return: Service request status byte, return value of the library call.
    :rtype: int, :class:`pyvisa.constants.StatusCode`
    """
    if session not in self.sessions:
        return 0, constants.StatusCode.error_invalid_object
    return self.sessions[session].read_stb()
def close(self, session):
    """Closes the specified session, event, or find list.

    Corresponds to viClose function of the VISA library.

    Fix: the handle is now removed from ``self.sessions`` once the session
    is closed. Previously closed handles were left in the map forever, so
    the dictionary (and every closed session object) leaked for the
    lifetime of the library instance.

    :param session: Unique logical identifier to a session, event, or find list.
    :return: return value of the library call.
    :rtype: VISAStatus
    """
    try:
        sess = self.sessions[session]
        # The Resource Manager session maps to the library itself; closing
        # it must not recurse into close().
        if sess is not self:
            sess.close()
        # Drop the handle so the map does not grow without bound and the
        # session object can be garbage-collected.
        del self.sessions[session]
    except KeyError:
        return StatusCode.error_invalid_object
def open_default_resource_manager(self):
    """This function returns a session to the Default Resource Manager resource.

    Corresponds to viOpenDefaultRM function of the VISA library.

    :return: Unique logical identifier to a Default Resource Manager session,
        return value of the library call.
    :rtype: session, VISAStatus
    """
    # The library instance itself acts as the Resource Manager session.
    handle = self._register(self)
    return handle, StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
    """Reads data from device or interface synchronously.

    Corresponds to viRead function of the VISA library.

    :param session: Unique logical identifier to a session.
    :param count: Number of bytes to be read.
    :return: data read, return value of the library call.
    :rtype: bytes, VISAStatus
    """
    # Dispatch to the read method of the session object behind the handle.
    try:
        outcome = self.sessions[session].read(count)
    except KeyError:
        return 0, StatusCode.error_invalid_object
    status = outcome[1]
    if status < 0:
        raise errors.VisaIOError(status)
    return outcome
def write(self, session, data):
    """Writes data to device or interface synchronously.

    Corresponds to viWrite function of the VISA library.

    :param session: Unique logical identifier to a session.
    :param data: data to be written.
    :type data: str
    :return: Number of bytes actually transferred, return value of the library call.
    :rtype: int, VISAStatus
    """
    # Dispatch to the write method of the session object behind the handle.
    try:
        outcome = self.sessions[session].write(data)
    except KeyError:
        return 0, StatusCode.error_invalid_object
    status = outcome[1]
    if status < 0:
        raise errors.VisaIOError(status)
    return outcome
def get_attribute(self, session, attribute):
    """Retrieves the state of an attribute.

    Corresponds to viGetAttribute function of the VISA library.

    :param session: Unique logical identifier to a session, event, or find list.
    :param attribute: Resource attribute for which the state query is made (see Attributes.*)
    :return: The state of the queried attribute for a specified resource,
        return value of the library call.
    :rtype: unicode | str | list | int, VISAStatus
    """
    if session not in self.sessions:
        return None, StatusCode.error_invalid_object
    return self.sessions[session].get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
    """Sets the state of an attribute.

    Corresponds to viSetAttribute function of the VISA library.

    :param session: Unique logical identifier to a session.
    :param attribute: Attribute for which the state is to be modified. (Attributes.*)
    :param attribute_state: The state of the attribute to be set for the specified object.
    :return: return value of the library call.
    :rtype: VISAStatus
    """
    if session not in self.sessions:
        return StatusCode.error_invalid_object
    return self.sessions[session].set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
    """Establishes an access mode to the specified resources.

    Corresponds to viLock function of the VISA library.

    :param session: Unique logical identifier to a session.
    :param lock_type: Specifies the type of lock requested, either
        Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
    :param timeout: Absolute time period (in milliseconds) that a resource
        waits to get unlocked by the locking session before returning an error.
    :param requested_key: This parameter is not used and should be set to
        VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
    :return: access_key that can then be passed to other sessions to share
        the lock, return value of the library call.
    :rtype: str, :class:`pyvisa.constants.StatusCode`
    """
    if session not in self.sessions:
        return StatusCode.error_invalid_object
    return self.sessions[session].lock(lock_type, timeout, requested_key)
def unlock(self, session):
    """Relinquishes a lock for the specified resource.

    Corresponds to viUnlock function of the VISA library.

    :param session: Unique logical identifier to a session.
    :return: return value of the library call.
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    if session not in self.sessions:
        return StatusCode.error_invalid_object
    return self.sessions[session].unlock()
def disable_event(self, session, event_type, mechanism):
    # Event handling is not implemented by this pure-Python backend;
    # this stub exists so viDisableEvent callers do not crash.
    # TODO: implement this for GPIB finalization
    pass
def discard_events(self, session, event_type, mechanism):
    # Event handling is not implemented by this pure-Python backend;
    # this stub exists so viDiscardEvents callers do not crash.
    # TODO: implement this for GPIB finalization
    pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.clear | python | def clear(self, session):
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear() | Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode` | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L198-L211 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually referred to just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource-agnostic
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.gpib_command | python | def gpib_command(self, session, command_byte):
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object | Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode` | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L213-L228 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually referred to just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource-agnostic
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.assert_trigger | python | def assert_trigger(self, session, protocol):
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object | Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode` | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L230-L244 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.close | python | def close(self, session):
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object | Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L274-L288 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.list_resources | python | def list_resources(self, session, query='?*::INSTR'):
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources | Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices. | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L300-L314 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
# Normalise the raw code to a StatusCode enum member when possible;
# unrecognised codes are kept as plain integers.
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
# NOTE(review): if 'arguments' is a plain tuple, an out-of-range access
# raises IndexError, which this KeyError clause would NOT catch --
# confirm the actual container type used for 'arguments'.
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
# Presumably a ctypes-style output parameter wrapping the numeric
# handle -- TODO confirm against the caller.
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might not have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
# Negative status codes are VISA errors and are raised as exceptions.
if ret_value < 0:
raise errors.VisaIOError(ret_value)
# Emit a warning unless this session opted out for this specific code.
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
         access_mode=constants.AccessModes.no_lock,
         open_timeout=constants.VI_TMO_IMMEDIATE):
    """Open a session to the specified resource (viOpen).

    :param session: Resource Manager session (should always be a session
        returned from open_default_resource_manager()).
    :param resource_name: Unique symbolic name of a resource.
    :param access_mode: Specifies the mode by which the resource is to be
        accessed. (constants.AccessModes)
    :param open_timeout: Specifies the maximum time period (in milliseconds)
        that this operation waits before returning an error.
    :return: Unique logical identifier reference to a session, return value
        of the library call.
    :rtype: session, VISAStatus
    """
    try:
        open_timeout = int(open_timeout)
    except ValueError:
        raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
    try:
        parsed = rname.parse_resource_name(resource_name)
    except rname.InvalidResourceName:
        return 0, StatusCode.error_invalid_resource_name
    # Pick the Session subclass registered for this interface/resource-class
    # combination and register the new instance under a fresh handle.
    session_type = sessions.Session.get_session_class(parsed.interface_type_const,
                                                      parsed.resource_class)
    new_session = session_type(session, resource_name, parsed, open_timeout)
    return self._register(new_session), StatusCode.success
def clear(self, session):
    """Clear a device (viClear).

    :param session: Unique logical identifier to a session.
    :return: return value of the library call.
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    sess = self.sessions.get(session)
    if sess is None:
        # Unknown handle: report it as an invalid object.
        return constants.StatusCode.error_invalid_object
    return sess.clear()
def gpib_command(self, session, command_byte):
    """Write a GPIB command byte on the bus (viGpibCommand).

    See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES

    :param command_byte: command byte to send
    :type command_byte: int, must be [0 255]
    :return: return value of the library call
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    # Bug fix: keep the try body limited to the handle lookup -- previously a
    # KeyError raised *inside* Session.gpib_command was silently misreported
    # as an invalid session object.  Also matches the lookup style used by
    # clear()/gpib_send_ifc()/read_stb() in this class.
    try:
        sess = self.sessions[session]
    except KeyError:
        return constants.StatusCode.error_invalid_object
    return sess.gpib_command(command_byte)
def assert_trigger(self, session, protocol):
    """Assert a software or hardware trigger (viAssertTrigger).

    :param session: Unique logical identifier to a session.
    :param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
    :return: return value of the library call.
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    # Bug fix: narrow the except to the handle lookup -- previously a
    # KeyError raised *inside* Session.assert_trigger was silently
    # misreported as an invalid session object.
    try:
        sess = self.sessions[session]
    except KeyError:
        return constants.StatusCode.error_invalid_object
    return sess.assert_trigger(protocol)
def gpib_send_ifc(self, session):
    """Pulse the interface clear line (IFC) for at least 100 microseconds
    (viGpibSendIFC).

    :param session: Unique logical identifier to a session.
    :return: return value of the library call.
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    if session not in self.sessions:
        return constants.StatusCode.error_invalid_object
    return self.sessions[session].gpib_send_ifc()
def read_stb(self, session):
    """Read the status byte of the service request (viReadSTB).

    :param session: Unique logical identifier to a session.
    :return: Service request status byte, return value of the library call.
    :rtype: int, :class:`pyvisa.constants.StatusCode`
    """
    if session not in self.sessions:
        return 0, constants.StatusCode.error_invalid_object
    return self.sessions[session].read_stb()
def close(self, session):
    """Close the specified session, event, or find list (viClose).

    :param session: Unique logical identifier to a session, event, or find list.
    :return: return value of the library call.
    :rtype: VISAStatus
    """
    try:
        sess = self.sessions[session]
    except KeyError:
        return StatusCode.error_invalid_object
    # The resource-manager handle maps to the library object itself; only
    # real sessions own low-level resources that must be closed.
    if sess is not self:
        sess.close()
    # Bug fix: drop the handle from the registry so closed sessions do not
    # accumulate (leak) and stale handles are rejected afterwards, and
    # return the documented status instead of implicitly returning None.
    del self.sessions[session]
    return StatusCode.success
def open_default_resource_manager(self):
    """Return a session to the Default Resource Manager resource
    (viOpenDefaultRM).

    The library object itself plays the role of the resource-manager
    session, so it is simply registered under a fresh handle.

    :return: Unique logical identifier to a Default Resource Manager
        session, return value of the library call.
    :rtype: session, VISAStatus
    """
    handle = self._register(self)
    return handle, StatusCode.success
def read(self, session, count):
    """Read data from device or interface synchronously (viRead).

    :param session: Unique logical identifier to a session.
    :param count: Number of bytes to be read.
    :return: data read, return value of the library call.
    :rtype: bytes, VISAStatus
    """
    # Bug fix: narrow the except to the handle lookup (a KeyError raised
    # inside Session.read must not be misreported) and return empty bytes
    # on the error path -- the documented rtype is bytes, not the int 0.
    try:
        sess = self.sessions[session]
    except KeyError:
        return b'', StatusCode.error_invalid_object
    data, status = sess.read(count)
    # pyvisa-py has no ctypes return-value handler, so negative status
    # codes are raised as exceptions right here.
    if status < 0:
        raise errors.VisaIOError(status)
    return data, status
def write(self, session, data):
    """Write data to device or interface synchronously (viWrite).

    :param session: Unique logical identifier to a session.
    :param data: data to be written.
    :type data: str
    :return: Number of bytes actually transferred, return value of the library call.
    :rtype: int, VISAStatus
    """
    # Bug fix: keep the try body limited to the handle lookup -- previously
    # a KeyError raised *inside* Session.write was silently misreported as
    # an invalid session object.
    try:
        sess = self.sessions[session]
    except KeyError:
        return 0, StatusCode.error_invalid_object
    ret = sess.write(data)
    # Negative status codes are raised as exceptions, mirroring what the
    # ctypes return handler does for NI-VISA.
    if ret[1] < 0:
        raise errors.VisaIOError(ret[1])
    return ret
def get_attribute(self, session, attribute):
    """Retrieve the state of an attribute (viGetAttribute).

    :param session: Unique logical identifier to a session, event, or find list.
    :param attribute: Resource attribute for which the state query is made
        (see Attributes.*)
    :return: The state of the queried attribute for a specified resource,
        return value of the library call.
    :rtype: unicode | str | list | int, VISAStatus
    """
    if session not in self.sessions:
        return None, StatusCode.error_invalid_object
    return self.sessions[session].get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
    """Set the state of an attribute (viSetAttribute).

    :param session: Unique logical identifier to a session.
    :param attribute: Attribute for which the state is to be modified.
        (Attributes.*)
    :param attribute_state: The state of the attribute to be set for the
        specified object.
    :return: return value of the library call.
    :rtype: VISAStatus
    """
    sess = self.sessions.get(session)
    if sess is None:
        return StatusCode.error_invalid_object
    return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
    """Establish an access mode to the specified resources (viLock).

    :param session: Unique logical identifier to a session.
    :param lock_type: Specifies the type of lock requested, either
        Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
    :param timeout: Absolute time period (in milliseconds) that a resource
        waits to get unlocked by the locking session before returning an error.
    :param requested_key: This parameter is not used and should be set to
        VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
    :return: access_key that can then be passed to other sessions to share
        the lock, return value of the library call.
    :rtype: str, :class:`pyvisa.constants.StatusCode`
    """
    sess = self.sessions.get(session)
    if sess is None:
        return StatusCode.error_invalid_object
    return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
    """Relinquish a lock for the specified resource (viUnlock).

    :param session: Unique logical identifier to a session.
    :return: return value of the library call.
    :rtype: :class:`pyvisa.constants.StatusCode`
    """
    sess = self.sessions.get(session)
    if sess is None:
        return StatusCode.error_invalid_object
    return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
# NOTE(review): event support is unimplemented -- this is a silent no-op,
# so callers expecting viDisableEvent semantics get no status reported.
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
# NOTE(review): silent no-op, same caveat as disable_event above.
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.read | python | def read(self, session, count):
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret | Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L316-L336 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually referred to just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource-agnostic
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might not have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# NOTE(review): despite the docstring, the value returned is whatever
# rname.filter produces from a list -- confirm tuple vs list with callers.
# 'query' appears to use VISA resource-name wildcard syntax (via
# rname.filter), not a Python re pattern -- confirm. The 'session'
# (resource-manager handle) is not used by this method.
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.write | python | def write(self, session, data):
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret | Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L338-L359 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually referred to just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource-agnostic
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might not have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.get_attribute | python | def get_attribute(self, session, attribute):
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute) | Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L361-L376 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.set_attribute | python | def set_attribute(self, session, attribute, attribute_state):
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state) | Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L378-L395 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.lock | python | def lock(self, session, lock_type, timeout, requested_key=None):
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key) | Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode` | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L397-L415 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def unlock(self, session):
"""Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock()
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/highlevel.py | PyVisaLibrary.unlock | python | def unlock(self, session):
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.unlock() | Relinquishes a lock for the specified resource.
Corresponds to viUnlock function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode` | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/highlevel.py#L417-L431 | null | class PyVisaLibrary(highlevel.VisaLibraryBase):
"""A pure Python backend for PyVISA.
The object is basically a dispatcher with some common functions implemented.
When a new resource object is requested to pyvisa, the library creates a Session object
(that knows how to perform low-level communication operations) associated with a session handle
(a number, usually refered just as session).
A call to a library function is handled by PyVisaLibrary if it involves a resource agnosting
function or dispatched to the correct session object (obtained from the session id).
Importantly, the user is unaware of this. PyVisaLibrary behaves for the user just as NIVisaLibrary.
"""
# Try to import packages implementing lower level functionality.
try:
from .serial import SerialSession
logger.debug('SerialSession was correctly imported.')
except Exception as e:
logger.debug('SerialSession was not imported %s.' % e)
try:
from .usb import USBSession, USBRawSession
logger.debug('USBSession and USBRawSession were correctly imported.')
except Exception as e:
logger.debug('USBSession and USBRawSession were not imported %s.' % e)
try:
from .tcpip import TCPIPInstrSession, TCPIPSocketSession
logger.debug('TCPIPSession was correctly imported.')
except Exception as e:
logger.debug('TCPIPSession was not imported %s.' % e)
try:
from .gpib import GPIBSession
logger.debug('GPIBSession was correctly imported.')
except Exception as e:
logger.debug('GPIBSession was not imported %s.' % e)
@classmethod
def get_session_classes(cls):
return sessions.Session._session_classes
@classmethod
def iter_session_classes_issues(cls):
return sessions.Session.iter_session_classes_issues()
@staticmethod
def get_debug_info():
"""Return a list of lines with backend info.
"""
from . import __version__
d = OrderedDict()
d['Version'] = '%s' % __version__
for key, val in PyVisaLibrary.get_session_classes().items():
key_name = '%s %s' % (key[0].name.upper(), key[1])
try:
d[key_name] = getattr(val, 'session_issue').split('\n')
except AttributeError:
d[key_name] = 'Available ' + val.get_low_level_info()
return d
def _init(self):
#: map session handle to session object.
#: dict[int, session.Session]
self.sessions = {}
def _register(self, obj):
"""Creates a random but unique session handle for a session object,
register it in the sessions dictionary and return the value
:param obj: a session object.
:return: session handle
:rtype: int
"""
session = None
while session is None or session in self.sessions:
session = random.randint(1000000, 9999999)
self.sessions[session] = obj
return session
def _return_handler(self, ret_value, func, arguments):
"""Check return values for errors and warnings.
TODO: THIS IS JUST COPIED PASTED FROM NIVisaLibrary.
Needs to be adapted.
"""
logger.debug('%s%s -> %r',
func.__name__, _args_to_str(arguments), ret_value,
extra=self._logging_extra)
try:
ret_value = StatusCode(ret_value)
except ValueError:
pass
self._last_status = ret_value
# The first argument of almost all registered visa functions is a session.
# We store the error code per session
session = None
if func.__name__ not in ('viFindNext', ):
try:
session = arguments[0]
except KeyError:
raise Exception('Function %r does not seem to be a valid '
'visa function (len args %d)' % (func, len(arguments)))
# Functions that use the first parameter to get a session value.
if func.__name__ in ('viOpenDefaultRM', ):
# noinspection PyProtectedMember
session = session._obj.value
if isinstance(session, integer_types):
self._last_status_in_session[session] = ret_value
else:
# Functions that might or might have a session in the first argument.
if func.__name__ not in ('viClose', 'viGetAttribute', 'viSetAttribute', 'viStatusDesc'):
raise Exception('Function %r does not seem to be a valid '
'visa function (type args[0] %r)' % (func, type(session)))
if ret_value < 0:
raise errors.VisaIOError(ret_value)
if ret_value in self.issue_warning_on:
if session and ret_value not in self._ignore_warning_in_session[session]:
warnings.warn(errors.VisaIOWarning(ret_value), stacklevel=2)
return ret_value
# noinspection PyShadowingBuiltins
def open(self, session, resource_name,
access_mode=constants.AccessModes.no_lock,
open_timeout=constants.VI_TMO_IMMEDIATE):
"""Opens a session to the specified resource.
Corresponds to viOpen function of the VISA library.
:param session: Resource Manager session (should always be a session returned from open_default_resource_manager()).
:param resource_name: Unique symbolic name of a resource.
:param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes)
:param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits
before returning an error.
:return: Unique logical identifier reference to a session, return value of the library call.
:rtype: session, VISAStatus
"""
try:
open_timeout = int(open_timeout)
except ValueError:
raise ValueError('open_timeout (%r) must be an integer (or compatible type)' % open_timeout)
try:
parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
return 0, StatusCode.error_invalid_resource_name
cls = sessions.Session.get_session_class(parsed.interface_type_const, parsed.resource_class)
sess = cls(session, resource_name, parsed, open_timeout)
return self._register(sess), StatusCode.success
def clear(self, session):
"""Clears a device.
Corresponds to viClear function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.clear()
def gpib_command(self, session, command_byte):
"""Write GPIB command byte on the bus.
Corresponds to viGpibCommand function of the VISA library.
See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES
:param command_byte: command byte to send
:type command_byte: int, must be [0 255]
:return: return value of the library call
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].gpib_command(command_byte)
except KeyError:
return constants.StatusCode.error_invalid_object
def assert_trigger(self, session, protocol):
"""Asserts software or hardware trigger.
Corresponds to viAssertTrigger function of the VISA library.
:param session: Unique logical identifier to a session.
:param protocol: Trigger protocol to use during assertion. (Constants.PROT*)
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
return self.sessions[session].assert_trigger(protocol)
except KeyError:
return constants.StatusCode.error_invalid_object
def gpib_send_ifc(self, session):
"""Pulse the interface clear line (IFC) for at least 100 microseconds.
Corresponds to viGpibSendIFC function of the VISA library.
:param session: Unique logical identifier to a session.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return constants.StatusCode.error_invalid_object
return sess.gpib_send_ifc()
def read_stb(self, session):
"""Reads a status byte of the service request.
Corresponds to viReadSTB function of the VISA library.
:param session: Unique logical identifier to a session.
:return: Service request status byte, return value of the library call.
:rtype: int, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return 0, constants.StatusCode.error_invalid_object
return sess.read_stb()
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close()
except KeyError:
return StatusCode.error_invalid_object
def open_default_resource_manager(self):
"""This function returns a session to the Default Resource Manager resource.
Corresponds to viOpenDefaultRM function of the VISA library.
:return: Unique logical identifier to a Default Resource Manager session, return value of the library call.
:rtype: session, VISAStatus
"""
return self._register(self), StatusCode.success
def list_resources(self, session, query='?*::INSTR'):
"""Returns a tuple of all connected devices matching query.
:param query: regular expression used to match devices.
"""
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = sum([st.list_resources()
for key, st in sessions.Session.iter_valid_session_classes()], [])
resources = rname.filter(resources, query)
return resources
def read(self, session, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param session: Unique logical identifier to a session.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: bytes, VISAStatus
"""
# from the session handle, dispatch to the read method of the session object.
try:
ret = self.sessions[session].read(count)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def write(self, session, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param session: Unique logical identifier to a session.
:param data: data to be written.
:type data: str
:return: Number of bytes actually transferred, return value of the library call.
:rtype: int, VISAStatus
"""
# from the session handle, dispatch to the write method of the session object.
try:
ret = self.sessions[session].write(data)
except KeyError:
return 0, StatusCode.error_invalid_object
if ret[1] < 0:
raise errors.VisaIOError(ret[1])
return ret
def get_attribute(self, session, attribute):
"""Retrieves the state of an attribute.
Corresponds to viGetAttribute function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:param attribute: Resource attribute for which the state query is made (see Attributes.*)
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: unicode | str | list | int, VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return None, StatusCode.error_invalid_object
return sess.get_attribute(attribute)
def set_attribute(self, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.set_attribute(attribute, attribute_state)
def lock(self, session, lock_type, timeout, requested_key=None):
"""Establishes an access mode to the specified resources.
Corresponds to viLock function of the VISA library.
:param session: Unique logical identifier to a session.
:param lock_type: Specifies the type of lock requested, either Constants.EXCLUSIVE_LOCK or Constants.SHARED_LOCK.
:param timeout: Absolute time period (in milliseconds) that a resource waits to get unlocked by the
locking session before returning an error.
:param requested_key: This parameter is not used and should be set to VI_NULL when lockType is VI_EXCLUSIVE_LOCK.
:return: access_key that can then be passed to other sessions to share the lock, return value of the library call.
:rtype: str, :class:`pyvisa.constants.StatusCode`
"""
try:
sess = self.sessions[session]
except KeyError:
return StatusCode.error_invalid_object
return sess.lock(lock_type, timeout, requested_key)
def disable_event(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
def discard_events(self, session, event_type, mechanism):
# TODO: implement this for GPIB finalization
pass
|
pyvisa/pyvisa-py | pyvisa-py/protocols/usbraw.py | find_raw_devices | python | def find_raw_devices(vendor=None, product=None, serial_number=None,
custom_match=None, **kwargs):
def is_usbraw(dev):
if custom_match and not custom_match(dev):
return False
return bool(find_interfaces(dev, bInterfaceClass=0xFF,
bInterfaceSubClass=0xFF))
return find_devices(vendor, product, serial_number, is_usbraw, **kwargs) | Find connected USB RAW devices. See usbutil.find_devices for more info. | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/protocols/usbraw.py#L24-L34 | [
"def find_devices(vendor=None, product=None, serial_number=None,\n custom_match=None, **kwargs):\n \"\"\"Find connected USB devices matching certain keywords.\n\n Wildcards can be used for vendor, product and serial_number.\n\n :param vendor: name or id of the vendor (manufacturer)\n :pa... | # -*- coding: utf-8 -*-
"""
pyvisa-py.protocols.usbraw
~~~~~~~~~~~~~~~~~~~~~~~~~~
Implements Session to control USB Raw devices
Loosely based on PyUSBTMC:python module to handle USB-TMC(Test and Measurement class) devices.
by Noboru Yamamot, Accl. Lab, KEK, JAPAN
This file is an offspring of the Lantz Project.
:copyright: 2014 by PyVISA-py Authors, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import division, unicode_literals, print_function, absolute_import
from .usbtmc import USBRaw as USBRaw
from .usbutil import find_devices, find_interfaces
class USBRawDevice(USBRaw):
RECV_CHUNK = 1024 ** 2
find_devices = staticmethod(find_raw_devices)
def __init__(self, vendor=None, product=None, serial_number=None, **kwargs):
super(USBRawDevice, self).__init__(vendor, product, serial_number, **kwargs)
if not (self.usb_recv_ep and self.usb_send_ep):
raise ValueError("USBRAW device must have both Bulk-In and Bulk-out endpoints.")
def write(self, data):
"""Send raw bytes to the instrument.
:param data: bytes to be sent to the instrument
:type data: bytes
"""
begin, end, size = 0, 0, len(data)
bytes_sent = 0
raw_write = super(USBRawDevice, self).write
while not end > size:
begin = end
end = begin + self.RECV_CHUNK
bytes_sent += raw_write(data[begin:end])
return bytes_sent
def read(self, size):
"""Read raw bytes from the instrument.
:param size: amount of bytes to be sent to the instrument
:type size: integer
:return: received bytes
:return type: bytes
"""
raw_read = super(USBRawDevice, self).read
received = bytearray()
while not len(received) >= size:
resp = raw_read(self.RECV_CHUNK)
received.extend(resp)
return bytes(received)
|
pyvisa/pyvisa-py | pyvisa-py/protocols/usbraw.py | USBRawDevice.write | python | def write(self, data):
begin, end, size = 0, 0, len(data)
bytes_sent = 0
raw_write = super(USBRawDevice, self).write
while not end > size:
begin = end
end = begin + self.RECV_CHUNK
bytes_sent += raw_write(data[begin:end])
return bytes_sent | Send raw bytes to the instrument.
:param data: bytes to be sent to the instrument
:type data: bytes | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/protocols/usbraw.py#L50-L67 | [
"def write(self, data):\n \"\"\"Send raw bytes to the instrument.\n\n :param data: bytes to be sent to the instrument\n :type data: bytes\n \"\"\"\n\n try:\n return self.usb_send_ep.write(data)\n except usb.core.USBError as e:\n raise ValueError(str(e))\n"
] | class USBRawDevice(USBRaw):
RECV_CHUNK = 1024 ** 2
find_devices = staticmethod(find_raw_devices)
def __init__(self, vendor=None, product=None, serial_number=None, **kwargs):
super(USBRawDevice, self).__init__(vendor, product, serial_number, **kwargs)
if not (self.usb_recv_ep and self.usb_send_ep):
raise ValueError("USBRAW device must have both Bulk-In and Bulk-out endpoints.")
def read(self, size):
"""Read raw bytes from the instrument.
:param size: amount of bytes to be sent to the instrument
:type size: integer
:return: received bytes
:return type: bytes
"""
raw_read = super(USBRawDevice, self).read
received = bytearray()
while not len(received) >= size:
resp = raw_read(self.RECV_CHUNK)
received.extend(resp)
return bytes(received)
|
pyvisa/pyvisa-py | pyvisa-py/protocols/usbraw.py | USBRawDevice.read | python | def read(self, size):
raw_read = super(USBRawDevice, self).read
received = bytearray()
while not len(received) >= size:
resp = raw_read(self.RECV_CHUNK)
received.extend(resp)
return bytes(received) | Read raw bytes from the instrument.
:param size: amount of bytes to be sent to the instrument
:type size: integer
:return: received bytes
:return type: bytes | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/protocols/usbraw.py#L69-L87 | [
"def read(self, size):\n \"\"\"Receive raw bytes to the instrument.\n\n :param size: number of bytes to receive\n :return: received bytes\n :return type: bytes\n \"\"\"\n\n if size <= 0:\n size = 1\n\n data = array_to_bytes(self.usb_recv_ep.read(size, self.timeout))\n\n return data\n"... | class USBRawDevice(USBRaw):
RECV_CHUNK = 1024 ** 2
find_devices = staticmethod(find_raw_devices)
def __init__(self, vendor=None, product=None, serial_number=None, **kwargs):
super(USBRawDevice, self).__init__(vendor, product, serial_number, **kwargs)
if not (self.usb_recv_ep and self.usb_send_ep):
raise ValueError("USBRAW device must have both Bulk-In and Bulk-out endpoints.")
def write(self, data):
"""Send raw bytes to the instrument.
:param data: bytes to be sent to the instrument
:type data: bytes
"""
begin, end, size = 0, 0, len(data)
bytes_sent = 0
raw_write = super(USBRawDevice, self).write
while not end > size:
begin = end
end = begin + self.RECV_CHUNK
bytes_sent += raw_write(data[begin:end])
return bytes_sent
|
pyvisa/pyvisa-py | pyvisa-py/gpib.py | _find_listeners | python | def _find_listeners():
for i in range(31):
try:
if gpib.listener(BOARD, i) and gpib.ask(BOARD, 1) != i:
yield i
except gpib.GpibError as e:
logger.debug("GPIB error in _find_listeners(): %s", repr(e)) | Find GPIB listeners. | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/gpib.py#L48-L56 | null | # -*- coding: utf-8 -*-
"""
pyvisa-py.gpib
~~~~~~~~~~~~~~
GPIB Session implementation using linux-gpib or gpib-ctypes.
:copyright: 2015 by PyVISA-py Authors, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import division, unicode_literals, print_function, absolute_import
from bisect import bisect
from pyvisa import constants, logger, attributes
from .sessions import Session, UnknownAttribute
try:
import gpib
from Gpib import Gpib
except ImportError:
try:
from gpib_ctypes import gpib
from gpib_ctypes.Gpib import Gpib
except ImportError as e:
Session.register_unavailable(constants.InterfaceType.gpib, 'INSTR',
'Please install linux-gpib (Linux) or '
'gpib-ctypes (Windows, Linux) to use '
'this resource type.\n%s' % e)
raise
# patch Gpib to avoid double closing of handles
def _patch_Gpib():
if not hasattr(Gpib, "close"):
_old_del = Gpib.__del__
def _inner(self):
_old_del(self)
self._own = False
Gpib.__del__ = _inner
Gpib.close = _inner
_patch_Gpib()
StatusCode = constants.StatusCode
# linux-gpib timeout constants, in seconds. See GPIBSession._set_timeout.
TIMETABLE = (0, 10e-6, 30e-6, 100e-6, 300e-6, 1e-3, 3e-3, 10e-3, 30e-3, 100e-3, 300e-3, 1.0, 3.0,
10.0, 30.0, 100.0, 300.0, 1000.0)
# TODO: Check board indices other than 0.
BOARD = 0
# TODO: Check secondary addresses.
@Session.register(constants.InterfaceType.gpib, 'INSTR')
class GPIBSession(Session):
    """A GPIB Session that uses linux-gpib to do the low level communication.
    """

    @staticmethod
    def list_resources():
        return ['GPIB0::%d::INSTR' % pad for pad in _find_listeners()]

    @classmethod
    def get_low_level_info(cls):
        try:
            ver = gpib.version()
        except AttributeError:
            # gpib.version() only exists in linux-gpib >= 4.0
            ver = '< 4.0'
        return 'via Linux GPIB (%s)' % ver

    def after_parsing(self):
        minor = int(self.parsed.board)
        pad = int(self.parsed.primary_address)
        sad = 0
        timeout = 13
        send_eoi = 1
        eos_mode = 0
        self.interface = Gpib(name=minor, pad=pad, sad=sad, timeout=timeout,
                              send_eoi=send_eoi, eos_mode=eos_mode)
        self.controller = Gpib(name=minor)  # this is the bus controller device
        # force timeout setting to interface
        self.set_attribute(constants.VI_ATTR_TMO_VALUE,
                           attributes.AttributesByID[constants.VI_ATTR_TMO_VALUE].default)

    def _get_timeout(self, attribute):
        if self.interface:
            # 0x3 is the hexadecimal reference to the IbaTMO (timeout)
            # configuration option in linux-gpib.
            gpib_timeout = self.interface.ask(3)
            if gpib_timeout and gpib_timeout < len(TIMETABLE):
                self.timeout = TIMETABLE[gpib_timeout]
            else:
                # value is 0 or out of range -> infinite
                self.timeout = None
        return super(GPIBSession, self)._get_timeout(attribute)

    def _set_timeout(self, attribute, value):
        """
        linux-gpib only supports 18 discrete timeout values. If a timeout
        value other than these is requested, it will be rounded up to the closest
        available value. Values greater than the largest available timeout value
        will instead be rounded down. The available timeout values are:
        0   Never timeout.
        1   10 microseconds
        2   30 microseconds
        3   100 microseconds
        4   300 microseconds
        5   1 millisecond
        6   3 milliseconds
        7   10 milliseconds
        8   30 milliseconds
        9   100 milliseconds
        10  300 milliseconds
        11  1 second
        12  3 seconds
        13  10 seconds
        14  30 seconds
        15  100 seconds
        16  300 seconds
        17  1000 seconds
        """
        status = super(GPIBSession, self)._set_timeout(attribute, value)
        if self.interface:
            if self.timeout is None:
                gpib_timeout = 0
            else:
                # round up only values that are higher by 0.1% than discrete values
                gpib_timeout = min(bisect(TIMETABLE, 0.999 * self.timeout), 17)
                self.timeout = TIMETABLE[gpib_timeout]
            self.interface.timeout(gpib_timeout)
        return status

    def close(self):
        self.interface.close()
        self.controller.close()

    def read(self, count):
        """Reads data from device or interface synchronously.

        Corresponds to viRead function of the VISA library.

        :param count: Number of bytes to be read.
        :return: data read, return value of the library call.
        :rtype: bytes, constants.StatusCode
        """
        # 0x2000 = 8192 = END
        checker = lambda current: self.interface.ibsta() & 8192

        reader = lambda: self.interface.read(count)

        return self._read(reader, count, checker, False, None, False, gpib.GpibError)

    def write(self, data):
        """Writes data to device or interface synchronously.

        Corresponds to viWrite function of the VISA library.

        :param data: data to be written.
        :type data: bytes
        :return: Number of bytes actually transferred, return value of the library call.
        :rtype: int, VISAStatus
        """
        # Lazy %-args: the message is only formatted if DEBUG is enabled.
        logger.debug('GPIB.write %r', data)

        try:
            self.interface.write(data)
            count = self.interface.ibcnt()  # number of bytes transmitted

            return count, StatusCode.success

        except gpib.GpibError:
            # 0x4000 = 16384 = TIMO
            if self.interface.ibsta() & 16384:
                return 0, StatusCode.error_timeout
            else:
                return 0, StatusCode.error_system_error

    def clear(self):
        """Clears a device.

        Corresponds to viClear function of the VISA library.

        :param session: Unique logical identifier to a session.
        :return: return value of the library call.
        :rtype: :class:`pyvisa.constants.StatusCode`
        """
        logger.debug('GPIB.device clear')
        try:
            self.interface.clear()
            return StatusCode.success
        except gpib.GpibError:
            return StatusCode.error_system_error

    def gpib_command(self, command_byte):
        """Write GPIB command byte on the bus.

        Corresponds to viGpibCommand function of the VISA library.
        See: https://linux-gpib.sourceforge.io/doc_html/gpib-protocol.html#REFERENCE-COMMAND-BYTES

        :param command_byte: command byte to send
        :type command_byte: int, must be [0 255]
        :return: Number of written bytes, return value of the library call.
        :rtype: int, :class:`pyvisa.constants.StatusCode`
        """
        if 0 <= command_byte <= 255:
            data = chr(command_byte)
        else:
            return 0, StatusCode.error_nonsupported_operation

        try:
            return self.controller.command(data), StatusCode.success
        except gpib.GpibError:
            return 0, StatusCode.error_system_error

    def assert_trigger(self, protocol):
        """Asserts hardware trigger.
        Only supports protocol = constants.VI_TRIG_PROT_DEFAULT

        :return: return value of the library call.
        :rtype: :class:`pyvisa.constants.StatusCode`
        """
        logger.debug('GPIB.device assert hardware trigger')

        try:
            if protocol == constants.VI_TRIG_PROT_DEFAULT:
                self.interface.trigger()
                return StatusCode.success
            else:
                return StatusCode.error_nonsupported_operation
        except gpib.GpibError:
            return StatusCode.error_system_error

    def gpib_send_ifc(self):
        """Pulse the interface clear line (IFC) for at least 100 microseconds.

        Corresponds to viGpibSendIFC function of the VISA library.

        :param session: Unique logical identifier to a session.
        :return: return value of the library call.
        :rtype: :class:`pyvisa.constants.StatusCode`
        """
        logger.debug('GPIB.interface clear')
        try:
            self.controller.interface_clear()
            return StatusCode.success
        except gpib.GpibError:
            return StatusCode.error_system_error

    def _get_attribute(self, attribute):
        """Get the value for a given VISA attribute for this session.

        Use to implement custom logic for attributes.

        :param attribute: Resource attribute for which the state query is made
        :return: The state of the queried attribute for a specified resource, return value of the library call.
        :rtype: (unicode | str | list | int, VISAStatus)
        """
        if attribute == constants.VI_ATTR_GPIB_READDR_EN:
            # IbaREADDR 0x6
            # Setting has no effect in linux-gpib.
            return self.interface.ask(6), StatusCode.success

        elif attribute == constants.VI_ATTR_GPIB_PRIMARY_ADDR:
            # IbaPAD 0x1
            return self.interface.ask(1), StatusCode.success

        elif attribute == constants.VI_ATTR_GPIB_SECONDARY_ADDR:
            # IbaSAD 0x2
            # Remove 0x60 because National Instruments.
            # Query the board once and reuse the value (the original code
            # issued three identical ibask calls and left `sad` unused).
            sad = self.interface.ask(2)
            if sad:
                return sad - 96, StatusCode.success
            else:
                return constants.VI_NO_SEC_ADDR, StatusCode.success

        elif attribute == constants.VI_ATTR_GPIB_REN_STATE:
            try:
                lines = self.controller.lines()
                if not lines & gpib.ValidREN:
                    return constants.VI_STATE_UNKNOWN, StatusCode.success
                if lines & gpib.BusREN:
                    return constants.VI_STATE_ASSERTED, StatusCode.success
                else:
                    return constants.VI_STATE_UNASSERTED, StatusCode.success
            except AttributeError:
                # some versions of linux-gpib do not expose Gpib.lines()
                return constants.VI_STATE_UNKNOWN, StatusCode.success

        elif attribute == constants.VI_ATTR_GPIB_UNADDR_EN:
            # IbaUnAddr 0x1b
            if self.interface.ask(27):
                return constants.VI_TRUE, StatusCode.success
            else:
                return constants.VI_FALSE, StatusCode.success

        elif attribute == constants.VI_ATTR_SEND_END_EN:
            # IbaEndBitIsNormal 0x1a
            if self.interface.ask(26):
                return constants.VI_TRUE, StatusCode.success
            else:
                return constants.VI_FALSE, StatusCode.success

        elif attribute == constants.VI_ATTR_INTF_NUM:
            # IbaBNA 0x200
            return self.interface.ask(512), StatusCode.success

        elif attribute == constants.VI_ATTR_INTF_TYPE:
            return constants.InterfaceType.gpib, StatusCode.success

        raise UnknownAttribute(attribute)

    def _set_attribute(self, attribute, attribute_state):
        """Sets the state of an attribute.

        Corresponds to viSetAttribute function of the VISA library.

        :param attribute: Attribute for which the state is to be modified. (Attributes.*)
        :param attribute_state: The state of the attribute to be set for the specified object.
        :return: return value of the library call.
        :rtype: VISAStatus
        """
        if attribute == constants.VI_ATTR_GPIB_READDR_EN:
            # IbcREADDR 0x6
            # Setting has no effect in linux-gpib.
            if isinstance(attribute_state, int):
                self.interface.config(6, attribute_state)
                return StatusCode.success
            else:
                return StatusCode.error_nonsupported_attribute_state

        elif attribute == constants.VI_ATTR_GPIB_PRIMARY_ADDR:
            # IbcPAD 0x1
            if isinstance(attribute_state, int) and 0 <= attribute_state <= 30:
                self.interface.config(1, attribute_state)
                return StatusCode.success
            else:
                return StatusCode.error_nonsupported_attribute_state

        elif attribute == constants.VI_ATTR_GPIB_SECONDARY_ADDR:
            # IbcSAD 0x2
            # Add 0x60 because National Instruments.
            if isinstance(attribute_state, int) and 0 <= attribute_state <= 30:
                if self.interface.ask(2):
                    self.interface.config(2, attribute_state + 96)
                    return StatusCode.success
                else:
                    return StatusCode.error_nonsupported_attribute
            else:
                return StatusCode.error_nonsupported_attribute_state

        elif attribute == constants.VI_ATTR_GPIB_UNADDR_EN:
            # IbcUnAddr 0x1b
            try:
                self.interface.config(27, attribute_state)
                return StatusCode.success
            except gpib.GpibError:
                return StatusCode.error_nonsupported_attribute_state

        elif attribute == constants.VI_ATTR_SEND_END_EN:
            # IbcEndBitIsNormal 0x1a
            if isinstance(attribute_state, int):
                self.interface.config(26, attribute_state)
                return StatusCode.success
            else:
                return StatusCode.error_nonsupported_attribute_state

        raise UnknownAttribute(attribute)

    def read_stb(self):
        try:
            return self.interface.serial_poll(), StatusCode.success
        except gpib.GpibError:
            return 0, StatusCode.error_system_error
|
pyvisa/pyvisa-py | pyvisa-py/protocols/usbutil.py | find_devices | python | def find_devices(vendor=None, product=None, serial_number=None,
custom_match=None, **kwargs):
kwargs = kwargs or {}
attrs = {}
if isinstance(vendor, str):
attrs['manufacturer'] = vendor
elif vendor is not None:
kwargs['idVendor'] = vendor
if isinstance(product, str):
attrs['product'] = product
elif product is not None:
kwargs['idProduct'] = product
if serial_number:
attrs['serial_number'] = str(serial_number)
if attrs:
def cm(dev):
if custom_match is not None and not custom_match(dev):
return False
for attr, pattern in attrs.items():
if not fnmatch(getattr(dev, attr).lower(), pattern.lower()):
return False
return True
else:
cm = custom_match
return usb.core.find(find_all=True, custom_match=cm, **kwargs) | Find connected USB devices matching certain keywords.
Wildcards can be used for vendor, product and serial_number.
:param vendor: name or id of the vendor (manufacturer)
:param product: name or id of the product
:param serial_number: serial number.
:param custom_match: callable returning True or False that takes a device as only input.
:param kwargs: other properties to match. See usb.core.find
:return: | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/protocols/usbutil.py#L158-L197 | null | # -*- coding: utf-8 -*-
"""
pyvisa-py.usb
~~~~~~~~~~~~~
Serial Session implementation using PyUSB.
See the following link for more information about USB.
http://www.beyondlogic.org/usbnutshell/usb5.shtml
This file is an offspring of the Lantz Project.
:copyright: 2014 by PyVISA-py Authors, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import division, unicode_literals, print_function, absolute_import
from fnmatch import fnmatch
import usb
from usb.util import (get_string as usb_get_string,
find_descriptor as usb_find_desc)
# Map of USB base class code -> (descriptor usage, human-readable class name).
# "Device"/"Interface"/"Both" states which descriptor type may carry the code.
ClassCodes = {
    0x00: ('Device', 'Use class information in the Interface Descriptors'),
    0x01: ('Interface', 'Audio'),
    0x02: ('Both', 'Communications and CDC Control'),
    0x03: ('Interface', 'HID (Human Interface Device)'),
    0x05: ('Interface', 'Physical'),
    0x06: ('Interface', 'Image'),
    0x07: ('Interface', 'Printer'),
    0x08: ('Interface', 'Mass Storage'),
    0x09: ('Device', 'Hub'),
    0x0A: ('Interface', 'CDC-Data'),
    0x0B: ('Interface', 'Smart Card'),
    0x0D: ('Interface', 'Content Security'),
    0x0E: ('Interface', 'Video'),
    0x0F: ('Interface', 'Personal Healthcare'),
    0x10: ('Interface', 'Audio/Video Devices'),
    0xDC: ('Both', 'Diagnostic Device'),
    0xE0: ('Interface', 'Wireless Controller'),
    0xEF: ('Both', 'Miscellaneous'),
    0xFE: ('Interface', 'Application Specific'),
    0xFF: ('Both', 'Vendor Specific')
}

# Map of (class, subclass, protocol) -> device description.
# ``None`` in the subclass/protocol position acts as a wildcard for that byte.
AllCodes = {
    (0x00, 0x00, 0x00): 'Use class code info from Interface Descriptors',
    (0x01, None, None): 'Audio device',
    (0x02, None, None): 'Communication device class',
    (0x03, None, None): 'HID device class',
    (0x05, None, None): 'Physical device class',
    (0x06, 0x01, 0x01): 'Still Imaging device',
    (0x07, None, None): 'Printer device',
    (0x08, None, None): 'Mass Storage device',
    (0x09, 0x00, 0x00): 'Full speed Hub',
    (0x09, 0x00, 0x01): 'Hi-speed hub with single TT',
    (0x09, 0x00, 0x02): 'Hi-speed hub with multiple TTs',
    (0x0A, None, None): 'CDC data device',
    (0x0B, None, None): 'Smart Card device',
    (0x0D, 0x00, 0x00): 'Content Security device',
    (0x0E, None, None): 'Video device',
    (0x0F, None, None): 'Personal Healthcare device',
    (0x10, 0x01, 0x00): 'Control Interface',
    (0x10, 0x02, 0x00): 'Data Video Streaming Interface',
    (0x10, 0x03, 0x00): 'VData Audio Streaming Interface',
    (0xDC, 0x01, 0x01): 'USB2 Compliance Device',
    (0xE0, 0x01, 0x01): 'Bluetooth Programming Interface.',
    (0xE0, 0x01, 0x02): 'UWB Radio Control Interface.',
    (0xE0, 0x01, 0x03): 'Remote NDIS',
    (0xE0, 0x01, 0x04): 'Bluetooth AMP Controller.',
    (0xE0, 0x2, 0x01): 'Host Wire Adapter Control/Data interface.',
    (0xE0, 0x2, 0x02): 'Device Wire Adapter Control/Data interface.',
    (0xE0, 0x2, 0x03): 'Device Wire Adapter Isochronous interface.',
    (0xEF, 0x01, 0x01): 'Active Sync device.',
    (0xEF, 0x01, 0x02): 'Palm Sync. This class code can be used in either '
                        'Device or Interface Descriptors.',
    (0xEF, 0x02, 0x01): 'Interface Association Descriptor.',
    (0xEF, 0x02, 0x02): 'Wire Adapter Multifunction Peripheral programming interface.',
    (0xEF, 0x03, 0x01): 'Cable Based Association Framework.',
    (0xEF, 0x04, 0x01): 'RNDIS over Ethernet. Connecting a host to the Internet via '
                        'Ethernet mobile device. The device appears to the host as an'
                        'Ethernet gateway device. This class code may only be used in '
                        'Interface Descriptors.',
    (0xEF, 0x04, 0x02): 'RNDIS over WiFi. Connecting a host to the Internet via WiFi '
                        'enabled mobile device. The device represents itself to the host'
                        'as an 802.11 compliant network device. This class code may only'
                        'be used in Interface Descriptors.',
    (0xEF, 0x04, 0x03): 'RNDIS over WiMAX. Connecting a host to the Internet via WiMAX '
                        'enabled mobile device. The device is represented to the host '
                        'as an 802.16 network device. This class code may only be used '
                        'in Interface Descriptors.',
    (0xEF, 0x04, 0x04): 'RNDIS over WWAN. Connecting a host to the Internet via a device '
                        'using mobile broadband, i.e. WWAN (GSM/CDMA). This class code may '
                        'only be used in Interface Descriptors.',
    (0xEF, 0x04, 0x05): 'RNDIS for Raw IPv4. Connecting a host to the Internet using raw '
                        'IPv4 via non-Ethernet mobile device. Devices that provide raw '
                        'IPv4, not in an Ethernet packet, may use this form to in lieu of '
                        'other stock types. '
                        'This class code may only be used in Interface Descriptors.',
    (0xEF, 0x04, 0x06): 'RNDIS for Raw IPv6. Connecting a host to the Internet using raw '
                        'IPv6 via non-Ethernet mobile device. Devices that provide raw '
                        'IPv6, not in an Ethernet packet, may use this form to in lieu of '
                        'other stock types. '
                        'This class code may only be used in Interface Descriptors.',
    (0xEF, 0x04, 0x07): 'RNDIS for GPRS. Connecting a host to the Internet over GPRS mobile '
                        'device using the device’s cellular radio.',
    (0xEF, 0x05, 0x00): 'USB3 Vision Control Interface',
    (0xEF, 0x05, 0x01): 'USB3 Vision Event Interface',
    (0xEF, 0x05, 0x02): 'USB3 Vision Streaming Interface',
    (0xFE, 0x01, 0x01): 'Device Firmware Upgrade.',
    (0xFE, 0x02, 0x00): 'IRDA Bridge device.',
    (0xFE, 0x03, 0x00): 'USB Test and Measurement Device.',
    (0xFE, 0x03, 0x01): 'USB Test and Measurement Device conforming to the USBTMC USB488 Subclass',
    (0xFF, None, None): 'Vendor specific'
}
def ep_attributes(ep):
    """Return a comma-separated description of an endpoint's bmAttributes.

    Decodes the transfer type (bits 0-1), synchronization type (bits 2-3)
    and usage type (bits 4-5) of ``ep.bmAttributes``.
    """
    bits = ep.bmAttributes
    parts = []

    # Bits 0-1: transfer type.
    transfer_names = {
        usb.ENDPOINT_TYPE_CONTROL: 'Control',
        usb.ENDPOINT_TYPE_ISOCHRONOUS: 'Isochronous',
        usb.ENDPOINT_TYPE_BULK: 'Bulk',
        usb.ENDPOINT_TYPE_INTERRUPT: 'Interrupt',
    }
    transfer = transfer_names.get(bits & usb.ENDPOINT_TYPE_MASK)
    if transfer is not None:
        parts.append(transfer)

    # Bits 2-3: synchronization type (meaningful for isochronous endpoints).
    sync_names = ('No sync', 'Async', 'Adaptive', 'Sync')
    parts.append(sync_names[(bits & 12) >> 2])

    # Bits 4-5: usage type.
    usage_names = ('Data endpoint', 'Feedback endpoint',
                   'Subordinate Feedback endpoint', 'Reserved')
    parts.append(usage_names[(bits & 48) >> 4])

    return ', '.join(parts)
def find_interfaces(device, **kwargs):
    """Return all interface descriptors of ``device`` matching ``kwargs``.

    :param device: a usb.core.Device (iterating it yields its configurations).
    :param kwargs: matching criteria forwarded to usb.util.find_descriptor.
    :return: list of matching interface descriptors (possibly empty).
    """
    interfaces = []
    try:
        for cfg in device:
            try:
                interfaces.extend(usb_find_desc(cfg, find_all=True, **kwargs))
            except Exception:
                # A configuration that cannot be enumerated is simply skipped.
                # (Narrowed from a bare ``except:`` which also swallowed
                # SystemExit/KeyboardInterrupt.)
                pass
    except Exception:
        # A device whose configurations cannot be iterated yields no interfaces.
        pass
    return interfaces
def find_endpoint(interface, direction, type):
    """Return the first endpoint of *interface* matching *direction* and *type*.

    Returns None when no endpoint matches (usb.util.find_descriptor semantics).
    """
    def matches(endpoint):
        return (usb.util.endpoint_direction(endpoint.bEndpointAddress) == direction and
                usb.util.endpoint_type(endpoint.bmAttributes) == type)

    return usb_find_desc(interface, custom_match=matches)
def _patch_endpoint(ep, log_func=print):
_read = ep.read
_write = ep.write
def new_read(*args, **kwargs):
log_func('---')
log_func('reading from {}'.format(ep.bEndpointAddress))
log_func('args: {}'.format(args))
log_func('kwargs: {}'.format(kwargs))
ret = _read(*args, **kwargs)
log_func('returned', ret)
log_func('---')
return ret
def new_write(*args, **kwargs):
log_func('---')
log_func('writing to {}'.format(ep.bEndpointAddress))
log_func('args: {}'.format(args))
log_func('kwargs: {}'.format(kwargs))
ret = _write(*args, **kwargs)
log_func('returned', ret)
log_func('---')
return ret
ep.read = new_read
ep.write = new_write
|
pyvisa/pyvisa-py | pyvisa-py/protocols/usbutil.py | find_interfaces | python | def find_interfaces(device, **kwargs):
interfaces = []
try:
for cfg in device:
try:
interfaces.extend(usb_find_desc(cfg, find_all=True, **kwargs))
except:
pass
except:
pass
return interfaces | :param device:
:return: | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/protocols/usbutil.py#L200-L214 | null | # -*- coding: utf-8 -*-
"""
pyvisa-py.usb
~~~~~~~~~~~~~
Serial Session implementation using PyUSB.
See the following link for more information about USB.
http://www.beyondlogic.org/usbnutshell/usb5.shtml
This file is an offspring of the Lantz Project.
:copyright: 2014 by PyVISA-py Authors, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import division, unicode_literals, print_function, absolute_import
from fnmatch import fnmatch
import usb
from usb.util import (get_string as usb_get_string,
find_descriptor as usb_find_desc)
ClassCodes = {
0x00: ('Device', 'Use class information in the Interface Descriptors'),
0x01: ('Interface', 'Audio'),
0x02: ('Both', 'Communications and CDC Control'),
0x03: ('Interface', 'HID (Human Interface Device)'),
0x05: ('Interface', 'Physical'),
0x06: ('Interface', 'Image'),
0x07: ('Interface', 'Printer'),
0x08: ('Interface', 'Mass Storage'),
0x09: ('Device', 'Hub'),
0x0A: ('Interface', 'CDC-Data'),
0x0B: ('Interface', 'Smart Card'),
0x0D: ('Interface', 'Content Security'),
0x0E: ('Interface', 'Video'),
0x0F: ('Interface', 'Personal Healthcare'),
0x10: ('Interface', 'Audio/Video Devices'),
0xDC: ('Both', 'Diagnostic Device'),
0xE0: ('Interface', 'Wireless Controller'),
0xEF: ('Both', 'Miscellaneous'),
0xFE: ('Interface', 'Application Specific'),
0xFF: ('Both', 'Vendor Specific')
}
# None is 0xxx
AllCodes = {
(0x00, 0x00, 0x00): 'Use class code info from Interface Descriptors',
(0x01, None, None): 'Audio device',
(0x02, None, None): 'Communication device class',
(0x03, None, None): 'HID device class',
(0x05, None, None): 'Physical device class',
(0x06, 0x01, 0x01): 'Still Imaging device',
(0x07, None, None): 'Printer device',
(0x08, None, None): 'Mass Storage device',
(0x09, 0x00, 0x00): 'Full speed Hub',
(0x09, 0x00, 0x01): 'Hi-speed hub with single TT',
(0x09, 0x00, 0x02): 'Hi-speed hub with multiple TTs',
(0x0A, None, None): 'CDC data device',
(0x0B, None, None): 'Smart Card device',
(0x0D, 0x00, 0x00): 'Content Security device',
(0x0E, None, None): 'Video device',
(0x0F, None, None): 'Personal Healthcare device',
(0x10, 0x01, 0x00): 'Control Interface',
(0x10, 0x02, 0x00): 'Data Video Streaming Interface',
(0x10, 0x03, 0x00): 'VData Audio Streaming Interface',
(0xDC, 0x01, 0x01): 'USB2 Compliance Device',
(0xE0, 0x01, 0x01): 'Bluetooth Programming Interface.',
(0xE0, 0x01, 0x02): 'UWB Radio Control Interface.',
(0xE0, 0x01, 0x03): 'Remote NDIS',
(0xE0, 0x01, 0x04): 'Bluetooth AMP Controller.',
(0xE0, 0x2, 0x01): 'Host Wire Adapter Control/Data interface.',
(0xE0, 0x2, 0x02): 'Device Wire Adapter Control/Data interface.',
(0xE0, 0x2, 0x03): 'Device Wire Adapter Isochronous interface.',
(0xEF, 0x01, 0x01): 'Active Sync device.',
(0xEF, 0x01, 0x02): 'Palm Sync. This class code can be used in either '
'Device or Interface Descriptors.',
(0xEF, 0x02, 0x01): 'Interface Association Descriptor.',
(0xEF, 0x02, 0x02): 'Wire Adapter Multifunction Peripheral programming interface.',
(0xEF, 0x03, 0x01): 'Cable Based Association Framework.',
(0xEF, 0x04, 0x01): 'RNDIS over Ethernet. Connecting a host to the Internet via '
'Ethernet mobile device. The device appears to the host as an'
'Ethernet gateway device. This class code may only be used in '
'Interface Descriptors.',
(0xEF, 0x04, 0x02): 'RNDIS over WiFi. Connecting a host to the Internet via WiFi '
'enabled mobile device. The device represents itself to the host'
'as an 802.11 compliant network device. This class code may only'
'be used in Interface Descriptors.',
(0xEF, 0x04, 0x03): 'RNDIS over WiMAX. Connecting a host to the Internet via WiMAX '
'enabled mobile device. The device is represented to the host '
'as an 802.16 network device. This class code may only be used '
'in Interface Descriptors.',
(0xEF, 0x04, 0x04): 'RNDIS over WWAN. Connecting a host to the Internet via a device '
'using mobile broadband, i.e. WWAN (GSM/CDMA). This class code may '
'only be used in Interface Descriptors.',
(0xEF, 0x04, 0x05): 'RNDIS for Raw IPv4. Connecting a host to the Internet using raw '
'IPv4 via non-Ethernet mobile device. Devices that provide raw '
'IPv4, not in an Ethernet packet, may use this form to in lieu of '
'other stock types. '
'This class code may only be used in Interface Descriptors.',
(0xEF, 0x04, 0x06): 'RNDIS for Raw IPv6. Connecting a host to the Internet using raw '
'IPv6 via non-Ethernet mobile device. Devices that provide raw '
'IPv6, not in an Ethernet packet, may use this form to in lieu of '
'other stock types. '
'This class code may only be used in Interface Descriptors.',
(0xEF, 0x04, 0x07): 'RNDIS for GPRS. Connecting a host to the Internet over GPRS mobile '
'device using the device’s cellular radio.',
(0xEF, 0x05, 0x00): 'USB3 Vision Control Interface',
(0xEF, 0x05, 0x01): 'USB3 Vision Event Interface',
(0xEF, 0x05, 0x02): 'USB3 Vision Streaming Interface',
(0xFE, 0x01, 0x01): 'Device Firmware Upgrade.',
(0xFE, 0x02, 0x00): 'IRDA Bridge device.',
(0xFE, 0x03, 0x00): 'USB Test and Measurement Device.',
(0xFE, 0x03, 0x01): 'USB Test and Measurement Device conforming to the USBTMC USB488 Subclass',
(0xFF, None, None): 'Vendor specific'
}
def ep_attributes(ep):
c = ep.bmAttributes
attrs = []
tp = c & usb.ENDPOINT_TYPE_MASK
if tp == usb.ENDPOINT_TYPE_CONTROL:
attrs.append('Control')
elif tp == usb.ENDPOINT_TYPE_ISOCHRONOUS:
attrs.append('Isochronous')
elif tp == usb.ENDPOINT_TYPE_BULK:
attrs.append('Bulk')
elif tp == usb.ENDPOINT_TYPE_INTERRUPT:
attrs.append('Interrupt')
sync = (c & 12) >> 2
if sync == 0:
attrs.append('No sync')
elif sync == 1:
attrs.append('Async')
elif sync == 2:
attrs.append('Adaptive')
elif sync == 3:
attrs.append('Sync')
usage = (c & 48) >> 4
if usage == 0:
attrs.append('Data endpoint')
elif usage == 1:
attrs.append('Feedback endpoint')
elif usage == 2:
attrs.append('Subordinate Feedback endpoint')
elif usage == 3:
attrs.append('Reserved')
return ', '.join(attrs)
def find_devices(vendor=None, product=None, serial_number=None,
                 custom_match=None, **kwargs):
    """Find connected USB devices matching certain keywords.

    Wildcards can be used for vendor, product and serial_number.

    :param vendor: name or id of the vendor (manufacturer)
    :param product: name or id of the product
    :param serial_number: serial number.
    :param custom_match: callable returning True or False that takes a device as only input.
    :param kwargs: other properties to match. See usb.core.find
    :return:
    """
    kwargs = kwargs or {}
    # String-valued criteria are matched against device attributes with
    # fnmatch; numeric ids go straight to usb.core.find.
    string_criteria = {}

    if isinstance(vendor, str):
        string_criteria['manufacturer'] = vendor
    elif vendor is not None:
        kwargs['idVendor'] = vendor

    if isinstance(product, str):
        string_criteria['product'] = product
    elif product is not None:
        kwargs['idProduct'] = product

    if serial_number:
        string_criteria['serial_number'] = str(serial_number)

    if not string_criteria:
        matcher = custom_match
    else:
        def matcher(dev):
            # Honour the user-supplied predicate first.
            if custom_match is not None and not custom_match(dev):
                return False
            # Case-insensitive wildcard match on every string criterion.
            return all(fnmatch(getattr(dev, name).lower(), pattern.lower())
                       for name, pattern in string_criteria.items())

    return usb.core.find(find_all=True, custom_match=matcher, **kwargs)
def find_endpoint(interface, direction, type):
ep = usb_find_desc(interface, custom_match=
lambda e: usb.util.endpoint_direction(e.bEndpointAddress) == direction and
usb.util.endpoint_type(e.bmAttributes) == type
)
return ep
def _patch_endpoint(ep, log_func=print):
_read = ep.read
_write = ep.write
def new_read(*args, **kwargs):
log_func('---')
log_func('reading from {}'.format(ep.bEndpointAddress))
log_func('args: {}'.format(args))
log_func('kwargs: {}'.format(kwargs))
ret = _read(*args, **kwargs)
log_func('returned', ret)
log_func('---')
return ret
def new_write(*args, **kwargs):
log_func('---')
log_func('writing to {}'.format(ep.bEndpointAddress))
log_func('args: {}'.format(args))
log_func('kwargs: {}'.format(kwargs))
ret = _write(*args, **kwargs)
log_func('returned', ret)
log_func('---')
return ret
ep.read = new_read
ep.write = new_write
|
pyvisa/pyvisa-py | pyvisa-py/usb.py | USBSession.read | python | def read(self, count):
def _usb_reader():
"""Data reader identifying usb timeout exception."""
try:
return self.interface.read(count)
except usb.USBError as exc:
if exc.errno in (errno.ETIMEDOUT, -errno.ETIMEDOUT):
raise USBTimeoutException()
raise
supress_end_en, _ = self.get_attribute(constants.VI_ATTR_SUPPRESS_END_EN)
if supress_end_en:
raise ValueError('VI_ATTR_SUPPRESS_END_EN == True is currently unsupported by pyvisa-py')
term_char, _ = self.get_attribute(constants.VI_ATTR_TERMCHAR)
term_char_en, _ = self.get_attribute(constants.VI_ATTR_TERMCHAR_EN)
return self._read(_usb_reader,
count,
lambda current: True, # USB always returns a complete message
supress_end_en,
term_char,
term_char_en,
USBTimeoutException) | Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: (bytes, VISAStatus) | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/usb.py#L95-L128 | null | class USBSession(Session):
"""Base class for drivers that communicate with usb devices
via usb port using pyUSB
"""
@staticmethod
def list_resources():
"""Return list of resources for this type of USB device"""
raise NotImplementedError
@classmethod
def get_low_level_info(cls):
try:
ver = usb.__version__
except AttributeError:
ver = 'N/A'
try:
# noinspection PyProtectedMember
backend = usb.core.find()._ctx.backend.__class__.__module__.split('.')[-1]
except Exception:
backend = 'N/A'
return 'via PyUSB (%s). Backend: %s' % (ver, backend)
def _get_timeout(self, attribute):
if self.interface:
if self.interface.timeout == 2**32-1:
self.timeout = None
else:
self.timeout = self.interface.timeout / 1000
return super(USBSession, self)._get_timeout(attribute)
def _set_timeout(self, attribute, value):
status = super(USBSession, self)._set_timeout(attribute, value)
timeout = int(self.timeout*1000) if self.timeout else 2**32-1
timeout = min(timeout, 2**32-1)
if self.interface:
self.interface.timeout = timeout
return status
def write(self, data):
"""Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param data: data to be written.
:type data: bytes
:return: Number of bytes actually transferred, return value of the library call.
:rtype: (int, VISAStatus)
"""
send_end, _ = self.get_attribute(constants.VI_ATTR_SEND_END_EN)
count = self.interface.write(data)
return count, StatusCode.success
def close(self):
self.interface.close()
def _get_attribute(self, attribute):
"""Get the value for a given VISA attribute for this session.
Use to implement custom logic for attributes.
:param attribute: Resource attribute for which the state query is made
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: (unicode | str | list | int, VISAStatus)
"""
raise UnknownAttribute(attribute)
def _set_attribute(self, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
raise UnknownAttribute(attribute)
|
pyvisa/pyvisa-py | pyvisa-py/usb.py | USBSession.write | python | def write(self, data):
send_end, _ = self.get_attribute(constants.VI_ATTR_SEND_END_EN)
count = self.interface.write(data)
return count, StatusCode.success | Writes data to device or interface synchronously.
Corresponds to viWrite function of the VISA library.
:param data: data to be written.
:type data: bytes
:return: Number of bytes actually transferred, return value of the library call.
:rtype: (int, VISAStatus) | train | https://github.com/pyvisa/pyvisa-py/blob/dfbd509409675b59d71bb741cd72c5f256efd4cd/pyvisa-py/usb.py#L130-L145 | null | class USBSession(Session):
"""Base class for drivers that communicate with usb devices
via usb port using pyUSB
"""
@staticmethod
def list_resources():
"""Return list of resources for this type of USB device"""
raise NotImplementedError
@classmethod
def get_low_level_info(cls):
try:
ver = usb.__version__
except AttributeError:
ver = 'N/A'
try:
# noinspection PyProtectedMember
backend = usb.core.find()._ctx.backend.__class__.__module__.split('.')[-1]
except Exception:
backend = 'N/A'
return 'via PyUSB (%s). Backend: %s' % (ver, backend)
def _get_timeout(self, attribute):
if self.interface:
if self.interface.timeout == 2**32-1:
self.timeout = None
else:
self.timeout = self.interface.timeout / 1000
return super(USBSession, self)._get_timeout(attribute)
def _set_timeout(self, attribute, value):
status = super(USBSession, self)._set_timeout(attribute, value)
timeout = int(self.timeout*1000) if self.timeout else 2**32-1
timeout = min(timeout, 2**32-1)
if self.interface:
self.interface.timeout = timeout
return status
def read(self, count):
"""Reads data from device or interface synchronously.
Corresponds to viRead function of the VISA library.
:param count: Number of bytes to be read.
:return: data read, return value of the library call.
:rtype: (bytes, VISAStatus)
"""
def _usb_reader():
"""Data reader identifying usb timeout exception."""
try:
return self.interface.read(count)
except usb.USBError as exc:
if exc.errno in (errno.ETIMEDOUT, -errno.ETIMEDOUT):
raise USBTimeoutException()
raise
supress_end_en, _ = self.get_attribute(constants.VI_ATTR_SUPPRESS_END_EN)
if supress_end_en:
raise ValueError('VI_ATTR_SUPPRESS_END_EN == True is currently unsupported by pyvisa-py')
term_char, _ = self.get_attribute(constants.VI_ATTR_TERMCHAR)
term_char_en, _ = self.get_attribute(constants.VI_ATTR_TERMCHAR_EN)
return self._read(_usb_reader,
count,
lambda current: True, # USB always returns a complete message
supress_end_en,
term_char,
term_char_en,
USBTimeoutException)
def close(self):
self.interface.close()
def _get_attribute(self, attribute):
"""Get the value for a given VISA attribute for this session.
Use to implement custom logic for attributes.
:param attribute: Resource attribute for which the state query is made
:return: The state of the queried attribute for a specified resource, return value of the library call.
:rtype: (unicode | str | list | int, VISAStatus)
"""
raise UnknownAttribute(attribute)
def _set_attribute(self, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: VISAStatus
"""
raise UnknownAttribute(attribute)
|
def create_branch(version):
    """Create a fresh ``release-<version>`` branch from upstream/master.

    :param version: release version string used in the branch name.
    :raises RuntimeError: if the working tree has uncommitted changes.
    :return: the :class:`git.Repo` for the working directory.
    """
    # NOTE(review): Repo.init() re-runs ``git init`` on the working copy; it
    # is harmless on an existing repository, but Repo(".") may express the
    # intent better -- confirm before changing.
    repo = Repo.init(".")
    if repo.is_dirty(untracked_files=True):
        # Plain string: the message has no interpolated fields, so the
        # original f-prefix was superfluous (flake8 F541).
        raise RuntimeError("Repository is dirty, please commit/stash your changes.")

    branch_name = f"release-{version}"
    print(f"{Fore.CYAN}Create {branch_name} branch from upstream master")
    upstream = get_upstream(repo)
    upstream.fetch()
    release_branch = repo.create_head(branch_name, upstream.refs.master, force=True)
    release_branch.checkout()
    return repo
"def get_upstream(repo: Repo) -> Remote:\n \"\"\"Find upstream repository for pluggy on the remotes\"\"\"\n for remote in repo.remotes:\n for url in remote.urls:\n if url.endswith(\"pytest-dev/pluggy.git\"):\n return remote\n raise RuntimeError(\"could not find tox-dev/tox.... | """
Release script.
"""
import argparse
import sys
from subprocess import check_call
from colorama import init, Fore
from git import Repo, Remote
def get_upstream(repo: Repo) -> Remote:
"""Find upstream repository for pluggy on the remotes"""
for remote in repo.remotes:
for url in remote.urls:
if url.endswith("pytest-dev/pluggy.git"):
return remote
raise RuntimeError("could not find tox-dev/tox.git remote")
def pre_release(version):
"""Generates new docs, release announcements and creates a local tag."""
create_branch(version)
changelog(version, write_out=True)
check_call(["git", "commit", "-a", "-m", f"Preparing release {version}"])
print()
print(f"{Fore.GREEN}Please push your branch to your fork and open a PR.")
def changelog(version, write_out=False):
if write_out:
addopts = []
else:
addopts = ["--draft"]
print(f"{Fore.CYAN}Generating CHANGELOG")
check_call(["towncrier", "--yes", "--version", version] + addopts)
def main():
init(autoreset=True)
parser = argparse.ArgumentParser()
parser.add_argument("version", help="Release version")
options = parser.parse_args()
try:
pre_release(options.version)
except RuntimeError as e:
print(f"{Fore.RED}ERROR: {e}")
return 1
if __name__ == "__main__":
sys.exit(main())
|
pytest-dev/pluggy | scripts/release.py | get_upstream | python | def get_upstream(repo: Repo) -> Remote:
for remote in repo.remotes:
for url in remote.urls:
if url.endswith("pytest-dev/pluggy.git"):
return remote
raise RuntimeError("could not find tox-dev/tox.git remote") | Find upstream repository for pluggy on the remotes | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/scripts/release.py#L27-L33 | null | """
Release script.
"""
import argparse
import sys
from subprocess import check_call
from colorama import init, Fore
from git import Repo, Remote
def create_branch(version):
"""Create a fresh branch from upstream/master"""
repo = Repo.init(".")
if repo.is_dirty(untracked_files=True):
raise RuntimeError(f"Repository is dirty, please commit/stash your changes.")
branch_name = f"release-{version}"
print(f"{Fore.CYAN}Create {branch_name} branch from upstream master")
upstream = get_upstream(repo)
upstream.fetch()
release_branch = repo.create_head(branch_name, upstream.refs.master, force=True)
release_branch.checkout()
return repo
def pre_release(version):
"""Generates new docs, release announcements and creates a local tag."""
create_branch(version)
changelog(version, write_out=True)
check_call(["git", "commit", "-a", "-m", f"Preparing release {version}"])
print()
print(f"{Fore.GREEN}Please push your branch to your fork and open a PR.")
def changelog(version, write_out=False):
if write_out:
addopts = []
else:
addopts = ["--draft"]
print(f"{Fore.CYAN}Generating CHANGELOG")
check_call(["towncrier", "--yes", "--version", version] + addopts)
def main():
init(autoreset=True)
parser = argparse.ArgumentParser()
parser.add_argument("version", help="Release version")
options = parser.parse_args()
try:
pre_release(options.version)
except RuntimeError as e:
print(f"{Fore.RED}ERROR: {e}")
return 1
if __name__ == "__main__":
sys.exit(main())
|
pytest-dev/pluggy | scripts/release.py | pre_release | python | def pre_release(version):
create_branch(version)
changelog(version, write_out=True)
check_call(["git", "commit", "-a", "-m", f"Preparing release {version}"])
print()
print(f"{Fore.GREEN}Please push your branch to your fork and open a PR.") | Generates new docs, release announcements and creates a local tag. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/scripts/release.py#L36-L44 | [
"def create_branch(version):\n \"\"\"Create a fresh branch from upstream/master\"\"\"\n repo = Repo.init(\".\")\n if repo.is_dirty(untracked_files=True):\n raise RuntimeError(f\"Repository is dirty, please commit/stash your changes.\")\n\n branch_name = f\"release-{version}\"\n print(f\"{Fore.... | """
Release script.
"""
import argparse
import sys
from subprocess import check_call
from colorama import init, Fore
from git import Repo, Remote
def create_branch(version):
"""Create a fresh branch from upstream/master"""
repo = Repo.init(".")
if repo.is_dirty(untracked_files=True):
raise RuntimeError(f"Repository is dirty, please commit/stash your changes.")
branch_name = f"release-{version}"
print(f"{Fore.CYAN}Create {branch_name} branch from upstream master")
upstream = get_upstream(repo)
upstream.fetch()
release_branch = repo.create_head(branch_name, upstream.refs.master, force=True)
release_branch.checkout()
return repo
def get_upstream(repo: Repo) -> Remote:
"""Find upstream repository for pluggy on the remotes"""
for remote in repo.remotes:
for url in remote.urls:
if url.endswith("pytest-dev/pluggy.git"):
return remote
raise RuntimeError("could not find tox-dev/tox.git remote")
def changelog(version, write_out=False):
if write_out:
addopts = []
else:
addopts = ["--draft"]
print(f"{Fore.CYAN}Generating CHANGELOG")
check_call(["towncrier", "--yes", "--version", version] + addopts)
def main():
init(autoreset=True)
parser = argparse.ArgumentParser()
parser.add_argument("version", help="Release version")
options = parser.parse_args()
try:
pre_release(options.version)
except RuntimeError as e:
print(f"{Fore.RED}ERROR: {e}")
return 1
if __name__ == "__main__":
sys.exit(main())
|
pytest-dev/pluggy | pluggy/callers.py | _wrapped_call | python | def _wrapped_call(wrap_controller, func):
try:
next(wrap_controller) # first yield
except StopIteration:
_raise_wrapfail(wrap_controller, "did not yield")
call_outcome = _Result.from_call(func)
try:
wrap_controller.send(call_outcome)
_raise_wrapfail(wrap_controller, "has second yield")
except StopIteration:
pass
return call_outcome.get_result() | Wrap calling to a function with a generator which needs to yield
exactly once. The yield point will trigger calling the wrapped function
and return its ``_Result`` to the yield point. The generator then needs
to finish (raise StopIteration) in order for the wrapped call to complete. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/callers.py#L84-L100 | [
"def _raise_wrapfail(wrap_controller, msg):\n co = wrap_controller.gi_code\n raise RuntimeError(\n \"wrap_controller at %r %s:%d %s\"\n % (co.co_name, co.co_filename, co.co_firstlineno, msg)\n )\n",
"def from_call(cls, func):\n __tracebackhide__ = True\n result = excinfo = None\n t... | """
Call loop machinery
"""
import sys
import warnings
_py3 = sys.version_info > (3, 0)
if not _py3:
exec(
"""
def _reraise(cls, val, tb):
raise cls, val, tb
"""
)
def _raise_wrapfail(wrap_controller, msg):
co = wrap_controller.gi_code
raise RuntimeError(
"wrap_controller at %r %s:%d %s"
% (co.co_name, co.co_filename, co.co_firstlineno, msg)
)
class HookCallError(Exception):
""" Hook was called wrongly. """
class _Result(object):
def __init__(self, result, excinfo):
self._result = result
self._excinfo = excinfo
@property
def excinfo(self):
return self._excinfo
@property
def result(self):
"""Get the result(s) for this hook call (DEPRECATED in favor of ``get_result()``)."""
msg = "Use get_result() which forces correct exception handling"
warnings.warn(DeprecationWarning(msg), stacklevel=2)
return self._result
@classmethod
def from_call(cls, func):
__tracebackhide__ = True
result = excinfo = None
try:
result = func()
except BaseException:
excinfo = sys.exc_info()
return cls(result, excinfo)
def force_result(self, result):
"""Force the result(s) to ``result``.
If the hook was marked as a ``firstresult`` a single value should
be set otherwise set a (modified) list of results. Any exceptions
found during invocation will be deleted.
"""
self._result = result
self._excinfo = None
def get_result(self):
"""Get the result(s) for this hook call.
If the hook was marked as a ``firstresult`` only a single value
will be returned otherwise a list of results.
"""
__tracebackhide__ = True
if self._excinfo is None:
return self._result
else:
ex = self._excinfo
if _py3:
raise ex[1].with_traceback(ex[2])
_reraise(*ex) # noqa
class _LegacyMultiCall(object):
""" execute a call into multiple python functions/methods. """
# XXX note that the __multicall__ argument is supported only
# for pytest compatibility reasons. It was never officially
# supported there and is explicitely deprecated since 2.8
# so we can remove it soon, allowing to avoid the below recursion
# in execute() and simplify/speed up the execute loop.
def __init__(self, hook_impls, kwargs, firstresult=False):
self.hook_impls = hook_impls
self.caller_kwargs = kwargs # come from _HookCaller.__call__()
self.caller_kwargs["__multicall__"] = self
self.firstresult = firstresult
def execute(self):
caller_kwargs = self.caller_kwargs
self.results = results = []
firstresult = self.firstresult
while self.hook_impls:
hook_impl = self.hook_impls.pop()
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError:
for argname in hook_impl.argnames:
if argname not in caller_kwargs:
raise HookCallError(
"hook call must provide argument %r" % (argname,)
)
if hook_impl.hookwrapper:
return _wrapped_call(hook_impl.function(*args), self.execute)
res = hook_impl.function(*args)
if res is not None:
if firstresult:
return res
results.append(res)
if not firstresult:
return results
def __repr__(self):
status = "%d meths" % (len(self.hook_impls),)
if hasattr(self, "results"):
status = ("%d results, " % len(self.results)) + status
return "<_MultiCall %s, kwargs=%r>" % (status, self.caller_kwargs)
def _legacymulticall(hook_impls, caller_kwargs, firstresult=False):
return _LegacyMultiCall(
hook_impls, caller_kwargs, firstresult=firstresult
).execute()
def _multicall(hook_impls, caller_kwargs, firstresult=False):
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from _HookCaller.__call__().
"""
__tracebackhide__ = True
results = []
excinfo = None
try: # run impl and wrapper setup functions in a loop
teardowns = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError:
for argname in hook_impl.argnames:
if argname not in caller_kwargs:
raise HookCallError(
"hook call must provide argument %r" % (argname,)
)
if hook_impl.hookwrapper:
try:
gen = hook_impl.function(*args)
next(gen) # first yield
teardowns.append(gen)
except StopIteration:
_raise_wrapfail(gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException:
excinfo = sys.exc_info()
finally:
if firstresult: # first result hooks return a single value
outcome = _Result(results[0] if results else None, excinfo)
else:
outcome = _Result(results, excinfo)
# run all wrapper post-yield blocks
for gen in reversed(teardowns):
try:
gen.send(outcome)
_raise_wrapfail(gen, "has second yield")
except StopIteration:
pass
return outcome.get_result()
|
pytest-dev/pluggy | pluggy/callers.py | _multicall | python | def _multicall(hook_impls, caller_kwargs, firstresult=False):
__tracebackhide__ = True
results = []
excinfo = None
try: # run impl and wrapper setup functions in a loop
teardowns = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError:
for argname in hook_impl.argnames:
if argname not in caller_kwargs:
raise HookCallError(
"hook call must provide argument %r" % (argname,)
)
if hook_impl.hookwrapper:
try:
gen = hook_impl.function(*args)
next(gen) # first yield
teardowns.append(gen)
except StopIteration:
_raise_wrapfail(gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException:
excinfo = sys.exc_info()
finally:
if firstresult: # first result hooks return a single value
outcome = _Result(results[0] if results else None, excinfo)
else:
outcome = _Result(results, excinfo)
# run all wrapper post-yield blocks
for gen in reversed(teardowns):
try:
gen.send(outcome)
_raise_wrapfail(gen, "has second yield")
except StopIteration:
pass
return outcome.get_result() | Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from _HookCaller.__call__(). | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/callers.py#L157-L208 | [
"def _raise_wrapfail(wrap_controller, msg):\n co = wrap_controller.gi_code\n raise RuntimeError(\n \"wrap_controller at %r %s:%d %s\"\n % (co.co_name, co.co_filename, co.co_firstlineno, msg)\n )\n",
"def get_result(self):\n \"\"\"Get the result(s) for this hook call.\n\n If the hook w... | """
Call loop machinery
"""
import sys
import warnings
_py3 = sys.version_info > (3, 0)
if not _py3:
exec(
"""
def _reraise(cls, val, tb):
raise cls, val, tb
"""
)
def _raise_wrapfail(wrap_controller, msg):
co = wrap_controller.gi_code
raise RuntimeError(
"wrap_controller at %r %s:%d %s"
% (co.co_name, co.co_filename, co.co_firstlineno, msg)
)
class HookCallError(Exception):
""" Hook was called wrongly. """
class _Result(object):
def __init__(self, result, excinfo):
self._result = result
self._excinfo = excinfo
@property
def excinfo(self):
return self._excinfo
@property
def result(self):
"""Get the result(s) for this hook call (DEPRECATED in favor of ``get_result()``)."""
msg = "Use get_result() which forces correct exception handling"
warnings.warn(DeprecationWarning(msg), stacklevel=2)
return self._result
@classmethod
def from_call(cls, func):
__tracebackhide__ = True
result = excinfo = None
try:
result = func()
except BaseException:
excinfo = sys.exc_info()
return cls(result, excinfo)
def force_result(self, result):
"""Force the result(s) to ``result``.
If the hook was marked as a ``firstresult`` a single value should
be set otherwise set a (modified) list of results. Any exceptions
found during invocation will be deleted.
"""
self._result = result
self._excinfo = None
def get_result(self):
"""Get the result(s) for this hook call.
If the hook was marked as a ``firstresult`` only a single value
will be returned otherwise a list of results.
"""
__tracebackhide__ = True
if self._excinfo is None:
return self._result
else:
ex = self._excinfo
if _py3:
raise ex[1].with_traceback(ex[2])
_reraise(*ex) # noqa
def _wrapped_call(wrap_controller, func):
""" Wrap calling to a function with a generator which needs to yield
exactly once. The yield point will trigger calling the wrapped function
and return its ``_Result`` to the yield point. The generator then needs
to finish (raise StopIteration) in order for the wrapped call to complete.
"""
try:
next(wrap_controller) # first yield
except StopIteration:
_raise_wrapfail(wrap_controller, "did not yield")
call_outcome = _Result.from_call(func)
try:
wrap_controller.send(call_outcome)
_raise_wrapfail(wrap_controller, "has second yield")
except StopIteration:
pass
return call_outcome.get_result()
class _LegacyMultiCall(object):
""" execute a call into multiple python functions/methods. """
# XXX note that the __multicall__ argument is supported only
# for pytest compatibility reasons. It was never officially
# supported there and is explicitely deprecated since 2.8
# so we can remove it soon, allowing to avoid the below recursion
# in execute() and simplify/speed up the execute loop.
def __init__(self, hook_impls, kwargs, firstresult=False):
self.hook_impls = hook_impls
self.caller_kwargs = kwargs # come from _HookCaller.__call__()
self.caller_kwargs["__multicall__"] = self
self.firstresult = firstresult
def execute(self):
caller_kwargs = self.caller_kwargs
self.results = results = []
firstresult = self.firstresult
while self.hook_impls:
hook_impl = self.hook_impls.pop()
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError:
for argname in hook_impl.argnames:
if argname not in caller_kwargs:
raise HookCallError(
"hook call must provide argument %r" % (argname,)
)
if hook_impl.hookwrapper:
return _wrapped_call(hook_impl.function(*args), self.execute)
res = hook_impl.function(*args)
if res is not None:
if firstresult:
return res
results.append(res)
if not firstresult:
return results
def __repr__(self):
status = "%d meths" % (len(self.hook_impls),)
if hasattr(self, "results"):
status = ("%d results, " % len(self.results)) + status
return "<_MultiCall %s, kwargs=%r>" % (status, self.caller_kwargs)
def _legacymulticall(hook_impls, caller_kwargs, firstresult=False):
return _LegacyMultiCall(
hook_impls, caller_kwargs, firstresult=firstresult
).execute()
|
pytest-dev/pluggy | pluggy/callers.py | _Result.result | python | def result(self):
msg = "Use get_result() which forces correct exception handling"
warnings.warn(DeprecationWarning(msg), stacklevel=2)
return self._result | Get the result(s) for this hook call (DEPRECATED in favor of ``get_result()``). | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/callers.py#L41-L45 | null | class _Result(object):
def __init__(self, result, excinfo):
self._result = result
self._excinfo = excinfo
@property
def excinfo(self):
return self._excinfo
@property
@classmethod
def from_call(cls, func):
__tracebackhide__ = True
result = excinfo = None
try:
result = func()
except BaseException:
excinfo = sys.exc_info()
return cls(result, excinfo)
def force_result(self, result):
"""Force the result(s) to ``result``.
If the hook was marked as a ``firstresult`` a single value should
be set otherwise set a (modified) list of results. Any exceptions
found during invocation will be deleted.
"""
self._result = result
self._excinfo = None
def get_result(self):
"""Get the result(s) for this hook call.
If the hook was marked as a ``firstresult`` only a single value
will be returned otherwise a list of results.
"""
__tracebackhide__ = True
if self._excinfo is None:
return self._result
else:
ex = self._excinfo
if _py3:
raise ex[1].with_traceback(ex[2])
_reraise(*ex) # noqa
|
pytest-dev/pluggy | pluggy/callers.py | _Result.get_result | python | def get_result(self):
__tracebackhide__ = True
if self._excinfo is None:
return self._result
else:
ex = self._excinfo
if _py3:
raise ex[1].with_traceback(ex[2])
_reraise(*ex) | Get the result(s) for this hook call.
If the hook was marked as a ``firstresult`` only a single value
will be returned otherwise a list of results. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/callers.py#L68-L81 | null | class _Result(object):
def __init__(self, result, excinfo):
self._result = result
self._excinfo = excinfo
@property
def excinfo(self):
return self._excinfo
@property
def result(self):
"""Get the result(s) for this hook call (DEPRECATED in favor of ``get_result()``)."""
msg = "Use get_result() which forces correct exception handling"
warnings.warn(DeprecationWarning(msg), stacklevel=2)
return self._result
@classmethod
def from_call(cls, func):
__tracebackhide__ = True
result = excinfo = None
try:
result = func()
except BaseException:
excinfo = sys.exc_info()
return cls(result, excinfo)
def force_result(self, result):
"""Force the result(s) to ``result``.
If the hook was marked as a ``firstresult`` a single value should
be set otherwise set a (modified) list of results. Any exceptions
found during invocation will be deleted.
"""
self._result = result
self._excinfo = None
# noqa
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.register | python | def register(self, plugin, name=None):
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name | Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L70-L105 | [
"def normalize_hookimpl_opts(opts):\n opts.setdefault(\"tryfirst\", False)\n opts.setdefault(\"trylast\", False)\n opts.setdefault(\"hookwrapper\", False)\n opts.setdefault(\"optionalhook\", False)\n",
"def _add_hookimpl(self, hookimpl):\n \"\"\"Add an implementation to the callback chain.\n \"\... | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def unregister(self, plugin=None, name=None):
""" unregister a plugin object and all its contained hook implementations
from internal data structures. """
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin
def set_blocked(self, name):
""" block registrations of the given name, unregister if already registered. """
self.unregister(name=name)
self._name2plugin[name] = None
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def add_hookspecs(self, module_or_class):
""" add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. """
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
)
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def get_name(self, plugin):
""" Return name for registered plugin or None if not registered. """
for name, val in self._name2plugin.items():
if plugin == val:
return name
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def check_pending(self):
""" Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError"""
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
)
def load_setuptools_entrypoints(self, group, name=None):
""" Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call.
"""
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def add_hookcall_monitoring(self, before, after):
""" add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call.
"""
return _tracing._TracedHookExecution(self, before, after).undo
def enable_tracing(self):
""" enable tracing of hook calls and return an undo function. """
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after)
def subset_hook_caller(self, name, remove_plugins):
""" Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. """
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.unregister | python | def unregister(self, plugin=None, name=None):
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin | unregister a plugin object and all its contained hook implementations
from internal data structures. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L130-L147 | [
"def get_plugin(self, name):\n \"\"\" Return a plugin or None for the given name. \"\"\"\n return self._name2plugin.get(name)\n",
"def get_name(self, plugin):\n \"\"\" Return name for registered plugin or None if not registered. \"\"\"\n for name, val in self._name2plugin.items():\n if plugin =... | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def register(self, plugin, name=None):
""" Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. """
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def set_blocked(self, name):
""" block registrations of the given name, unregister if already registered. """
self.unregister(name=name)
self._name2plugin[name] = None
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def add_hookspecs(self, module_or_class):
""" add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. """
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
)
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def get_name(self, plugin):
""" Return name for registered plugin or None if not registered. """
for name, val in self._name2plugin.items():
if plugin == val:
return name
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def check_pending(self):
""" Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError"""
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
)
def load_setuptools_entrypoints(self, group, name=None):
""" Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call.
"""
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def add_hookcall_monitoring(self, before, after):
""" add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call.
"""
return _tracing._TracedHookExecution(self, before, after).undo
def enable_tracing(self):
""" enable tracing of hook calls and return an undo function. """
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after)
def subset_hook_caller(self, name, remove_plugins):
""" Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. """
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.set_blocked | python | def set_blocked(self, name):
self.unregister(name=name)
self._name2plugin[name] = None | block registrations of the given name, unregister if already registered. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L149-L152 | [
"def unregister(self, plugin=None, name=None):\n \"\"\" unregister a plugin object and all its contained hook implementations\n from internal data structures. \"\"\"\n if name is None:\n assert plugin is not None, \"one of name or plugin needs to be specified\"\n name = self.get_name(plugin)\... | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def register(self, plugin, name=None):
""" Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. """
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def unregister(self, plugin=None, name=None):
""" unregister a plugin object and all its contained hook implementations
from internal data structures. """
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def add_hookspecs(self, module_or_class):
""" add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. """
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
)
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def get_name(self, plugin):
""" Return name for registered plugin or None if not registered. """
for name, val in self._name2plugin.items():
if plugin == val:
return name
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def check_pending(self):
""" Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError"""
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
)
def load_setuptools_entrypoints(self, group, name=None):
""" Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call.
"""
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def add_hookcall_monitoring(self, before, after):
""" add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call.
"""
return _tracing._TracedHookExecution(self, before, after).undo
def enable_tracing(self):
""" enable tracing of hook calls and return an undo function. """
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after)
def subset_hook_caller(self, name, remove_plugins):
""" Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. """
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.add_hookspecs | python | def add_hookspecs(self, module_or_class):
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
) | add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L158-L179 | [
"def parse_hookspec_opts(self, module_or_class, name):\n method = getattr(module_or_class, name)\n return getattr(method, self.project_name + \"_spec\", None)\n",
"def _verify_hook(self, hook, hookimpl):\n if hook.is_historic() and hookimpl.hookwrapper:\n raise PluginValidationError(\n ... | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def register(self, plugin, name=None):
""" Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. """
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def unregister(self, plugin=None, name=None):
""" unregister a plugin object and all its contained hook implementations
from internal data structures. """
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin
def set_blocked(self, name):
""" block registrations of the given name, unregister if already registered. """
self.unregister(name=name)
self._name2plugin[name] = None
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def get_name(self, plugin):
""" Return name for registered plugin or None if not registered. """
for name, val in self._name2plugin.items():
if plugin == val:
return name
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def check_pending(self):
""" Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError"""
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
)
def load_setuptools_entrypoints(self, group, name=None):
""" Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call.
"""
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def add_hookcall_monitoring(self, before, after):
""" add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call.
"""
return _tracing._TracedHookExecution(self, before, after).undo
def enable_tracing(self):
""" enable tracing of hook calls and return an undo function. """
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after)
def subset_hook_caller(self, name, remove_plugins):
""" Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. """
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.get_name | python | def get_name(self, plugin):
for name, val in self._name2plugin.items():
if plugin == val:
return name | Return name for registered plugin or None if not registered. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L208-L212 | null | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def register(self, plugin, name=None):
""" Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. """
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def unregister(self, plugin=None, name=None):
""" unregister a plugin object and all its contained hook implementations
from internal data structures. """
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin
def set_blocked(self, name):
""" block registrations of the given name, unregister if already registered. """
self.unregister(name=name)
self._name2plugin[name] = None
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def add_hookspecs(self, module_or_class):
""" add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. """
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
)
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def check_pending(self):
""" Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError"""
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
)
def load_setuptools_entrypoints(self, group, name=None):
""" Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call.
"""
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def add_hookcall_monitoring(self, before, after):
""" add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call.
"""
return _tracing._TracedHookExecution(self, before, after).undo
def enable_tracing(self):
""" enable tracing of hook calls and return an undo function. """
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after)
def subset_hook_caller(self, name, remove_plugins):
""" Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. """
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.check_pending | python | def check_pending(self):
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
) | Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L239-L252 | null | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def register(self, plugin, name=None):
""" Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. """
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def unregister(self, plugin=None, name=None):
""" unregister a plugin object and all its contained hook implementations
from internal data structures. """
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin
def set_blocked(self, name):
""" block registrations of the given name, unregister if already registered. """
self.unregister(name=name)
self._name2plugin[name] = None
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def add_hookspecs(self, module_or_class):
""" add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. """
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
)
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def get_name(self, plugin):
""" Return name for registered plugin or None if not registered. """
for name, val in self._name2plugin.items():
if plugin == val:
return name
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def load_setuptools_entrypoints(self, group, name=None):
""" Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call.
"""
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def add_hookcall_monitoring(self, before, after):
""" add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call.
"""
return _tracing._TracedHookExecution(self, before, after).undo
def enable_tracing(self):
""" enable tracing of hook calls and return an undo function. """
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after)
def subset_hook_caller(self, name, remove_plugins):
""" Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. """
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.load_setuptools_entrypoints | python | def load_setuptools_entrypoints(self, group, name=None):
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count | Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L254-L285 | [
"def get_plugin(self, name):\n \"\"\" Return a plugin or None for the given name. \"\"\"\n return self._name2plugin.get(name)\n"
] | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def register(self, plugin, name=None):
""" Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. """
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def unregister(self, plugin=None, name=None):
""" unregister a plugin object and all its contained hook implementations
from internal data structures. """
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin
def set_blocked(self, name):
""" block registrations of the given name, unregister if already registered. """
self.unregister(name=name)
self._name2plugin[name] = None
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def add_hookspecs(self, module_or_class):
""" add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. """
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
)
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def get_name(self, plugin):
""" Return name for registered plugin or None if not registered. """
for name, val in self._name2plugin.items():
if plugin == val:
return name
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def check_pending(self):
""" Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError"""
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
)
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def add_hookcall_monitoring(self, before, after):
""" add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call.
"""
return _tracing._TracedHookExecution(self, before, after).undo
def enable_tracing(self):
""" enable tracing of hook calls and return an undo function. """
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after)
def subset_hook_caller(self, name, remove_plugins):
""" Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. """
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.add_hookcall_monitoring | python | def add_hookcall_monitoring(self, before, after):
return _tracing._TracedHookExecution(self, before, after).undo | add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L300-L313 | null | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def register(self, plugin, name=None):
""" Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. """
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def unregister(self, plugin=None, name=None):
""" unregister a plugin object and all its contained hook implementations
from internal data structures. """
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin
def set_blocked(self, name):
""" block registrations of the given name, unregister if already registered. """
self.unregister(name=name)
self._name2plugin[name] = None
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def add_hookspecs(self, module_or_class):
""" add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. """
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
)
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def get_name(self, plugin):
""" Return name for registered plugin or None if not registered. """
for name, val in self._name2plugin.items():
if plugin == val:
return name
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def check_pending(self):
""" Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError"""
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
)
def load_setuptools_entrypoints(self, group, name=None):
""" Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call.
"""
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def enable_tracing(self):
""" enable tracing of hook calls and return an undo function. """
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after)
def subset_hook_caller(self, name, remove_plugins):
""" Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. """
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.enable_tracing | python | def enable_tracing(self):
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after) | enable tracing of hook calls and return an undo function. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L315-L328 | [
"def add_hookcall_monitoring(self, before, after):\n \"\"\" add before/after tracing functions for all hooks\n and return an undo function which, when called,\n will remove the added tracers.\n\n ``before(hook_name, hook_impls, kwargs)`` will be called ahead\n of all hook calls and receive a hookcall... | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def register(self, plugin, name=None):
""" Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. """
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def unregister(self, plugin=None, name=None):
""" unregister a plugin object and all its contained hook implementations
from internal data structures. """
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin
def set_blocked(self, name):
""" block registrations of the given name, unregister if already registered. """
self.unregister(name=name)
self._name2plugin[name] = None
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def add_hookspecs(self, module_or_class):
""" add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. """
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
)
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def get_name(self, plugin):
""" Return name for registered plugin or None if not registered. """
for name, val in self._name2plugin.items():
if plugin == val:
return name
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def check_pending(self):
""" Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError"""
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
)
def load_setuptools_entrypoints(self, group, name=None):
""" Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call.
"""
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def add_hookcall_monitoring(self, before, after):
""" add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call.
"""
return _tracing._TracedHookExecution(self, before, after).undo
def subset_hook_caller(self, name, remove_plugins):
""" Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. """
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig
|
pytest-dev/pluggy | pluggy/manager.py | PluginManager.subset_hook_caller | python | def subset_hook_caller(self, name, remove_plugins):
orig = getattr(self.hook, name)
plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
if plugins_to_remove:
hc = _HookCaller(
orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts
)
for hookimpl in orig.get_hookimpls():
plugin = hookimpl.plugin
if plugin not in plugins_to_remove:
hc._add_hookimpl(hookimpl)
# we also keep track of this hook caller so it
# gets properly removed on plugin unregistration
self._plugin2hookcallers.setdefault(plugin, []).append(hc)
return hc
return orig | Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/manager.py#L330-L348 | [
"def _add_hookimpl(self, hookimpl):\n \"\"\"Add an implementation to the callback chain.\n \"\"\"\n if hookimpl.hookwrapper:\n methods = self._wrappers\n else:\n methods = self._nonwrappers\n\n if hookimpl.trylast:\n methods.insert(0, hookimpl)\n elif hookimpl.tryfirst:\n ... | class PluginManager(object):
""" Core Pluginmanager class which manages registration
of plugin objects and 1:N hook calling.
You can register new hooks by calling ``add_hookspecs(module_or_class)``.
You can register plugin objects (which contain hooks) by calling
``register(plugin)``. The Pluginmanager is initialized with a
prefix that is searched for in the names of the dict of registered
plugin objects.
For debugging purposes you can call ``enable_tracing()``
which will subsequently send debug information to the trace helper.
"""
def __init__(self, project_name, implprefix=None):
"""If ``implprefix`` is given implementation functions
will be recognized if their name matches the implprefix. """
self.project_name = project_name
self._name2plugin = {}
self._plugin2hookcallers = {}
self._plugin_distinfo = []
self.trace = _tracing.TagTracer().get("pluginmanage")
self.hook = _HookRelay(self.trace.root.get("hook"))
if implprefix is not None:
warnings.warn(
"Support for the `implprefix` arg is now deprecated and will "
"be removed in an upcoming release. Please use HookimplMarker.",
DeprecationWarning,
stacklevel=2,
)
self._implprefix = implprefix
self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(
methods,
kwargs,
firstresult=hook.spec.opts.get("firstresult") if hook.spec else False,
)
def _hookexec(self, hook, methods, kwargs):
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
return self._inner_hookexec(hook, methods, kwargs)
def register(self, plugin, name=None):
""" Register a plugin and return its canonical name or None if the name
is blocked from registering. Raise a ValueError if the plugin is already
registered. """
plugin_name = name or self.get_canonical_name(plugin)
if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
if self._name2plugin.get(plugin_name, -1) is None:
return # blocked plugin, return None to indicate no registration
raise ValueError(
"Plugin already registered: %s=%s\n%s"
% (plugin_name, plugin, self._name2plugin)
)
# XXX if an error happens we should make sure no state has been
# changed at point of return
self._name2plugin[plugin_name] = plugin
# register matching hook implementations of the plugin
self._plugin2hookcallers[plugin] = hookcallers = []
for name in dir(plugin):
hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
if hookimpl_opts is not None:
normalize_hookimpl_opts(hookimpl_opts)
method = getattr(plugin, name)
hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
hook = getattr(self.hook, name, None)
if hook is None:
hook = _HookCaller(name, self._hookexec)
setattr(self.hook, name, hook)
elif hook.has_spec():
self._verify_hook(hook, hookimpl)
hook._maybe_apply_history(hookimpl)
hook._add_hookimpl(hookimpl)
hookcallers.append(hook)
return plugin_name
def parse_hookimpl_opts(self, plugin, name):
method = getattr(plugin, name)
if not inspect.isroutine(method):
return
try:
res = getattr(method, self.project_name + "_impl", None)
except Exception:
res = {}
if res is not None and not isinstance(res, dict):
# false positive
res = None
# TODO: remove when we drop implprefix in 1.0
elif res is None and self._implprefix and name.startswith(self._implprefix):
_warn_for_function(
DeprecationWarning(
"The `implprefix` system is deprecated please decorate "
"this function using an instance of HookimplMarker."
),
method,
)
res = {}
return res
def unregister(self, plugin=None, name=None):
""" unregister a plugin object and all its contained hook implementations
from internal data structures. """
if name is None:
assert plugin is not None, "one of name or plugin needs to be specified"
name = self.get_name(plugin)
if plugin is None:
plugin = self.get_plugin(name)
# if self._name2plugin[name] == None registration was blocked: ignore
if self._name2plugin.get(name):
del self._name2plugin[name]
for hookcaller in self._plugin2hookcallers.pop(plugin, []):
hookcaller._remove_plugin(plugin)
return plugin
def set_blocked(self, name):
""" block registrations of the given name, unregister if already registered. """
self.unregister(name=name)
self._name2plugin[name] = None
def is_blocked(self, name):
""" return True if the given plugin name is blocked. """
return name in self._name2plugin and self._name2plugin[name] is None
def add_hookspecs(self, module_or_class):
""" add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly. """
names = []
for name in dir(module_or_class):
spec_opts = self.parse_hookspec_opts(module_or_class, name)
if spec_opts is not None:
hc = getattr(self.hook, name, None)
if hc is None:
hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
setattr(self.hook, name, hc)
else:
# plugins registered this hook without knowing the spec
hc.set_specification(module_or_class, spec_opts)
for hookfunction in hc.get_hookimpls():
self._verify_hook(hc, hookfunction)
names.append(name)
if not names:
raise ValueError(
"did not find any %r hooks in %r" % (self.project_name, module_or_class)
)
def parse_hookspec_opts(self, module_or_class, name):
method = getattr(module_or_class, name)
return getattr(method, self.project_name + "_spec", None)
def get_plugins(self):
""" return the set of registered plugins. """
return set(self._plugin2hookcallers)
def is_registered(self, plugin):
""" Return True if the plugin is already registered. """
return plugin in self._plugin2hookcallers
def get_canonical_name(self, plugin):
""" Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of an registered plugin use ``get_name(plugin)`` instead."""
return getattr(plugin, "__name__", None) or str(id(plugin))
def get_plugin(self, name):
""" Return a plugin or None for the given name. """
return self._name2plugin.get(name)
def has_plugin(self, name):
""" Return True if a plugin with the given name is registered. """
return self.get_plugin(name) is not None
def get_name(self, plugin):
""" Return name for registered plugin or None if not registered. """
for name, val in self._name2plugin.items():
if plugin == val:
return name
def _verify_hook(self, hook, hookimpl):
if hook.is_historic() and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r\nhook %r\nhistoric incompatible to hookwrapper"
% (hookimpl.plugin_name, hook.name),
)
if hook.spec.warn_on_impl:
_warn_for_function(hook.spec.warn_on_impl, hookimpl.function)
# positional arg checking
notinspec = set(hookimpl.argnames) - set(hook.spec.argnames)
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
"Plugin %r for hook %r\nhookimpl definition: %s\n"
"Argument(s) %s are declared in the hookimpl but "
"can not be found in the hookspec"
% (
hookimpl.plugin_name,
hook.name,
_formatdef(hookimpl.function),
notinspec,
),
)
def check_pending(self):
""" Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError"""
for name in self.hook.__dict__:
if name[0] != "_":
hook = getattr(self.hook, name)
if not hook.has_spec():
for hookimpl in hook.get_hookimpls():
if not hookimpl.optionalhook:
raise PluginValidationError(
hookimpl.plugin,
"unknown hook %r in plugin %r"
% (name, hookimpl.plugin),
)
def load_setuptools_entrypoints(self, group, name=None):
""" Load modules from querying the specified setuptools ``group``.
:param str group: entry point group to load plugins
:param str name: if given, loads only plugins with the given ``name``.
:rtype: int
:return: return the number of loaded plugins by this call.
"""
from pkg_resources import (
iter_entry_points,
DistributionNotFound,
VersionConflict,
)
count = 0
for ep in iter_entry_points(group, name=name):
# is the plugin registered or blocked?
if self.get_plugin(ep.name) or self.is_blocked(ep.name):
continue
try:
plugin = ep.load()
except DistributionNotFound:
continue
except VersionConflict as e:
raise PluginValidationError(
plugin=None,
message="Plugin %r could not be loaded: %s!" % (ep.name, e),
)
self.register(plugin, name=ep.name)
self._plugin_distinfo.append((plugin, ep.dist))
count += 1
return count
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
return list(self._plugin_distinfo)
def list_name_plugin(self):
""" return list of name/plugin pairs. """
return list(self._name2plugin.items())
def get_hookcallers(self, plugin):
""" get all hook callers for the specified plugin. """
return self._plugin2hookcallers.get(plugin)
def add_hookcall_monitoring(self, before, after):
""" add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_Result`` object
which represents the result of the overall hook call.
"""
return _tracing._TracedHookExecution(self, before, after).undo
def enable_tracing(self):
""" enable tracing of hook calls and return an undo function. """
hooktrace = self.hook._trace
def before(hook_name, methods, kwargs):
hooktrace.root.indent += 1
hooktrace(hook_name, kwargs)
def after(outcome, hook_name, methods, kwargs):
if outcome.excinfo is None:
hooktrace("finish", hook_name, "-->", outcome.get_result())
hooktrace.root.indent -= 1
return self.add_hookcall_monitoring(before, after)
|
pytest-dev/pluggy | pluggy/hooks.py | varnames | python | def varnames(func):
cache = getattr(func, "__dict__", {})
try:
return cache["_varnames"]
except KeyError:
pass
if inspect.isclass(func):
try:
func = func.__init__
except AttributeError:
return (), ()
elif not inspect.isroutine(func): # callable object?
try:
func = getattr(func, "__call__", func)
except Exception:
return ()
try: # func MUST be a function or method here or we won't parse any args
spec = _getargspec(func)
except TypeError:
return (), ()
args, defaults = tuple(spec.args), spec.defaults
if defaults:
index = -len(defaults)
args, defaults = args[:index], tuple(args[index:])
else:
defaults = ()
# strip any implicit instance arg
# pypy3 uses "obj" instead of "self" for default dunder methods
implicit_names = ("self",) if not _PYPY3 else ("self", "obj")
if args:
if inspect.ismethod(func) or (
"." in getattr(func, "__qualname__", ()) and args[0] in implicit_names
):
args = args[1:]
try:
cache["_varnames"] = args, defaults
except TypeError:
pass
return args, defaults | Return tuple of positional and keywrord argument names for a function,
method, class or callable.
In case of a class, its ``__init__`` method is considered.
For methods the ``self`` parameter is not included. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/hooks.py#L142-L191 | [
"def _getargspec(func):\n return inspect.getfullargspec(func)\n",
"def _getargspec(func):\n return inspect.getargspec(func)\n"
] | """
Internal hook annotation, representation and calling machinery.
"""
import inspect
import sys
import warnings
from .callers import _legacymulticall, _multicall
class HookspecMarker(object):
""" Decorator helper class for marking functions as hook specifications.
You can instantiate it with a project_name to get a decorator.
Calling PluginManager.add_hookspecs later will discover all marked functions
if the PluginManager uses the same project_name.
"""
def __init__(self, project_name):
self.project_name = project_name
def __call__(
self, function=None, firstresult=False, historic=False, warn_on_impl=None
):
""" if passed a function, directly sets attributes on the function
which will make it discoverable to add_hookspecs(). If passed no
function, returns a decorator which can be applied to a function
later using the attributes supplied.
If firstresult is True the 1:N hook call (N being the number of registered
hook implementation functions) will stop at I<=N when the I'th function
returns a non-None result.
If historic is True calls to a hook will be memorized and replayed
on later registered plugins.
"""
def setattr_hookspec_opts(func):
if historic and firstresult:
raise ValueError("cannot have a historic firstresult hook")
setattr(
func,
self.project_name + "_spec",
dict(
firstresult=firstresult,
historic=historic,
warn_on_impl=warn_on_impl,
),
)
return func
if function is not None:
return setattr_hookspec_opts(function)
else:
return setattr_hookspec_opts
class HookimplMarker(object):
""" Decorator helper class for marking functions as hook implementations.
You can instantiate with a project_name to get a decorator.
Calling PluginManager.register later will discover all marked functions
if the PluginManager uses the same project_name.
"""
def __init__(self, project_name):
self.project_name = project_name
def __call__(
self,
function=None,
hookwrapper=False,
optionalhook=False,
tryfirst=False,
trylast=False,
):
""" if passed a function, directly sets attributes on the function
which will make it discoverable to register(). If passed no function,
returns a decorator which can be applied to a function later using
the attributes supplied.
If optionalhook is True a missing matching hook specification will not result
in an error (by default it is an error if no matching spec is found).
If tryfirst is True this hook implementation will run as early as possible
in the chain of N hook implementations for a specfication.
If trylast is True this hook implementation will run as late as possible
in the chain of N hook implementations.
If hookwrapper is True the hook implementations needs to execute exactly
one "yield". The code before the yield is run early before any non-hookwrapper
function is run. The code after the yield is run after all non-hookwrapper
function have run. The yield receives a ``_Result`` object representing
the exception or result outcome of the inner calls (including other hookwrapper
calls).
"""
def setattr_hookimpl_opts(func):
setattr(
func,
self.project_name + "_impl",
dict(
hookwrapper=hookwrapper,
optionalhook=optionalhook,
tryfirst=tryfirst,
trylast=trylast,
),
)
return func
if function is None:
return setattr_hookimpl_opts
else:
return setattr_hookimpl_opts(function)
def normalize_hookimpl_opts(opts):
opts.setdefault("tryfirst", False)
opts.setdefault("trylast", False)
opts.setdefault("hookwrapper", False)
opts.setdefault("optionalhook", False)
if hasattr(inspect, "getfullargspec"):
def _getargspec(func):
return inspect.getfullargspec(func)
else:
def _getargspec(func):
return inspect.getargspec(func)
_PYPY3 = hasattr(sys, "pypy_version_info") and sys.version_info.major == 3
class _HookRelay(object):
""" hook holder object for performing 1:N hook calls where N is the number
of registered plugins.
"""
def __init__(self, trace):
self._trace = trace
class _HookCaller(object):
def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None):
self.name = name
self._wrappers = []
self._nonwrappers = []
self._hookexec = hook_execute
self.argnames = None
self.kwargnames = None
self.multicall = _multicall
self.spec = None
if specmodule_or_class is not None:
assert spec_opts is not None
self.set_specification(specmodule_or_class, spec_opts)
def has_spec(self):
return self.spec is not None
def set_specification(self, specmodule_or_class, spec_opts):
assert not self.has_spec()
self.spec = HookSpec(specmodule_or_class, self.name, spec_opts)
if spec_opts.get("historic"):
self._call_history = []
def is_historic(self):
return hasattr(self, "_call_history")
def _remove_plugin(self, plugin):
def remove(wrappers):
for i, method in enumerate(wrappers):
if method.plugin == plugin:
del wrappers[i]
return True
if remove(self._wrappers) is None:
if remove(self._nonwrappers) is None:
raise ValueError("plugin %r not found" % (plugin,))
def get_hookimpls(self):
# Order is important for _hookexec
return self._nonwrappers + self._wrappers
def _add_hookimpl(self, hookimpl):
"""Add an implementation to the callback chain.
"""
if hookimpl.hookwrapper:
methods = self._wrappers
else:
methods = self._nonwrappers
if hookimpl.trylast:
methods.insert(0, hookimpl)
elif hookimpl.tryfirst:
methods.append(hookimpl)
else:
# find last non-tryfirst method
i = len(methods) - 1
while i >= 0 and methods[i].tryfirst:
i -= 1
methods.insert(i + 1, hookimpl)
if "__multicall__" in hookimpl.argnames:
warnings.warn(
"Support for __multicall__ is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
self.multicall = _legacymulticall
def __repr__(self):
return "<_HookCaller %r>" % (self.name,)
def __call__(self, *args, **kwargs):
if args:
raise TypeError("hook calling supports only keyword arguments")
assert not self.is_historic()
if self.spec and self.spec.argnames:
notincall = (
set(self.spec.argnames) - set(["__multicall__"]) - set(kwargs.keys())
)
if notincall:
warnings.warn(
"Argument(s) {} which are declared in the hookspec "
"can not be found in this hook call".format(tuple(notincall)),
stacklevel=2,
)
return self._hookexec(self, self.get_hookimpls(), kwargs)
def call_historic(self, result_callback=None, kwargs=None, proc=None):
"""Call the hook with given ``kwargs`` for all registered plugins and
for all plugins which will be registered afterwards.
If ``result_callback`` is not ``None`` it will be called for for each
non-None result obtained from a hook implementation.
.. note::
The ``proc`` argument is now deprecated.
"""
if proc is not None:
warnings.warn(
"Support for `proc` argument is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
result_callback = proc
self._call_history.append((kwargs or {}, result_callback))
# historizing hooks don't return results
res = self._hookexec(self, self.get_hookimpls(), kwargs)
if result_callback is None:
return
# XXX: remember firstresult isn't compat with historic
for x in res or []:
result_callback(x)
def call_extra(self, methods, kwargs):
""" Call the hook with some additional temporarily participating
methods using the specified kwargs as call parameters. """
old = list(self._nonwrappers), list(self._wrappers)
for method in methods:
opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
hookimpl = HookImpl(None, "<temp>", method, opts)
self._add_hookimpl(hookimpl)
try:
return self(**kwargs)
finally:
self._nonwrappers, self._wrappers = old
def _maybe_apply_history(self, method):
"""Apply call history to a new hookimpl if it is marked as historic.
"""
if self.is_historic():
for kwargs, result_callback in self._call_history:
res = self._hookexec(self, [method], kwargs)
if res and result_callback is not None:
result_callback(res[0])
class HookImpl(object):
def __init__(self, plugin, plugin_name, function, hook_impl_opts):
self.function = function
self.argnames, self.kwargnames = varnames(self.function)
self.plugin = plugin
self.opts = hook_impl_opts
self.plugin_name = plugin_name
self.__dict__.update(hook_impl_opts)
def __repr__(self):
return "<HookImpl plugin_name=%r, plugin=%r>" % (self.plugin_name, self.plugin)
class HookSpec(object):
def __init__(self, namespace, name, opts):
self.namespace = namespace
self.function = function = getattr(namespace, name)
self.name = name
self.argnames, self.kwargnames = varnames(function)
self.opts = opts
self.argnames = ["__multicall__"] + list(self.argnames)
self.warn_on_impl = opts.get("warn_on_impl")
|
pytest-dev/pluggy | pluggy/hooks.py | _HookCaller._add_hookimpl | python | def _add_hookimpl(self, hookimpl):
if hookimpl.hookwrapper:
methods = self._wrappers
else:
methods = self._nonwrappers
if hookimpl.trylast:
methods.insert(0, hookimpl)
elif hookimpl.tryfirst:
methods.append(hookimpl)
else:
# find last non-tryfirst method
i = len(methods) - 1
while i >= 0 and methods[i].tryfirst:
i -= 1
methods.insert(i + 1, hookimpl)
if "__multicall__" in hookimpl.argnames:
warnings.warn(
"Support for __multicall__ is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
self.multicall = _legacymulticall | Add an implementation to the callback chain. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/hooks.py#L245-L270 | null | class _HookCaller(object):
def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None):
self.name = name
self._wrappers = []
self._nonwrappers = []
self._hookexec = hook_execute
self.argnames = None
self.kwargnames = None
self.multicall = _multicall
self.spec = None
if specmodule_or_class is not None:
assert spec_opts is not None
self.set_specification(specmodule_or_class, spec_opts)
def has_spec(self):
return self.spec is not None
def set_specification(self, specmodule_or_class, spec_opts):
assert not self.has_spec()
self.spec = HookSpec(specmodule_or_class, self.name, spec_opts)
if spec_opts.get("historic"):
self._call_history = []
def is_historic(self):
return hasattr(self, "_call_history")
def _remove_plugin(self, plugin):
def remove(wrappers):
for i, method in enumerate(wrappers):
if method.plugin == plugin:
del wrappers[i]
return True
if remove(self._wrappers) is None:
if remove(self._nonwrappers) is None:
raise ValueError("plugin %r not found" % (plugin,))
def get_hookimpls(self):
# Order is important for _hookexec
return self._nonwrappers + self._wrappers
def __repr__(self):
return "<_HookCaller %r>" % (self.name,)
def __call__(self, *args, **kwargs):
if args:
raise TypeError("hook calling supports only keyword arguments")
assert not self.is_historic()
if self.spec and self.spec.argnames:
notincall = (
set(self.spec.argnames) - set(["__multicall__"]) - set(kwargs.keys())
)
if notincall:
warnings.warn(
"Argument(s) {} which are declared in the hookspec "
"can not be found in this hook call".format(tuple(notincall)),
stacklevel=2,
)
return self._hookexec(self, self.get_hookimpls(), kwargs)
def call_historic(self, result_callback=None, kwargs=None, proc=None):
"""Call the hook with given ``kwargs`` for all registered plugins and
for all plugins which will be registered afterwards.
If ``result_callback`` is not ``None`` it will be called for for each
non-None result obtained from a hook implementation.
.. note::
The ``proc`` argument is now deprecated.
"""
if proc is not None:
warnings.warn(
"Support for `proc` argument is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
result_callback = proc
self._call_history.append((kwargs or {}, result_callback))
# historizing hooks don't return results
res = self._hookexec(self, self.get_hookimpls(), kwargs)
if result_callback is None:
return
# XXX: remember firstresult isn't compat with historic
for x in res or []:
result_callback(x)
def call_extra(self, methods, kwargs):
""" Call the hook with some additional temporarily participating
methods using the specified kwargs as call parameters. """
old = list(self._nonwrappers), list(self._wrappers)
for method in methods:
opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
hookimpl = HookImpl(None, "<temp>", method, opts)
self._add_hookimpl(hookimpl)
try:
return self(**kwargs)
finally:
self._nonwrappers, self._wrappers = old
def _maybe_apply_history(self, method):
"""Apply call history to a new hookimpl if it is marked as historic.
"""
if self.is_historic():
for kwargs, result_callback in self._call_history:
res = self._hookexec(self, [method], kwargs)
if res and result_callback is not None:
result_callback(res[0])
|
pytest-dev/pluggy | pluggy/hooks.py | _HookCaller.call_historic | python | def call_historic(self, result_callback=None, kwargs=None, proc=None):
if proc is not None:
warnings.warn(
"Support for `proc` argument is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
result_callback = proc
self._call_history.append((kwargs or {}, result_callback))
# historizing hooks don't return results
res = self._hookexec(self, self.get_hookimpls(), kwargs)
if result_callback is None:
return
# XXX: remember firstresult isn't compat with historic
for x in res or []:
result_callback(x) | Call the hook with given ``kwargs`` for all registered plugins and
for all plugins which will be registered afterwards.
If ``result_callback`` is not ``None`` it will be called for for each
non-None result obtained from a hook implementation.
.. note::
The ``proc`` argument is now deprecated. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/hooks.py#L291-L316 | [
"def get_hookimpls(self):\n # Order is important for _hookexec\n return self._nonwrappers + self._wrappers\n"
] | class _HookCaller(object):
def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None):
self.name = name
self._wrappers = []
self._nonwrappers = []
self._hookexec = hook_execute
self.argnames = None
self.kwargnames = None
self.multicall = _multicall
self.spec = None
if specmodule_or_class is not None:
assert spec_opts is not None
self.set_specification(specmodule_or_class, spec_opts)
def has_spec(self):
return self.spec is not None
def set_specification(self, specmodule_or_class, spec_opts):
assert not self.has_spec()
self.spec = HookSpec(specmodule_or_class, self.name, spec_opts)
if spec_opts.get("historic"):
self._call_history = []
def is_historic(self):
return hasattr(self, "_call_history")
def _remove_plugin(self, plugin):
def remove(wrappers):
for i, method in enumerate(wrappers):
if method.plugin == plugin:
del wrappers[i]
return True
if remove(self._wrappers) is None:
if remove(self._nonwrappers) is None:
raise ValueError("plugin %r not found" % (plugin,))
def get_hookimpls(self):
# Order is important for _hookexec
return self._nonwrappers + self._wrappers
def _add_hookimpl(self, hookimpl):
"""Add an implementation to the callback chain.
"""
if hookimpl.hookwrapper:
methods = self._wrappers
else:
methods = self._nonwrappers
if hookimpl.trylast:
methods.insert(0, hookimpl)
elif hookimpl.tryfirst:
methods.append(hookimpl)
else:
# find last non-tryfirst method
i = len(methods) - 1
while i >= 0 and methods[i].tryfirst:
i -= 1
methods.insert(i + 1, hookimpl)
if "__multicall__" in hookimpl.argnames:
warnings.warn(
"Support for __multicall__ is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
self.multicall = _legacymulticall
def __repr__(self):
return "<_HookCaller %r>" % (self.name,)
def __call__(self, *args, **kwargs):
if args:
raise TypeError("hook calling supports only keyword arguments")
assert not self.is_historic()
if self.spec and self.spec.argnames:
notincall = (
set(self.spec.argnames) - set(["__multicall__"]) - set(kwargs.keys())
)
if notincall:
warnings.warn(
"Argument(s) {} which are declared in the hookspec "
"can not be found in this hook call".format(tuple(notincall)),
stacklevel=2,
)
return self._hookexec(self, self.get_hookimpls(), kwargs)
def call_extra(self, methods, kwargs):
""" Call the hook with some additional temporarily participating
methods using the specified kwargs as call parameters. """
old = list(self._nonwrappers), list(self._wrappers)
for method in methods:
opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
hookimpl = HookImpl(None, "<temp>", method, opts)
self._add_hookimpl(hookimpl)
try:
return self(**kwargs)
finally:
self._nonwrappers, self._wrappers = old
def _maybe_apply_history(self, method):
"""Apply call history to a new hookimpl if it is marked as historic.
"""
if self.is_historic():
for kwargs, result_callback in self._call_history:
res = self._hookexec(self, [method], kwargs)
if res and result_callback is not None:
result_callback(res[0])
|
pytest-dev/pluggy | pluggy/hooks.py | _HookCaller.call_extra | python | def call_extra(self, methods, kwargs):
old = list(self._nonwrappers), list(self._wrappers)
for method in methods:
opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
hookimpl = HookImpl(None, "<temp>", method, opts)
self._add_hookimpl(hookimpl)
try:
return self(**kwargs)
finally:
self._nonwrappers, self._wrappers = old | Call the hook with some additional temporarily participating
methods using the specified kwargs as call parameters. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/hooks.py#L318-L329 | [
"def _add_hookimpl(self, hookimpl):\n \"\"\"Add an implementation to the callback chain.\n \"\"\"\n if hookimpl.hookwrapper:\n methods = self._wrappers\n else:\n methods = self._nonwrappers\n\n if hookimpl.trylast:\n methods.insert(0, hookimpl)\n elif hookimpl.tryfirst:\n ... | class _HookCaller(object):
def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None):
self.name = name
self._wrappers = []
self._nonwrappers = []
self._hookexec = hook_execute
self.argnames = None
self.kwargnames = None
self.multicall = _multicall
self.spec = None
if specmodule_or_class is not None:
assert spec_opts is not None
self.set_specification(specmodule_or_class, spec_opts)
def has_spec(self):
return self.spec is not None
def set_specification(self, specmodule_or_class, spec_opts):
assert not self.has_spec()
self.spec = HookSpec(specmodule_or_class, self.name, spec_opts)
if spec_opts.get("historic"):
self._call_history = []
def is_historic(self):
return hasattr(self, "_call_history")
def _remove_plugin(self, plugin):
def remove(wrappers):
for i, method in enumerate(wrappers):
if method.plugin == plugin:
del wrappers[i]
return True
if remove(self._wrappers) is None:
if remove(self._nonwrappers) is None:
raise ValueError("plugin %r not found" % (plugin,))
def get_hookimpls(self):
# Order is important for _hookexec
return self._nonwrappers + self._wrappers
def _add_hookimpl(self, hookimpl):
"""Add an implementation to the callback chain.
"""
if hookimpl.hookwrapper:
methods = self._wrappers
else:
methods = self._nonwrappers
if hookimpl.trylast:
methods.insert(0, hookimpl)
elif hookimpl.tryfirst:
methods.append(hookimpl)
else:
# find last non-tryfirst method
i = len(methods) - 1
while i >= 0 and methods[i].tryfirst:
i -= 1
methods.insert(i + 1, hookimpl)
if "__multicall__" in hookimpl.argnames:
warnings.warn(
"Support for __multicall__ is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
self.multicall = _legacymulticall
def __repr__(self):
return "<_HookCaller %r>" % (self.name,)
def __call__(self, *args, **kwargs):
if args:
raise TypeError("hook calling supports only keyword arguments")
assert not self.is_historic()
if self.spec and self.spec.argnames:
notincall = (
set(self.spec.argnames) - set(["__multicall__"]) - set(kwargs.keys())
)
if notincall:
warnings.warn(
"Argument(s) {} which are declared in the hookspec "
"can not be found in this hook call".format(tuple(notincall)),
stacklevel=2,
)
return self._hookexec(self, self.get_hookimpls(), kwargs)
def call_historic(self, result_callback=None, kwargs=None, proc=None):
"""Call the hook with given ``kwargs`` for all registered plugins and
for all plugins which will be registered afterwards.
If ``result_callback`` is not ``None`` it will be called for for each
non-None result obtained from a hook implementation.
.. note::
The ``proc`` argument is now deprecated.
"""
if proc is not None:
warnings.warn(
"Support for `proc` argument is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
result_callback = proc
self._call_history.append((kwargs or {}, result_callback))
# historizing hooks don't return results
res = self._hookexec(self, self.get_hookimpls(), kwargs)
if result_callback is None:
return
# XXX: remember firstresult isn't compat with historic
for x in res or []:
result_callback(x)
def _maybe_apply_history(self, method):
"""Apply call history to a new hookimpl if it is marked as historic.
"""
if self.is_historic():
for kwargs, result_callback in self._call_history:
res = self._hookexec(self, [method], kwargs)
if res and result_callback is not None:
result_callback(res[0])
|
pytest-dev/pluggy | pluggy/hooks.py | _HookCaller._maybe_apply_history | python | def _maybe_apply_history(self, method):
if self.is_historic():
for kwargs, result_callback in self._call_history:
res = self._hookexec(self, [method], kwargs)
if res and result_callback is not None:
result_callback(res[0]) | Apply call history to a new hookimpl if it is marked as historic. | train | https://github.com/pytest-dev/pluggy/blob/4de9e440eeadd9f0eb8c5232b349ef64e20e33fb/pluggy/hooks.py#L331-L338 | null | class _HookCaller(object):
def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None):
self.name = name
self._wrappers = []
self._nonwrappers = []
self._hookexec = hook_execute
self.argnames = None
self.kwargnames = None
self.multicall = _multicall
self.spec = None
if specmodule_or_class is not None:
assert spec_opts is not None
self.set_specification(specmodule_or_class, spec_opts)
def has_spec(self):
return self.spec is not None
def set_specification(self, specmodule_or_class, spec_opts):
assert not self.has_spec()
self.spec = HookSpec(specmodule_or_class, self.name, spec_opts)
if spec_opts.get("historic"):
self._call_history = []
def is_historic(self):
return hasattr(self, "_call_history")
def _remove_plugin(self, plugin):
def remove(wrappers):
for i, method in enumerate(wrappers):
if method.plugin == plugin:
del wrappers[i]
return True
if remove(self._wrappers) is None:
if remove(self._nonwrappers) is None:
raise ValueError("plugin %r not found" % (plugin,))
def get_hookimpls(self):
# Order is important for _hookexec
return self._nonwrappers + self._wrappers
def _add_hookimpl(self, hookimpl):
"""Add an implementation to the callback chain.
"""
if hookimpl.hookwrapper:
methods = self._wrappers
else:
methods = self._nonwrappers
if hookimpl.trylast:
methods.insert(0, hookimpl)
elif hookimpl.tryfirst:
methods.append(hookimpl)
else:
# find last non-tryfirst method
i = len(methods) - 1
while i >= 0 and methods[i].tryfirst:
i -= 1
methods.insert(i + 1, hookimpl)
if "__multicall__" in hookimpl.argnames:
warnings.warn(
"Support for __multicall__ is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
self.multicall = _legacymulticall
def __repr__(self):
return "<_HookCaller %r>" % (self.name,)
def __call__(self, *args, **kwargs):
if args:
raise TypeError("hook calling supports only keyword arguments")
assert not self.is_historic()
if self.spec and self.spec.argnames:
notincall = (
set(self.spec.argnames) - set(["__multicall__"]) - set(kwargs.keys())
)
if notincall:
warnings.warn(
"Argument(s) {} which are declared in the hookspec "
"can not be found in this hook call".format(tuple(notincall)),
stacklevel=2,
)
return self._hookexec(self, self.get_hookimpls(), kwargs)
def call_historic(self, result_callback=None, kwargs=None, proc=None):
"""Call the hook with given ``kwargs`` for all registered plugins and
for all plugins which will be registered afterwards.
If ``result_callback`` is not ``None`` it will be called for for each
non-None result obtained from a hook implementation.
.. note::
The ``proc`` argument is now deprecated.
"""
if proc is not None:
warnings.warn(
"Support for `proc` argument is now deprecated and will be"
"removed in an upcoming release.",
DeprecationWarning,
)
result_callback = proc
self._call_history.append((kwargs or {}, result_callback))
# historizing hooks don't return results
res = self._hookexec(self, self.get_hookimpls(), kwargs)
if result_callback is None:
return
# XXX: remember firstresult isn't compat with historic
for x in res or []:
result_callback(x)
def call_extra(self, methods, kwargs):
""" Call the hook with some additional temporarily participating
methods using the specified kwargs as call parameters. """
old = list(self._nonwrappers), list(self._wrappers)
for method in methods:
opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
hookimpl = HookImpl(None, "<temp>", method, opts)
self._add_hookimpl(hookimpl)
try:
return self(**kwargs)
finally:
self._nonwrappers, self._wrappers = old
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.