index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
8,845
|
TestaVuota/COCO-Style-Dataset-Generator-GUI
|
refs/heads/master
|
/coco_dataset_generator/utils/visualize_json_file.py
|
import json
import cv2
import argparse
import os
import numpy as np
if __name__=='__main__':
    # Visualize every annotation (filled polygon + bbox + class label) of a
    # COCO-style dataset JSON file, either interactively or saved to disk.
    ap = argparse.ArgumentParser()
    ap.add_argument('json_file', help="Path to the JSON dataset file to visualize")
    ap.add_argument('--save', help='Save a few results to disk to accommodate non-display environments', action='store_true')
    ap.add_argument('--relpath', action='store_true', help='Absolute vs relative paths in dataset JSON')
    args = ap.parse_args()
    '''
    cv2.namedWindow('frame', cv2.WND_PROP_FULLSCREEN)
    '''
    with open(args.json_file, 'r') as f:
        obj = json.loads(f.read())
    images, annotations = obj["images"], obj["annotations"]
    classes = obj["classes"]
    print (classes)
    print ("Dataset contains %d images, %d objects!"%(len(images), len(annotations)))
    for idx, img in enumerate(images):
        # resolve the image path either relative to the JSON file or as stored
        if args.relpath:
            imgpath = os.path.join(os.path.dirname(args.json_file), img['file_name'])
        else:
            imgpath = img['file_name']
        print (imgpath)
        if os.path.exists(imgpath):
            # all annotations belonging to this image
            anns = [ann for ann in annotations if ann["image_id"]==img["id"]]
            image_cv2 = cv2.imread(imgpath)
            ann_img = image_cv2.copy()
            for ann in anns:
                # bbox appears to be stored [x1, y1, x2, y2] here — drawn as
                # two corner points below; TODO confirm against the writer.
                s = [int(x) for x in ann['bbox']]
                # segmentation is a flat [x0, y0, x1, y1, ...] polygon;
                # split into x and y coordinate lists
                seg = np.array(ann['segmentation'][0])
                x, y = seg[range(0, len(seg)-1, 2)], seg[range(1, len(seg), 2)]
                seg2d = [[xi, yi] for xi, yi in zip(x,y)]
                # fill the mask on a copy so it can be alpha-blended later
                cv2.fillPoly(ann_img, np.array([seg2d], dtype = 'int32'), (0, 255, 0))
                cv2.rectangle(image_cv2, (s[0], s[1]), (s[2], s[3]), (0,0,0), 2)
                cv2.putText(image_cv2, classes[ann['category_id']-1], (s[0]-10, s[1]+10), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2)
            # blend filled masks (25%) over the annotated image (75%)
            image_cv2 = cv2.addWeighted(ann_img,0.25,image_cv2,0.75,0)
            if not args.save:
                cv2.imshow('frame', image_cv2)
                q = cv2.waitKey()
            else:
                print ('saving sample!')
                cv2.imwrite('sample%d.jpg'%(idx), image_cv2)
                q = 10
                # in --save mode, stop after ~26 sample images
                if idx > 25:
                    q = 113
            if q == 113: # if q == 'q'
                exit()
|
{"/coco_dataset_generator/extras/cut_objects.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/gui/segment.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/extras/occlusion_transforms.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/utils/create_json_file.py": ["/coco_dataset_generator/gui/segment.py"]}
|
8,846
|
TestaVuota/COCO-Style-Dataset-Generator-GUI
|
refs/heads/master
|
/coco_dataset_generator/gui/segment_bbox_only.py
|
from matplotlib import pyplot as plt
from matplotlib.collections import PatchCollection
from matplotlib.patches import Polygon
from matplotlib.widgets import RadioButtons
from matplotlib.path import Path
import matplotlib.patches as patches
from PIL import Image
import matplotlib
import argparse
import numpy as np
import glob
import os
from matplotlib.widgets import RectangleSelector, Button, RadioButtons
from matplotlib.lines import Line2D
from matplotlib.artist import Artist
from .poly_editor import PolygonInteractor
from matplotlib.mlab import dist_point_to_segment
import sys
from ..utils.visualize_dataset import return_info
import json
from collections import defaultdict
def read_JSON_file(f):
    """Parse a bbox-style dataset JSON file.

    Returns a tuple ``(classes, img_paths, annotations)`` where ``annotations``
    maps an image path to a list of ``{'bbox': Rectangle, 'cls': name}`` dicts
    (``Rectangle`` is a translucent green matplotlib patch).
    """
    with open(f, 'r') as g:
        d = json.loads(g.read())
    img_paths = [img['file_name'] for img in d['images']]
    by_image = defaultdict(list)
    for ann in d['annotations']:
        p = ann['segmentation'][0]
        # the polygon stores the box corners; recover the axis-aligned bounds
        left, top = min(p[0], p[2], p[4], p[6]), min(p[1], p[3], p[5], p[7])
        right, bottom = max(p[0], p[2], p[4], p[6]), max(p[1], p[3], p[5], p[7])
        box = patches.Rectangle((left, top), right - left, bottom - top,
                                linewidth=1, edgecolor='g', facecolor='g', alpha=0.4)
        by_image[img_paths[ann['image_id']]].append(
            {'bbox': box, 'cls': d['classes'][ann['category_id'] - 1]})
    return d['classes'], img_paths, by_image
class COCO_dataset_generator(object):
    """Matplotlib GUI for drawing bounding-box-only COCO-style annotations.

    Shows images from ``img_dir`` one at a time, optionally pre-filling boxes
    predicted by Mask R-CNN (when the module-level ``args['no_feedback']`` is
    False), and lets the user add/edit boxes with a RectangleSelector.
    Annotations are saved to ``output.json`` via the Save button.
    """

    def __init__(self, fig, ax, img_dir, classes, model_path, json_file):
        # interactive rectangle drawing tool on the image axes
        self.RS = RectangleSelector(ax, self.line_select_callback,
                                    drawtype='box', useblit=True,
                                    button=[1, 3],  # don't use middle button
                                    minspanx=5, minspany=5,
                                    spancoords='pixels',
                                    interactive=True)
        ax.set_yticklabels([])
        ax.set_xticklabels([])
        #self.classes, self.img_paths, _ = read_JSON_file(json_file)
        # class list: first CSV column of the classes file, sorted so that
        # category ids are deterministic; images: all *.jpg in img_dir
        with open(classes, 'r') as f:
            self.classes, img_paths = sorted([x.strip().split(',')[0] for x in f.readlines()]), glob.glob(os.path.abspath(os.path.join(img_dir, '*.jpg')))
        plt.tight_layout()
        self.ax = ax
        self.fig = fig
        # class-picker radio buttons along the left edge
        self.axradio = plt.axes([0.0, 0.0, 0.1, 1])
        self.radio = RadioButtons(self.axradio, self.classes)
        self.zoom_scale = 1.2
        self.zoom_id = self.fig.canvas.mpl_connect('scroll_event', self.zoom)
        self.keyboard_id = self.fig.canvas.mpl_connect('key_press_event', self.onkeyboard)
        self.selected_poly = False
        self.axsave = plt.axes([0.81, 0.05, 0.1, 0.05])
        self.b_save = Button(self.axsave, 'Save')
        self.b_save.on_clicked(self.save)
        # model predictions not yet confirmed by the user
        self.objects, self.existing_patches, self.existing_rects = [], [], []
        self.num_pred = 0
        if json_file is None:
            # fresh dataset
            self.images, self.annotations = [], []
            self.index = 0
            self.ann_id = 0
        else:
            # resume: load existing images/annotations and continue numbering
            with open(json_file, 'r') as g:
                d = json.loads(g.read())
            self.images, self.annotations = d['images'], d['annotations']
            self.index = len(self.images)
            self.ann_id = len(self.annotations)
        prev_files = [x['file_name'] for x in self.images]
        for i, f in enumerate(img_paths):
            im = Image.open(f)
            width, height = im.size
            dic = {'file_name': f, 'id': self.index+i, 'height': height, 'width': width}
            if f not in prev_files:
                self.images.append(dic)
            else:
                # NOTE(review): skipping past already-annotated files by
                # bumping index — confirm this stays in range when resuming.
                self.index+=1
        image = plt.imread(self.images[self.index]['file_name'])
        self.ax.imshow(image, aspect='auto')
        if not args['no_feedback']:
            # Mask R-CNN assisted mode: load the model and pre-detect boxes
            # on the first image (drawn as translucent red patches).
            from mask_rcnn.get_json_config import get_demo_config
            from mask_rcnn import model as modellib
            from mask_rcnn.visualize_cv2 import random_colors
            self.config = get_demo_config(len(self.classes)-1, True)
            # NOTE(review): argparse always inserts 'config_path' (default
            # None) into args, so this condition is always True — confirm.
            if 'config_path' in args:
                self.config.from_json(args['config_path'])
            plt.connect('draw_event', self.persist)
            # Create model object in inference mode.
            self.model = modellib.MaskRCNN(mode="inference", model_dir='/'.join(args['weights_path'].split('/')[:-2]), config=self.config)
            # Load weights trained on MS-COCO
            self.model.load_weights(args['weights_path'], by_name=True)
            r = self.model.detect([image], verbose=0)[0]
            # Number of instances
            N = r['rois'].shape[0]
            masks = r['masks']
            # Show area outside image boundaries.
            height, width = image.shape[:2]
            class_ids, scores, rois = r['class_ids'], r['scores'], r['rois'],
            for i in range(N):
                # Label
                class_id = class_ids[i]
                score = scores[i] if scores is not None else None
                label = self.classes[class_id-1]
                # rois are (y0, x0, y1, x1) — converted to (x, y, w, h) here
                pat = patches.Rectangle((rois[i][1], rois[i][0]), rois[i][3]-rois[i][1], rois[i][2]-rois[i][0], linewidth=1, edgecolor='r',facecolor='r', alpha=0.4)
                rect = self.ax.add_patch(pat)
                self.objects.append(label)
                self.existing_patches.append(pat.get_bbox().get_points())
                self.existing_rects.append(pat)
            self.num_pred = len(self.objects)

    def line_select_callback(self, eclick, erelease):
        'eclick and erelease are the press and release events'
        # NOTE(review): the corner coordinates are computed but unused — the
        # RectangleSelector keeps the extents itself (read in onkeyboard 'i').
        x1, y1 = eclick.xdata, eclick.ydata
        x2, y2 = erelease.xdata, erelease.ydata

    def zoom(self, event):
        """Scroll-wheel zoom, centred on the cursor position."""
        if not event.inaxes:
            return
        cur_xlim = self.ax.get_xlim()
        cur_ylim = self.ax.get_ylim()
        xdata = event.xdata # get event x location
        ydata = event.ydata # get event y location
        if event.button == 'down':
            # deal with zoom in
            scale_factor = 1 / self.zoom_scale
        elif event.button == 'up':
            # deal with zoom out
            scale_factor = self.zoom_scale
        else:
            # deal with something that should never happen
            scale_factor = 1
            print (event.button)
        new_width = (cur_xlim[1] - cur_xlim[0]) * scale_factor
        new_height = (cur_ylim[1] - cur_ylim[0]) * scale_factor
        # keep the point under the cursor fixed while rescaling
        relx = (cur_xlim[1] - xdata)/(cur_xlim[1] - cur_xlim[0])
        rely = (cur_ylim[1] - ydata)/(cur_ylim[1] - cur_ylim[0])
        self.ax.set_xlim([xdata - new_width * (1-relx), xdata + new_width * (relx)])
        self.ax.set_ylim([ydata - new_height * (1-rely), ydata + new_height * (rely)])
        self.ax.figure.canvas.draw()

    def save(self, event):
        """Save-button callback: dump current state to ``output.json``."""
        data = {'images':self.images[:self.index+1], 'annotations':self.annotations, 'categories':[], 'classes': self.classes}
        with open('output.json', 'w') as outfile:
            json.dump(data, outfile)

    def persist(self, event):
        # keep the selector rectangle visible across canvas redraws
        if self.RS.active:
            self.RS.update()

    def onkeyboard(self, event):
        """Keyboard interface.

        a   - pick up the predicted box under the cursor for editing
        i   - commit the current selector rectangle as an annotation
        n/N - next image (re-runs detection); q/Q - quit
        """
        if not event.inaxes:
            return
        elif event.key == 'a':
            # adopt the first existing prediction containing the cursor:
            # select its class, load its extents into the selector, and
            # remove it from the pending-prediction lists
            for i, ((xmin, ymin), (xmax, ymax)) in enumerate(self.existing_patches):
                if xmin<=event.xdata<=xmax and ymin<=event.ydata<=ymax:
                    self.radio.set_active(self.classes.index(self.objects[i]))
                    self.RS.set_active(True)
                    self.rectangle = self.existing_rects[i]
                    self.rectangle.set_visible(False)
                    coords = self.rectangle.get_bbox().get_points()
                    self.RS.extents = [coords[0][0], coords[1][0], coords[0][1], coords[1][1]]
                    self.RS.to_draw.set_visible(True)
                    self.fig.canvas.draw()
                    self.existing_rects.pop(i)
                    self.existing_patches.pop(i)
                    self.objects.pop(i)
                    # NOTE(review): uses the global `fig`, not self.fig
                    fig.canvas.draw()
                    break
        elif event.key == 'i':
            b = self.RS.extents # xmin, xmax, ymin, ymax
            b = [int(x) for x in b]
            # ignore degenerate (zero-area) selections
            if b[1]-b[0]>0 and b[3]-b[2]>0:
                # closed rectangle polygon as a flat [x, y, x, y, ...] list
                poly = [b[0], b[2], b[0], b[3], b[1], b[3], b[1], b[2], b[0], b[2]]
                area = (b[1]-b[0])*(b[3]-b[2])
                bbox = [b[0], b[2], b[1], b[3]]
                dic2 = {'segmentation': [poly], 'area': area, 'iscrowd':0, 'image_id':self.index, 'bbox':bbox, 'category_id': self.classes.index(self.radio.value_selected)+1, 'id': self.ann_id}
                if dic2 not in self.annotations:
                    self.annotations.append(dic2)
                    self.ann_id+=1
                rect = patches.Rectangle((b[0],b[2]),b[1]-b[0],b[3]-b[2],linewidth=1,edgecolor='g',facecolor='g', alpha=0.4)
                self.ax.add_patch(rect)
                # toggle the selector to clear the current rubber-band box
                self.RS.set_active(False)
                self.fig.canvas.draw()
                self.RS.set_active(True)
        elif event.key in ['N', 'n']:
            self.ax.clear()
            self.index+=1
            if (len(self.objects)==self.num_pred):
                # no prediction was confirmed: drop the previous image entry
                self.images.pop(self.index-1)
                self.index-=1
            if self.index==len(self.images):
                exit()
            image = plt.imread(self.images[self.index]['file_name'])
            self.ax.imshow(image)
            self.ax.set_yticklabels([])
            self.ax.set_xticklabels([])
            # NOTE(review): self.model only exists when args['no_feedback']
            # is False — pressing 'n' without a model raises AttributeError.
            r = self.model.detect([image], verbose=0)[0]
            # Number of instances
            N = r['rois'].shape[0]
            masks = r['masks']
            # Show area outside image boundaries.
            height, width = image.shape[:2]
            class_ids, scores, rois = r['class_ids'], r['scores'], r['rois'],
            self.existing_rects, self.existing_patches, self.objects = [], [], []
            for i in range(N):
                # Label
                class_id = class_ids[i]
                score = scores[i] if scores is not None else None
                label = self.classes[class_id-1]
                pat = patches.Rectangle((rois[i][1], rois[i][0]), rois[i][3]-rois[i][1], rois[i][2]-rois[i][0], linewidth=1, edgecolor='r',facecolor='r', alpha=0.4)
                rect = self.ax.add_patch(pat)
                self.objects.append(label)
                self.existing_patches.append(pat.get_bbox().get_points())
                self.existing_rects.append(pat)
            self.num_pred = len(self.objects)
            self.fig.canvas.draw()
        elif event.key in ['q','Q']:
            exit()
if __name__=='__main__':
    # Launch the bbox-only annotation GUI.
    ap = argparse.ArgumentParser()
    ap.add_argument("-i", "--image_file", required=True, help="Path to the images dir")
    ap.add_argument("-c", "--classes_file", required=True, help="Path to classes file")
    ap.add_argument("-j", "--json_file", required=False, help="Path of JSON file to append dataset to", default=None)
    ap.add_argument("--save_csv", required=False, action="store_true", help="Choose option to save dataset as CSV file annotations.csv")
    ap.add_argument('-w', "--weights_path", default=None, help="Path to Mask RCNN checkpoint save file")
    ap.add_argument('-x', "--config_path", default=None, help="Path to Mask RCNN JSON config file")
    args = vars(ap.parse_args())
    # BUG FIX: argparse always inserts 'weights_path' (default None) into the
    # namespace, so `'weights_path' not in args` was always False and the GUI
    # always tried to import and load Mask R-CNN. Check the value instead:
    # no weights file supplied => run without model feedback.
    args["no_feedback"] = args['weights_path'] is None
    fig = plt.figure(figsize=(14, 14))
    ax = plt.gca()
    gen = COCO_dataset_generator(fig, ax, args['image_file'], args['classes_file'], args['weights_path'], args['json_file'])
    plt.subplots_adjust(bottom=0.2)
    plt.show()
|
{"/coco_dataset_generator/extras/cut_objects.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/gui/segment.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/extras/occlusion_transforms.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/utils/create_json_file.py": ["/coco_dataset_generator/gui/segment.py"]}
|
8,847
|
TestaVuota/COCO-Style-Dataset-Generator-GUI
|
refs/heads/master
|
/coco_dataset_generator/extras/split_json_file.py
|
import json
import argparse
def contains(splits):
    """Build a binary inclusion map over image ids from split specifiers.

    Each element of ``splits`` is either a single id (``"5"``) or an
    inclusive range (``"1-3"``).

    :param splits: list of str specifiers
    :return: (A, MIN, MAX) where A is a list of 0/1 flags covering ids
             MIN..MAX inclusive, so membership lookup is O(1) via A[id-MIN]
    """
    MAX, MIN = max([int(x.split('-')[-1]) for x in splits]), min([int(x.split('-')[0]) for x in splits])
    A = [0 for _ in range(MAX-MIN+1)]
    for sp in splits:
        if '-' in sp:
            beg, end = [int(x) for x in sp.split('-')]
        else:
            beg = end = int(sp)
        # mark every id in the (inclusive) range
        # (removed leftover debug print of every index)
        for idx in range(beg-MIN, end+1-MIN):
            A[idx] = 1
    return A, MIN, MAX
if __name__=='__main__':
    # Cut a subset of images (by id ranges) out of a dataset JSON file.
    ap = argparse.ArgumentParser()
    ap.add_argument('json', help='Path to JSON dataset file')
    ap.add_argument('split', nargs='+', help='Dataset split for splitting')
    ap.add_argument('--out', help='Path to output JSON file', default='cut_dataset.json')
    args = ap.parse_args()
    with open(args.json, 'r') as f:
        obj = json.load(f)
    # binary inclusion map over image ids MIN..MAX
    A, MIN, MAX = contains(args.split)
    imgs, anns = [], []
    for img in obj['images']:
        if img['id'] >= MIN and img['id'] <= MAX:
            if A[img['id']-MIN]:
                # keep this image plus every annotation pointing at it
                ANN = [ann for ann in obj['annotations'] if ann['image_id']==img['id']]
                anns.extend(ANN)
                imgs.append(img)
    with open(args.out, 'w') as f:
        json.dump({'images': imgs, 'annotations': anns, 'classes': obj['classes'], 'categories': []}, f)
|
{"/coco_dataset_generator/extras/cut_objects.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/gui/segment.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/extras/occlusion_transforms.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/utils/create_json_file.py": ["/coco_dataset_generator/gui/segment.py"]}
|
8,848
|
TestaVuota/COCO-Style-Dataset-Generator-GUI
|
refs/heads/master
|
/coco_dataset_generator/extras/create_binary_dataset.py
|
import argparse
import json
if __name__=='__main__':
    # Collapse a multi-class dataset into a single-class ("object") dataset.
    ap = argparse.ArgumentParser()
    ap.add_argument('json', help='Path to original multi-class JSON file')
    args = ap.parse_args()
    with open(args.json, 'r') as f:
        obj = json.load(f)
    # one class; every annotation becomes category 1
    obj['classes'] = ['object']
    for idx in range(len(obj['annotations'])):
        obj['annotations'][idx]['category_id'] = 1
    # write next to the input as <name>_binary.json
    with open('.'.join(args.json.split('.')[:-1])+'_binary.json', 'w') as f:
        json.dump(obj, f)
|
{"/coco_dataset_generator/extras/cut_objects.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/gui/segment.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/extras/occlusion_transforms.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/utils/create_json_file.py": ["/coco_dataset_generator/gui/segment.py"]}
|
8,849
|
TestaVuota/COCO-Style-Dataset-Generator-GUI
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
import os
import sys
import subprocess
def install(package):
    # Install `package` into the current interpreter's environment via pip.
    # NOTE(review): pip's return code is ignored, so failures are silent.
    subprocess.call([sys.executable, "-m", "pip", "install", package])
# Choose a requirements file: setting the MASK_RCNN env var opts into the
# Mask R-CNN dependency set.
if os.getenv('MASK_RCNN'):
    fl = 'requirements_maskrcnn.txt'
else:
    fl = 'requirements.txt'
with open(fl, 'r') as f:
    packs = [x.strip() for x in f.readlines()]
# NOTE(review): requirements are installed imperatively at setup.py run time
# and install_requires is left empty — unconventional, presumably deliberate.
for p in packs:
    install(p)
dependencies = []
# package only the project's own packages
packages = [
    package for package in find_packages() if package.startswith('coco_dataset_generator')
]
setup(name='coco_dataset_generator',
      version='1.0',
      description='COCO Style Dataset Generator GUI',
      author='hanskrupakar',
      author_email='hansk@nyu.edu',
      license='Open-Source',
      url='https://www.github.com/hanskrupakar/COCO-Style-Dataset-Generator-GUI',
      packages=packages,
      install_requires=dependencies,
      test_suite='unit_tests',
      )
|
{"/coco_dataset_generator/extras/cut_objects.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/gui/segment.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/extras/occlusion_transforms.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/utils/create_json_file.py": ["/coco_dataset_generator/gui/segment.py"]}
|
8,850
|
TestaVuota/COCO-Style-Dataset-Generator-GUI
|
refs/heads/master
|
/coco_dataset_generator/utils/delete_images.py
|
import argparse
import json
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
def press(event):
    """Key handler for the image-deletion GUI (uses module-level globals).

    q - quit; d - delete the shown image and its annotations;
    j - compact ids and save to deleted_dataset.json; any other key just
    closes the figure so the main loop advances to the next image.
    Reads/writes globals: anns, new_anns, new_imgs, ptr, classes.
    """
    if event.key.lower() == 'q':
        exit()
    if event.key.lower() == 'd':
        # drop every annotation of the currently displayed image
        for ann in anns:
            new_anns.remove(ann)
        print ('Deleted image:', new_imgs[ptr]["file_name"], "from the dataset!")
        # NOTE(review): `ptr` enumerates the original list; after an earlier
        # deletion the indices of new_imgs shift, so this may delete the
        # wrong entry — confirm before relying on multiple 'd' presses.
        del new_imgs[ptr]
    if event.key.lower() == 'j':
        print ("Saving dataset to file! Please wait!")
        # Account for deletions by changing label space
        # (re-pack image and annotation ids into contiguous 0..n-1 ranges)
        id_list = [int(img['id']) for img in new_imgs]
        ann_list = [int(ann['id']) for ann in new_anns]
        full_img, full_ann = [x for x in range(len(id_list))], [x for x in range(len(ann_list))]
        # ids freed by deletion, and surviving ids outside the packed range
        free_img, free_ann = list(set(full_img)-set(id_list)), list(set(full_ann)-set(ann_list))
        change_img, change_ann = list(set(id_list)-set(full_img)), list(set(ann_list)-set(full_ann))
        for f, c in zip(free_img, change_img):
            for img in new_imgs:
                if img['id']==c:
                    img['id']=f
            # keep annotations pointing at the re-numbered image
            for ann in new_anns:
                if ann['image_id']==c:
                    ann['image_id']=f
        for f, c in zip(free_ann, change_ann):
            for ann in new_anns:
                if ann['id']==c:
                    ann['id']=f
        data = {'images': new_imgs, 'annotations': new_anns, 'categories':[], 'classes':classes}
        with open('deleted_dataset.json', 'w') as f:
            json.dump(data, f)
        print ("Dataset saved!")
    else:
        plt.close()
if __name__=='__main__':
    # Interactive tool to page through a dataset and delete bad images.
    ap = argparse.ArgumentParser()
    ap.add_argument('--json_file', required=True, help='Path to JSON file')
    args = ap.parse_args()
    with open(args.json_file, 'r') as f:
        obj = json.load(f)
    images, annotations = obj["images"], obj["annotations"]
    classes = obj["classes"]
    print ("Total number of images in dataset: ", len(images))
    # NOTE(review): these are aliases, not copies — press() mutates the very
    # lists that `images`/`annotations` reference.
    new_imgs, new_anns = images, annotations
    for ptr, img in enumerate(images):
        fig, ax = plt.subplots()
        plt.tick_params(axis='both', which='both', bottom='off', top='off',
                        labelbottom='off', right='off', left='off', labelleft='off')
        fig.canvas.mpl_connect('key_press_event', press)
        ax.set_title('d - Delete image; j - Save dataset; q - Exit; others - Next image')
        anns = [ann for ann in annotations if ann["image_id"]==img["id"]]
        image = plt.imread(img["file_name"])
        plt.imshow(image)
        for ann in anns:
            # bbox drawn as corner pair: assumes [x1, y1, x2, y2] — confirm
            s = [int(x) for x in ann['bbox']]
            rect = patches.Rectangle((s[0],s[1]),s[2]-s[0],s[3]-s[1],linewidth=1,edgecolor='r',facecolor='none')
            ax = plt.gca()
            ax.add_patch(rect)
            plt.text(s[0]-10, s[1]+10, classes[ann['category_id']-1])
        plt.show()
    # final save after the interactive pass
    print ("Saving dataset to file! Please wait!")
    data = {'images': new_imgs, 'annotations': new_anns, 'categories':[], 'classes':classes}
    with open('deleted_dataset.json', 'w') as f:
        json.dump(data, f)
    print ("Dataset saved!")
|
{"/coco_dataset_generator/extras/cut_objects.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/gui/segment.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/extras/occlusion_transforms.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/utils/create_json_file.py": ["/coco_dataset_generator/gui/segment.py"]}
|
8,851
|
TestaVuota/COCO-Style-Dataset-Generator-GUI
|
refs/heads/master
|
/coco_dataset_generator/utils/create_json_file.py
|
#coding: utf8
import xml.etree.cElementTree as ET
import glob
import argparse
import os
import numpy as np
import json
import unicodedata
from PIL import Image
from ..gui.segment import COCO_dataset_generator as cocogen
if __name__=='__main__':
    # Build a COCO-style dataset JSON from per-image .txt annotation dumps.
    ap = argparse.ArgumentParser()
    ap.add_argument("-i", "--image_dir", required=True, help="Path to the image dir")
    ap.add_argument("-o", "--file_path", required=True, help="Path of output file")
    ap.add_argument("-c", "--class_file", required=True, help="Path of file with output classes")
    ap.add_argument("-t", "--type", required=True, help="Type of the image files")
    args = vars(ap.parse_args())
    # class names, normalised and sorted so category ids are deterministic
    with open(args['class_file'], 'r') as f:
        classes = sorted([unicodedata.normalize('NFKD', x).strip() for x in f.readlines()])
    images, anns = [], []
    # only images that have a matching .txt annotation file are included
    img_paths = [x for x in glob.glob(os.path.join(args['image_dir'], '*.'+args['type'])) if os.path.exists(x[:-3]+'txt')]
    # BUG FIX: the image id was previously a counter that was never
    # incremented, so every image got id 0 and all annotations collapsed
    # onto one image. enumerate() gives each image a unique id matching the
    # annotation loop below (both iterate in sorted order).
    for i, f in enumerate(sorted(img_paths)):
        img = Image.open(f)
        width, height = img.size
        dic = {'file_name': f, 'id': i, 'height': height, 'width': width}
        images.append(dic)
    ann_index = 0
    for i, f in enumerate(sorted(glob.glob(os.path.join(os.path.abspath(args['image_dir']), '*.txt')))):
        ptr = 0
        # utf-8-sig strips a BOM if present
        with open(f, 'r', encoding='utf-8-sig') as g:
            s = g.read()
        # skip the 2-line header; drop the trailing empty line
        s = s.split('\n')[2:-1]
        width, height = [int(x) for x in s[0].split(' ')]
        s = s[2:]
        # records are 4 lines each: class, area, polygon, blank separator
        while(ptr<len(s)):
            cat_id = classes.index(s[ptr].encode('utf-8').decode('utf-8-sig'))+1
            area = float(s[ptr+1])
            poly = [[float(x) for x in s[ptr+2].split(' ')[:-1]]]
            # an object may span several polygon lines, ended by a blank line
            if len(s)>ptr+3 and s[ptr+3] != '':
                ind = ptr + 3
                while (ind<len(s) and s[ind]!=''):
                    poly.append([float(x) for x in s[ind].split(' ')[:-1]])
                    ind+=1
                # rewind so the fixed ptr+=4 below lands on the next record
                ptr = ind-3
            # axis-aligned bounds over all polygons of the object
            x1, x2, y1, y2 = None, None, None, None
            for p in poly:
                points = np.reshape(np.array(p), (int(len(p)/2), 2))
                if x1 is None:
                    x1, y1 = points.min(0)
                    x2, y2 = points.max(0)
                else:
                    if points.min(0)[0]<x1:
                        x1 = points.min(0)[0]
                    if points.min(0)[1]<y1:
                        y1 = points.min(0)[1]
                    if points.max(0)[0]>x2:
                        x2 = points.max(0)[0]
                    if points.max(0)[1]>y2:
                        y2 = points.max(0)[1]
            # NOTE(review): bbox is stored [x2, y2, x1, y1] (max then min);
            # other tools in this repo read [x1, y1, x2, y2] — confirm.
            bbox = [x2, y2, x1, y1]
            dic2 = {'segmentation': poly, 'area': area, 'iscrowd':0, 'image_id':i, 'bbox':bbox, 'category_id': cat_id, 'id': ann_index}
            ann_index+=1
            ptr+=4
            anns.append(dic2)
    data = {'images':images, 'annotations':anns, 'categories':[], 'classes': classes}
    with open(args['file_path']+'.json', 'w') as outfile:
        json.dump(data, outfile)
|
{"/coco_dataset_generator/extras/cut_objects.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/gui/segment.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/extras/occlusion_transforms.py": ["/coco_dataset_generator/gui/contours.py"], "/coco_dataset_generator/utils/create_json_file.py": ["/coco_dataset_generator/gui/segment.py"]}
|
8,855
|
Daiding-Hu/Exclecode
|
refs/heads/master
|
/pymysql.py
|
import pymssql
# Connect to the database.
# NOTE(review): pymssql is the MS SQL Server driver, yet host/port 3306 and
# db 'python' look like a MySQL setup (and the file is named pymysql.py) —
# confirm which database server is actually intended.
connect = pymssql.connect(
    host='localhost',
    port=3306,
    user='root',
    passwd='hu12580WEI',
    db='python',
    charset='utf8'
)
# obtain a cursor
cursor = connect.cursor()
# insert one row into the trade table
# NOTE(review): building SQL via the % operator is vulnerable to SQL
# injection; prefer cursor.execute(sql, params) with placeholders.
sql = "INSERT INTO trade (name, account, saving) VALUES ( '%s', '%s', %.2f )"
data = ('雷军', '13512345678', 10000)
cursor.execute(sql % data)
connect.commit()
print('成功插入', cursor.rowcount, '条数据')
|
{"/Excle-pandas.py": ["/pymysql.py"]}
|
8,856
|
Daiding-Hu/Exclecode
|
refs/heads/master
|
/Excle-xlwt.py
|
import xlwt
# Write a row of numbers into a new worksheet with xlwt.
workbook = xlwt.Workbook(encoding='utf-8')
worksheet = workbook.add_sheet('表格1')
# worksheet.write(0, 0, 'this is test') # write a cell at (0, 0)
# workbook.save('练习2.xlsx')
list_num = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
# write the numbers across row 0
for i in range(len(list_num)):
    worksheet.write(0, i, list_num[i])
style = xlwt.XFStyle() # initialise a style
font = xlwt.Font() # create a font
font.name = 'Times New Roman'
font.bold = True
# NOTE(review): `font` is never attached to `style` and `style` is never
# passed to a write call, so the styling has no effect — confirm intent.
workbook.save('练习2.xlsx')
|
{"/Excle-pandas.py": ["/pymysql.py"]}
|
8,857
|
Daiding-Hu/Exclecode
|
refs/heads/master
|
/Excle-openpyxl.py
|
import openpyxl
from openpyxl import Workbook
from openpyxl import load_workbook
# # instantiate a workbook
# wb = Workbook()
# # activate a worksheet: uses the _active_sheet_index property (default 0),
# # so unless changed it always returns the first worksheet.
# ws = wb.active
#
# # assign cell values directly
# ws['A1'] = 42
# ws['A2'] = '=sum(B2:C2)'
# wb.save('练习3.xlsx')
#
# print(ws['A2'].value)
# # wb2 = load_workbook('练习2.xlsx')
wb = Workbook()
ws = wb.active
# NOTE(review): `dataframe_to_rows` (openpyxl.utils.dataframe) is never
# imported and `df` is never defined — as written this script raises
# NameError; it appears to be a partial copy of the openpyxl/pandas example.
for r in dataframe_to_rows(df, index=True, header=True):
    ws.append(r)
# apply the built-in 'Pandas' style to the header column and header row
for cell in ws['A'] + ws[1]:
    cell.style = 'Pandas'
wb.save("pandas_openpyxl.xlsx")
|
{"/Excle-pandas.py": ["/pymysql.py"]}
|
8,858
|
Daiding-Hu/Exclecode
|
refs/heads/master
|
/Excle-xlrd.py
|
import openpyxl
import xlrd
import xlwt
import datetime
import pymssql
# Demo of the xlrd API: sheets, rows, columns, cells, dates, merged cells.
workbook = xlrd.open_workbook('练习.xlsx')
sheet_names = workbook.sheet_names()
print(sheet_names)
sheets = workbook.sheets() # all sheet objects
print(sheets)
sheet1 = workbook.sheet_by_index(0) # first sheet (index 0)
print(sheet1)
sheet2 = workbook.sheet_by_name("表格2") # sheet looked up by name
print(sheet2)
sheet_is_load = workbook.sheet_loaded('表格2') # loaded check, by name
print(sheet_is_load)
sheet_is_load2 = workbook.sheet_loaded(0) # loaded check, by index
print(sheet_is_load2)
'''对行的操作'''
# --- row operations ---
nrows = sheet1.nrows # number of populated rows in sheet1
print(nrows)
row_value = sheet1.row_values(rowx=2) # values of row 2 as a list
print(row_value)
row_value1 = sheet1.row_values(rowx=2, start_colx=1, end_colx=5) # slice of row 2; start inclusive, end exclusive
print(row_value1)
row_object = sheet1.row(rowx=2) # Cell objects of row 2 (value + type each)
# cell types: empty, string, number, date, boolean, error
print(row_object)
row_type = sheet1.row_types(rowx=2) # ctype codes for row 2
# ctype codes: empty=0, string=1, number=2, date=3, boolean=4, error=5
print(row_type)
row_len = sheet1.row_len(rowx=2)
print(row_len)
'''对列的操作'''
# --- column operations ---
ncols = sheet1.ncols # number of populated columns in sheet1
print(ncols)
ncols_value = sheet1.col_values(colx=1)
print(ncols_value) # values of column 1
ncols_value1 = sheet1.col_values(4, 1, 3) # column 4, rows 1..2
print(ncols_value1)
cols_slic = sheet1.col_slice(colx=2) # Cell objects (value + type) of column 2
print(cols_slic)
cols_type = sheet1.col_types(colx=2) # ctype codes for column 2
print(cols_type)
'''对sheet对象中的单元执行操作'''
# --- individual cell operations ---
cell_value = sheet1.cell(rowx=0, colx=0) # Cell (value + type) at (0, 0)
print(cell_value)
a = sheet1.cell_value(0, 0) # bare value at (0, 0)
print(a)
a1 = sheet1.cell_type(1, 13)
print(1)
b1 = sheet1.cell_value(1, 13)
print(b1)
c1 = xlrd.xldate.xldate_as_datetime(b1, workbook.datemode) # Excel serial -> datetime
print(c1)
d1 = c1.strftime('%Y/%m/%d') # reformat the date
print(d1)
'''合并单元格'''
''' 获取合并的单元格
若表格为xls格式的,打开workbook时需将formatting_info设置为True,然后再获取sheet中的合并单元格;
若表格有xlsx格式的,打开workbook时保持formatting_info为默认值False,然后再获取sheet中的合并单元格;
workbook1 = xlrd.open_workbook("测试.xls", formatting_info=True)'''
# --- merged cells (for .xls open with formatting_info=True; for .xlsx keep
# the default formatting_info=False) ---
a2 = sheet2.merged_cells # merged-cell ranges of the sheet
print(a2)
b2 = sheet2.cell_value(2, 8) # read merged data via its top-left coordinate
print(b2)
for (row_start, row_end, col_start, col_end) in sheet2.merged_cells: # all merged-cell values
    print(sheet2.cell_value(row_start, col_start))
|
{"/Excle-pandas.py": ["/pymysql.py"]}
|
8,859
|
Daiding-Hu/Exclecode
|
refs/heads/master
|
/Excle-pandas.py
|
import numpy as np
import pandas as pd
import pymysql
|
{"/Excle-pandas.py": ["/pymysql.py"]}
|
8,860
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/sasstastic/logs.py
|
import logging
import logging.config
import click
class ClickHandler(logging.Handler):
    """Logging handler that renders records in colour via click.secho.

    INFO records whose formatted text starts with ``>>`` are shown in cyan
    (with the marker stripped); other levels use the ``formats`` table,
    falling back to red for anything unlisted (warnings above WARN, errors).
    """

    # level -> click.secho keyword arguments
    formats = {
        logging.DEBUG: {'fg': 'white', 'dim': True},
        logging.INFO: {'fg': 'green'},
        logging.WARN: {'fg': 'yellow'},
    }

    def emit(self, record):
        msg = self.format(record)
        highlighted = record.levelno == logging.INFO and msg.startswith('>>')
        if highlighted:
            click.secho(msg[2:], fg='cyan')
            return
        style = self.formats.get(record.levelno, {'fg': 'red'})
        click.secho(msg, **style)
def log_config(log_level: str) -> dict:
    """
    Setup default config. for dictConfig.
    :param log_level: str name or django debugging int
    :return: dict suitable for ``logging.config.dictConfig``
    """
    assert log_level in {'DEBUG', 'INFO', 'WARNING', 'ERROR'}, f'wrong log level {log_level}'
    handler = {'level': log_level, 'class': 'sasstastic.logs.ClickHandler', 'formatter': 'default'}
    sasstastic_logger = {'handlers': ['sasstastic'], 'level': log_level, 'propagate': False}
    return {
        'version': 1,
        'disable_existing_loggers': True,
        'formatters': {'default': {'format': '%(message)s'}, 'indent': {'format': ' %(message)s'}},
        'handlers': {'sasstastic': handler},
        'loggers': {'sasstastic': sasstastic_logger},
    }
def setup_logging(log_level):
    """Install the sasstastic logging configuration for the given level."""
    logging.config.dictConfig(log_config(log_level))
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,861
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/sasstastic/config.py
|
import logging
import re
from pathlib import Path
from typing import Any, Dict, List, Optional, Pattern
import yaml
from pydantic import BaseModel, HttpUrl, ValidationError, validator
from pydantic.error_wrappers import display_errors
from .common import SasstasticError, is_file_path
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
__all__ = 'SourceModel', 'DownloadModel', 'ConfigModel', 'load_config'
logger = logging.getLogger('sasstastic.config')
class SourceModel(BaseModel):
    """One downloadable source: a URL plus where to store the file(s)."""

    url: HttpUrl
    # archive-member pattern -> destination path (None discards the member)
    extract: Optional[Dict[Pattern, Optional[Path]]] = None
    # destination path for a plain (non-archive) download
    to: Optional[Path] = None

    @validator('url', pre=True)
    def remove_spaces_from_url(cls, v):
        # tolerate URLs wrapped across lines in YAML by stripping spaces
        return v and v.replace(' ', '')

    @validator('extract', each_item=True)
    def check_extract_path(cls, v):
        # extraction destinations must stay relative to the download dir
        if v is not None and v.is_absolute():
            raise ValueError('extract path may not be absolute, remove the leading slash')
        return v

    @validator('to', always=True)
    def check_to(cls, v, values):
        """Resolve the final file path, deriving the filename from the URL
        when ``to`` is missing or is a directory."""
        if values.get('extract'):
            # extracting, to can be None
            return v
        elif is_file_path(v):
            # to is already a valid path
            return v
        elif v is not None and v.is_absolute():
            raise ValueError('path may not be absolute, remove the leading slash')
        try:
            url: HttpUrl = values['url']
        except KeyError:
            # url itself failed validation; nothing to derive a name from
            return v
        else:
            filename = (url.path or '/').rsplit('/', 1)[1]
            if not filename.endswith(('.css', '.sass', '.scss')):
                raise ValueError(f'no filename found in url "{url}" and file path not given via "to"')
            return (v or Path('.')) / filename
class DownloadModel(BaseModel):
    """Download section of the config: target dir plus the sources list."""

    # directory downloads are stored in (made absolute in ConfigModel.parse_obj)
    dir: Path
    # individual files/archives to fetch
    sources: List[SourceModel]
class ConfigModel(BaseModel):
    """Root sasstastic configuration model (one per config file)."""

    # optional download section; None means nothing to fetch
    download: Optional[DownloadModel] = None
    build_dir: Path
    output_dir: Path
    lock_file: Path = Path('.sasstastic.lock')
    # which files in build_dir are compiled (non-underscore css/sass/scss)
    include_files: Pattern = re.compile(r'^[^_].+\.(?:css|sass|scss)$')
    exclude_files: Optional[Pattern] = None
    # path-pattern -> {search-pattern -> replacement} applied to output
    replace: Optional[Dict[Pattern, Dict[Pattern, str]]] = None
    file_hashes: bool = False
    dev_mode: bool = True
    # path of the config file itself, injected by parse_obj
    config_file: Path

    @classmethod
    def parse_obj(cls, config_file: Path, obj: Dict[str, Any]) -> 'ConfigModel':
        """Validate ``obj`` and resolve all relative paths against the
        directory containing ``config_file``.

        :param config_file: path of the YAML file the data came from
        :param obj: parsed YAML data
        :raises ValidationError: if the data does not match the model
        """
        if isinstance(obj, dict):
            obj['config_file'] = config_file
        m: ConfigModel = super().parse_obj(obj)
        config_directory = config_file.parent
        # BUG FIX: `download` is Optional — a config without a download
        # section previously crashed here with AttributeError.
        if m.download is not None and not m.download.dir.is_absolute():
            m.download.dir = config_directory / m.download.dir
        if not m.build_dir.is_absolute():
            m.build_dir = config_directory / m.build_dir
        if not m.output_dir.is_absolute():
            m.output_dir = config_directory / m.output_dir
        if not m.lock_file.is_absolute():
            m.lock_file = config_directory / m.lock_file
        return m
def load_config(config_file: Path) -> ConfigModel:
    """Load and validate a sasstastic YAML config file.

    :param config_file: path to the YAML configuration
    :return: a validated ConfigModel with paths resolved
    :raises SasstasticError: if the file is missing, is not valid YAML, or
        fails model validation (the underlying exception is chained)
    """
    if not config_file.is_file():
        logger.error('%s does not exist', config_file)
        # message fixed: was "config files does not exist"
        raise SasstasticError('config file does not exist')
    try:
        with config_file.open('r') as f:
            data = yaml.load(f, Loader=Loader)
    except yaml.YAMLError as e:
        logger.error('invalid YAML file %s:\n%s', config_file, e)
        # chain the cause so tracebacks show the YAML parse error
        raise SasstasticError('invalid YAML file') from e
    try:
        return ConfigModel.parse_obj(config_file, data)
    except ValidationError as exc:
        logger.error('Error parsing %s:\n%s', config_file, display_errors(exc.errors()))
        raise SasstasticError('error parsing config file') from exc
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,862
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/sasstastic/compile.py
|
import hashlib
import json
import logging
import re
import shutil
import tempfile
from contextlib import contextmanager
from pathlib import Path
from time import time
from typing import Optional, Union
import click
import sass
from .common import SasstasticError
from .config import ConfigModel
__all__ = ('compile_sass',)
logger = logging.getLogger('sasstastic.compile')
STARTS_DOWNLOAD = re.compile('^(?:DOWNLOAD|DL)/')
STARTS_SRC = re.compile('^SRC/')
def compile_sass(config: ConfigModel, alt_output_dir: Optional[Path] = None, dev_mode: Optional[bool] = None):
    """
    Compile the sass/scss files in ``config.build_dir`` into css.

    Output is built in a temporary directory first, then moved into place, so
    a failed build never leaves a half-written output directory.

    :param config: the loaded sasstastic config
    :param alt_output_dir: optional override for ``config.output_dir``
    :param dev_mode: True for dev mode ('nested' css + source maps), False for
        prod ('compressed' css); ``None`` means use ``config.dev_mode``
    """
    if dev_mode is None:
        # fall back to the configured default (the previous "else: dev_mode = dev_mode"
        # branch was a no-op and has been removed)
        dev_mode = config.dev_mode
    mode = 'dev' if dev_mode else 'prod'
    out_dir: Path = alt_output_dir or config.output_dir
    logger.info('\ncompiling "%s/" to "%s/" (mode: %s)', config.build_dir, out_dir, mode)
    with tmpdir() as tmp_path:
        SassCompiler(config, tmp_path, dev_mode).build()
        fast_move(tmp_path, out_dir)
class SassCompiler:
    """
    Compiles a tree of sass/scss files into css inside a temporary output dir.

    In dev mode the source tree (and the download dir, when it lives outside
    the build dir) is copied into the output as '.src'/'.libs' so source maps
    can reference it; output style is 'nested' in dev mode and 'compressed'
    otherwise. Generated file sizes are cached in a temp-dir JSON file between
    runs so size changes can be reported.
    """

    def __init__(self, config: ConfigModel, tmp_out_dir: Path, dev_mode: bool):
        """
        :param config: loaded sasstastic config
        :param tmp_out_dir: temporary directory the compiled output is written to
        :param dev_mode: True -> nested css + source maps, False -> compressed css
        """
        self._config = config
        self._build_dir = config.build_dir
        self._tmp_out_dir = tmp_out_dir
        self._dev_mode = dev_mode
        self._src_dir = self._build_dir
        self._replace = config.replace or {}
        self._download_dir = config.download.dir
        # custom libsass importer resolving "SRC/..." and "DOWNLOAD/..."/"DL/..." imports
        self._importers = [(5, self._clever_imports)]
        # size-cache file is keyed on the build dir so multiple projects don't collide
        dir_hash = hashlib.md5(str(self._build_dir).encode()).hexdigest()
        self._size_cache_file = Path(tempfile.gettempdir()) / 'grablib_cache.{}.json'.format(dir_hash)
        self._output_style = 'nested' if self._dev_mode else 'compressed'
        self._old_size_cache = {}
        self._new_size_cache = {}
        self._errors = 0
        self._files_generated = 0

    def build(self) -> None:
        """Compile every matching file under the source dir; raise SasstasticError if any file failed."""
        start = time()
        if self._dev_mode:
            # copy the sources into the output as '.src' so source maps resolve
            self._src_dir = out_dir_src = self._tmp_out_dir / '.src'
            shutil.copytree(str(self._build_dir), str(out_dir_src))
            files = sum(f.is_file() for f in out_dir_src.glob('**/*'))
            logger.info('>> %28s/* ➤ %-30s %3d files', self._build_dir, '.src/', files)
            try:
                self._download_dir = out_dir_src / self._download_dir.relative_to(self._build_dir)
            except ValueError:
                # download dir is not inside the build dir, need to copy libs too
                out_dir_libs = self._tmp_out_dir / '.libs'
                shutil.copytree(str(self._download_dir), str(out_dir_libs))
                files = sum(f.is_file() for f in out_dir_libs.glob('**/*'))
                logger.info('%28s/* ➤ %-30s %3d files', self._download_dir, '.libs/', files)
                self._download_dir = out_dir_src
        if self._size_cache_file.exists():
            with self._size_cache_file.open() as f:
                self._old_size_cache = json.load(f)
        for path in self._src_dir.glob('**/*.*'):
            self.process_file(path)
        with self._size_cache_file.open('w') as f:
            json.dump(self._new_size_cache, f, indent=2)
        time_taken = (time() - start) * 1000
        plural = '' if self._files_generated == 1 else 's'
        if not self._errors:
            logger.info('%d css file%s generated in %0.0fms, 0 errors', self._files_generated, plural, time_taken)
        else:
            logger.error(
                '%d css file%s generated in %0.0fms, %d errors', self._files_generated, plural, time_taken, self._errors
            )
            raise SasstasticError('sass errors')

    def process_file(self, f: Path):
        """Compile one sass/scss file to css (plus a .map file in dev mode), honouring include/exclude filters."""
        if not f.is_file():
            return
        if not self._config.include_files.search(f.name):
            return
        if self._config.exclude_files and self._config.exclude_files.search(str(f)):
            return
        if is_relative_to(f, self._download_dir):
            # downloaded libraries are only imported, never compiled directly
            return
        rel_path = f.relative_to(self._src_dir)
        css_path = (self._tmp_out_dir / rel_path).with_suffix('.css')
        map_path = css_path.with_name(css_path.name + '.map') if self._dev_mode else None
        try:
            css = sass.compile(
                filename=str(f),
                source_map_filename=map_path and str(map_path),
                output_style=self._output_style,
                precision=10,
                importers=self._importers,
            )
        except sass.CompileError as e:
            self._errors += 1
            logger.error('%s compile error:\n%s', f, e)
            return
        log_msg = None
        file_hashes = self._config.file_hashes
        try:
            css_path.parent.mkdir(parents=True, exist_ok=True)
            if self._dev_mode:
                # when a source_map_filename is given, sass.compile returns (css, source_map)
                css, css_map = css
                if file_hashes:
                    css_path = insert_hash(css_path, css)
                    map_path = insert_hash(map_path, css)
                    file_hashes = False
                # correct the link to map file in css
                css = re.sub(r'/\*# sourceMappingURL=\S+ \*/', f'/*# sourceMappingURL={map_path.name} */', css)
                map_path.write_text(css_map)
            css, log_msg = self._regex_modify(rel_path, css)
        finally:
            self._log_file_creation(rel_path, css_path, css)
            if log_msg:
                logger.debug(log_msg)
        if file_hashes:
            css_path = insert_hash(css_path, css)
        css_path.write_text(css)
        self._files_generated += 1

    def _regex_modify(self, rel_path, css):
        """Apply configured regex replacements whose path pattern matches rel_path; returns (css, log message)."""
        log_msg = None
        for path_regex, regex_map in self._replace.items():
            if re.search(path_regex, str(rel_path)):
                logger.debug('%s has regex replace matches for "%s"', rel_path, path_regex)
                for pattern, repl in regex_map.items():
                    # compare hashes to report whether the substitution changed anything
                    hash1 = hash(css)
                    css = re.sub(pattern, repl, css)
                    if hash(css) == hash1:
                        log_msg = ' "{}" ➤ "{}" didn\'t modify the source'.format(pattern, repl)
                    else:
                        log_msg = ' "{}" ➤ "{}" modified the source'.format(pattern, repl)
        return css, log_msg

    def _log_file_creation(self, rel_path, css_path, css):
        """Record the generated file size and log it, with a coloured %-change when a previous size is known."""
        src, dst = str(rel_path), str(css_path.relative_to(self._tmp_out_dir))
        size = len(css.encode())
        p = str(css_path)
        self._new_size_cache[p] = size
        old_size = self._old_size_cache.get(p)
        c = None
        if old_size:
            change_p = (size - old_size) / old_size * 100
            # only report size changes bigger than 0.5%
            if abs(change_p) > 0.5:
                c = 'green' if change_p <= 0 else 'red'
                change_p = click.style('{:+0.0f}%'.format(change_p), fg=c)
                logger.info('>> %30s ➤ %-30s %9s %s', src, dst, fmt_size(size), change_p)
        if c is None:
            logger.info('>> %30s ➤ %-30s %9s', src, dst, fmt_size(size))

    def _clever_imports(self, src_path):
        """libsass importer: map 'SRC/x' into the build dir and 'DOWNLOAD/x' (or 'DL/x') into the download dir."""
        _new_path = None
        if STARTS_SRC.match(src_path):
            _new_path = self._build_dir / STARTS_SRC.sub('', src_path)
        elif STARTS_DOWNLOAD.match(src_path):
            _new_path = self._download_dir / STARTS_DOWNLOAD.sub('', src_path)
        return _new_path and [(str(_new_path),)]
@contextmanager
def tmpdir():
    """Yield a freshly created temporary directory as a Path, deleting it on exit."""
    path = tempfile.mkdtemp()
    try:
        yield Path(path)
    finally:
        shutil.rmtree(path)
def _move_dir(src: str, dst: str, exists: bool):
if exists:
shutil.rmtree(dst)
shutil.move(src, dst)
def fast_move(src_dir: Path, dst_dir: Path):
    """
    Move all files and directories from src_dir to dst_dir, files are moved first. This tries to be relatively fast.

    :param src_dir: directory whose contents are moved (typically a temp build dir)
    :param dst_dir: destination directory, created if missing; sub-directories
        with the same name are replaced wholesale
    """
    to_move = []
    to_rename = []
    for src_path in src_dir.iterdir():
        if src_path.is_file():
            to_rename.append((src_path, dst_dir / src_path.relative_to(src_dir)))
        else:
            assert src_path.is_dir(), src_path
            dst = dst_dir / src_path.relative_to(src_dir)
            to_move.append((str(src_path), str(dst), dst.exists()))
    dst_dir.mkdir(parents=True, exist_ok=True)
    s = time()
    # files in the root of src_dir are moved first, these are generally the scss files which
    # should be updated first to avoid styles not changing when a browser reloads
    for src, dst in to_rename:
        # shutil.move instead of Path.rename: rename() raises OSError (EXDEV) when the
        # temp dir and the output dir live on different filesystems
        shutil.move(str(src), str(dst))
    for src, dst, exists in to_move:
        if exists:
            shutil.rmtree(dst)
        shutil.move(src, dst)
    # fixed log typo: was "filed from"
    logger.debug('files moved from %s/ to %s/ in %0.1fms', src_dir, dst_dir, (time() - s) * 1000)
def insert_hash(path: Path, content: Union[str, bytes], *, hash_length=7):
    """
    Insert a hash based on the content into the path after the first dot.

    hash_length 7 matches git commit short references.
    """
    if isinstance(content, str):
        content = content.encode()
    digest = hashlib.md5(content).hexdigest()[:hash_length]
    name = path.name
    if '.' not in name:
        return path.with_name(f'{name}.{digest}')
    head, _, tail = name.partition('.')
    return path.with_name(f'{head}.{digest}.{tail}')
KB, MB = 1024, 1024 ** 2


def fmt_size(num):
    """Format a byte count as a short human-readable string (B, KB or MB)."""
    if num > MB:
        return f'{num / MB:0.1f}MB'
    if num > KB:
        return f'{num / KB:0.1f}KB'
    return f'{num:0.0f}B'
def is_relative_to(p1: Path, p2: Path) -> bool:
    """Return True if path ``p1`` equals ``p2`` or lies beneath it."""
    try:
        p1.relative_to(p2)
        return True
    except ValueError:
        return False
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,863
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/sasstastic/cli.py
|
import logging
from pathlib import Path
from typing import Optional
import typer
from .config import SasstasticError, load_config
from .logs import setup_logging
from .main import download_and_compile, watch
from .version import VERSION
cli = typer.Typer()
logger = logging.getLogger('sasstastic.cli')
def version_callback(value: bool):
    """typer eager callback: print the sasstastic version and exit when --version is passed."""
    if not value:
        return
    print(f'sasstastic: v{VERSION}')
    raise typer.Exit()
# help strings for the CLI options below
OUTPUT_HELP = 'Custom directory to output css files, if omitted the "output_dir" field from the config file is used.'
DEV_MODE_HELP = 'Whether to compile in development or production mode, if omitted the value is taken from config.'
WATCH_HELP = 'Whether to watch the config file and build directory then download and compile after file changes.'
VERBOSE_HELP = 'Print more information to the console.'
VERSION_HELP = 'Show the version and exit.'


@cli.command()
def build(
    config_path: Path = typer.Argument('sasstastic.yml', exists=True, file_okay=True, dir_okay=True, readable=True),
    output_dir: Optional[Path] = typer.Option(
        None, '-o', '--output-dir', file_okay=False, dir_okay=True, readable=True, help=OUTPUT_HELP
    ),
    dev_mode: bool = typer.Option(None, '--dev/--prod', help=DEV_MODE_HELP),
    watch_mode: bool = typer.Option(False, '--watch/--dont-watch', help=WATCH_HELP),
    verbose: bool = typer.Option(False, help=VERBOSE_HELP),
    version: bool = typer.Option(None, '--version', callback=version_callback, is_eager=True, help=VERSION_HELP),
):
    """
    Fantastic SASS and SCSS compilation.

    Takes a single argument: a path to a sasstastic.yml config file, or a directory containing a sasstastic.yml file.
    """
    # NOTE: the docstring above is shown by `--help` and asserted by tests — keep wording stable
    setup_logging('DEBUG' if verbose else 'INFO')
    if config_path.is_dir():
        # a directory was given: look for the default config file inside it
        config_path /= 'sasstastic.yml'
    logger.info('config path: %s', config_path)
    try:
        config = load_config(config_path)
        if watch_mode:
            watch(config, output_dir, dev_mode)
        else:
            download_and_compile(config, output_dir, dev_mode)
    except SasstasticError:
        # details were already logged where the error was raised; just set exit code 1
        raise typer.Exit(1)
if __name__ == '__main__':
cli()
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,864
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/sasstastic/__init__.py
|
# flake8: noqa
from .common import SasstasticError
from .compile import compile_sass
from .config import ConfigModel, load_config
from .download import download_sass
from .main import download_and_compile
from .version import VERSION
__all__ = (
'download_sass',
'compile_sass',
'SasstasticError',
'load_config',
'ConfigModel',
'download_and_compile',
'VERSION',
)
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,865
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/sasstastic/common.py
|
import re
from pathlib import Path
from typing import Optional
__all__ = ('SasstasticError', 'is_file_path')
class SasstasticError(RuntimeError):
    """Base exception for all sasstastic failures; callers catch this to exit cleanly."""
    pass
def is_file_path(p: Optional[Path]) -> bool:
    """
    Return True if ``p`` is a path whose final component looks like a file name,
    i.e. it ends with a dot followed by a 1-5 character alphanumeric extension.

    Fixed to always return a real bool: the previous implementation returned
    the ``re.Match`` object (or ``None``) despite the ``bool`` annotation.
    """
    return p is not None and re.search(r'\.[a-zA-Z0-9]{1,5}$', p.name) is not None
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,866
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/setup.py
|
from importlib.machinery import SourceFileLoader
from pathlib import Path

from setuptools import setup

description = 'Fantastic SASS and SCSS compilation for python'
THIS_DIR = Path(__file__).resolve().parent
try:
    long_description = THIS_DIR.joinpath('README.md').read_text()
except FileNotFoundError:
    # e.g. building from a distribution that doesn't include the README
    long_description = description

# avoid loading the package before requirements are installed:
version = SourceFileLoader('version', 'sasstastic/version.py').load_module()

setup(
    name='sasstastic',
    version=str(version.VERSION),
    description=description,
    long_description=long_description,
    long_description_content_type='text/markdown',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Operating System :: Unix',
        'Operating System :: POSIX :: Linux',
        'Environment :: MacOS X',
        'Topic :: Internet',
    ],
    author='Samuel Colvin',
    author_email='s@muelcolvin.com',
    url='https://github.com/samuelcolvin/sasstastic',
    license='MIT',
    packages=['sasstastic'],
    package_data={'sasstastic': ['py.typed']},
    # NOTE(review): this references sasstastic.__main__, but the CLI object lives
    # in sasstastic/cli.py — confirm a __main__ module re-exporting `cli` exists
    entry_points="""
        [console_scripts]
        sasstastic=sasstastic.__main__:cli
    """,
    python_requires='>=3.7',
    zip_safe=True,
    install_requires=[
        'libsass>=0.20.0',
        'httpx>=0.12.1',
        'pydantic>=1.5',
        'PyYAML>=5.3.1',
        'typer>=0.1.0',
        'watchgod>=0.6',
    ],
)
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,867
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/sasstastic/main.py
|
import asyncio
import logging
from pathlib import Path
from typing import Optional
import watchgod
from .compile import compile_sass
from .config import ConfigModel, load_config
from .download import Downloader, download_sass
logger = logging.getLogger('sasstastic.main')
__all__ = 'download_and_compile', 'watch', 'awatch'
def download_and_compile(config: ConfigModel, alt_output_dir: Optional[Path] = None, dev_mode: Optional[bool] = None):
    """
    Download external sass sources, then compile the build directory once.

    :param config: loaded sasstastic configuration
    :param alt_output_dir: optional override for ``config.output_dir``
    :param dev_mode: optional override for ``config.dev_mode``
    """
    logger.info('build path: %s/', config.build_dir)
    logger.info('output path: %s/', alt_output_dir or config.output_dir)
    download_sass(config)
    compile_sass(config, alt_output_dir, dev_mode)
def watch(config: ConfigModel, alt_output_dir: Optional[Path] = None, dev_mode: Optional[bool] = None):
    """Blocking wrapper around awatch(); Ctrl-C stops the watch loop quietly."""
    try:
        asyncio.run(awatch(config, alt_output_dir, dev_mode))
    except KeyboardInterrupt:
        # a clean exit on Ctrl-C, not an error
        pass
async def awatch(config: ConfigModel, alt_output_dir: Optional[Path] = None, dev_mode: Optional[bool] = None):
    """
    Download sources and compile once, then keep watching both the config file
    and the build directory, re-downloading and/or re-compiling on changes.
    """
    logger.info('build path: %s/', config.build_dir)
    logger.info('output path: %s/', alt_output_dir or config.output_dir)
    await Downloader(config).download()
    compile_sass(config, alt_output_dir, dev_mode)
    config_file = str(config.config_file)
    async for changes in watch_multiple(config_file, config.build_dir):
        # each change is a tuple whose second element is the changed file path
        changed_paths = {c[1] for c in changes}
        if config_file in changed_paths:
            logger.info('changes detected in config file, downloading sources...')
            # reload so new sources/paths from the edited config take effect
            config = load_config(config.config_file)
            await Downloader(config).download()
        if changed_paths != {config_file}:
            # anything beyond the config file itself means build sources changed
            logger.info('changes detected in the build directory, re-compiling...')
            compile_sass(config, alt_output_dir, dev_mode)
async def watch_multiple(*paths):
    """
    Async generator yielding change-sets from watching several paths at once.

    Each yielded value is one watcher's set of changes; the generator never
    terminates on its own.
    """
    watchers = [watchgod.awatch(p) for p in paths]
    while True:
        # asyncio.wait() requires Futures/Tasks — passing bare coroutines was
        # deprecated in Python 3.8 and rejected in modern versions, so wrap them
        tasks = [asyncio.ensure_future(w.__anext__()) for w in watchers]
        done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
        for t in pending:
            t.cancel()
        for t in done:
            yield t.result()
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,868
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/sasstastic/download.py
|
import asyncio
import hashlib
import json
import logging
import re
import zipfile
from io import BytesIO
from itertools import chain
from pathlib import Path
from typing import Dict, Set, Tuple
from httpx import AsyncClient
from .common import SasstasticError, is_file_path
from .config import ConfigModel, SourceModel
__all__ = ('download_sass', 'Downloader')
logger = logging.getLogger('sasstastic.download')
def download_sass(config: ConfigModel):
    """Synchronously download all external sass sources declared in the config."""
    asyncio.run(Downloader(config).download())
class Downloader:
    """
    Downloads the configured external sources (plain files or zip archives to
    extract) into the download directory, using a LockCheck lock-file cache to
    skip sources whose recorded files are already present and unchanged.
    """

    def __init__(self, config: ConfigModel):
        self._download_dir = config.download.dir
        self._sources = config.download.sources
        self._client = AsyncClient()
        self._lock_check = LockCheck(self._download_dir, config.lock_file)

    async def download(self):
        """Download all out-of-date sources concurrently, then delete stale files."""
        if not self._sources:
            logger.info('\nno files to download')
            return
        to_download = [s for s in self._sources if self._lock_check.should_download(s)]
        if to_download:
            logger.info(
                '\ndownloading %d files to %s, %d up-to-date',
                len(to_download),
                self._download_dir,
                len(self._sources) - len(to_download),
            )
            try:
                await asyncio.gather(*[self._download_source(s) for s in to_download])
            finally:
                # close the http client even when a download fails
                await self._client.aclose()
            self._lock_check.save()
        else:
            # NOTE(review): on this branch (and the empty-sources early return)
            # the AsyncClient is never closed — confirm whether that's intended
            logger.info('\nno new files to download, %d up-to-date', len(self._sources))
        self._lock_check.delete_stale()

    async def _download_source(self, s: SourceModel):
        """Fetch one source; save it directly, or extract it as a zip when ``s.extract`` is set."""
        logger.debug('%s: downloading...', s.url)
        r = await self._client.get(s.url)
        if r.status_code != 200:
            logger.error('Error downloading %r, unexpected status code: %s', s.url, r.status_code)
            raise SasstasticError(f'unexpected status code {r.status_code}')
        loop = asyncio.get_running_loop()
        if s.extract is None:
            # plain file: write it to s.to, doing the file I/O off the event loop
            path = await loop.run_in_executor(None, self._save_file, s.to, r.content)
            self._lock_check.record(s, s.to, r.content)
            logger.info('>> downloaded %s ➤ %s', s.url, path)
        else:
            count = await loop.run_in_executor(None, self._extract_zip, s, r.content)
            logger.info('>> downloaded %s ➤ extract %d files', s.url, count)

    def _extract_zip(self, s: SourceModel, content: bytes):
        """Extract archive members matched by the ``s.extract`` regex map; return the number of files written."""
        zcopied = 0
        with zipfile.ZipFile(BytesIO(content)) as zipf:
            logger.debug('%s: %d files in zip archive', s.url, len(zipf.namelist()))
            for filepath in zipf.namelist():
                if filepath.endswith('/'):
                    # directory entry, nothing to write
                    continue
                regex_pattern, match, file_path = None, None, None
                # first matching regex wins
                for r, t in s.extract.items():
                    match = r.match(filepath)
                    if match:
                        regex_pattern, file_path = r, t
                        break
                if regex_pattern is None:
                    logger.debug('%s: "%s" no target found', s.url, filepath)
                elif file_path is None:
                    # a regex mapped to None means "skip this member explicitly"
                    logger.debug('%s: "%s" skipping (regex: "%s")', s.url, filepath, regex_pattern)
                else:
                    if not is_file_path(file_path):
                        # target is a directory: derive the file name from the regex match
                        file_name = match.groupdict().get('filename') or match.groups()[-1]
                        file_path = file_path / file_name
                    logger.debug('%s: "%s" ➤ "%s" (regex: "%s")', s.url, filepath, file_path, regex_pattern)
                    # rebinding ``content`` is safe: the ZipFile already holds the archive bytes
                    content = zipf.read(filepath)
                    self._lock_check.record(s, file_path, content)
                    self._save_file(file_path, content)
                    zcopied += 1
        return zcopied

    def _save_file(self, save_to: Path, content) -> Path:
        """Write ``content`` under the download dir (creating parent dirs); return the full path."""
        p = self._download_dir / save_to
        p.parent.mkdir(parents=True, exist_ok=True)
        p.write_bytes(content)
        return p
class LockCheck:
    """
    Avoid downloading unchanged files by consulting a "lock file" cache.

    The lock file maps a hash of each download source to the set of
    ``(relative_path, md5_hex)`` pairs it produced, so unchanged sources can
    be skipped and files no longer produced by any active source deleted.
    """

    # Header written at the top of the lock file; comment lines are stripped on
    # read, so this never reaches json.loads. Typos fixed: "this files" ->
    # "this file", "should't" -> "shouldn't".
    file_description = (
        "# this file records information about files downloaded by sasstastic \n"  # noqa: Q000
        "# to allow unnecessary downloads to be skipped.\n"  # noqa: Q000
        "# You shouldn't edit it manually and should include it in version control."
    )

    def __init__(self, root_dir: Path, lock_file: Path):
        """
        :param root_dir: the download directory the cached paths are relative to
        :param lock_file: path of the lock file (may not exist yet)
        """
        self._root_dir = root_dir
        self._lock_file = lock_file
        if lock_file.is_file():
            # drop comment lines; the remainder is a JSON object
            lines = (ln for ln in lock_file.read_text().split('\n') if not re.match(r'\s*#', ln))
            c = json.loads('\n'.join(lines))
            self._cache: Dict[str, Set[Tuple[str, str]]] = {k: {tuple(f) for f in v} for k, v in c.items()}
        else:
            self._cache = {}
        # source hashes seen during this run; only these are saved and kept
        self._active: Set[str] = set()

    def should_download(self, s: SourceModel) -> bool:
        """
        Return True if source ``s`` needs downloading: it has never been
        recorded, or none of its recorded files is still present and unchanged.
        """
        k = self._hash_source(s)
        files = self._cache.get(k)
        if files is None:
            return True
        else:
            self._active.add(k)
            return not any(self._file_unchanged(*v) for v in files)

    def record(self, s: SourceModel, path: Path, content: bytes):
        """Record that ``path`` (with the md5 of ``content``) was produced by source ``s``."""
        k = self._hash_source(s)
        r = str(path), hashlib.md5(content).hexdigest()
        self._active.add(k)
        files = self._cache.get(k)
        if files is None:
            self._cache[k] = {r}
        else:
            files.add(r)

    def save(self):
        """Write the cache entries for active sources back to the lock file."""
        lines = ',\n'.join(f' "{k}": {json.dumps(sorted(v))}' for k, v in self._cache.items() if k in self._active)
        self._lock_file.write_text(f'{self.file_description}\n{{\n{lines}\n}}')

    def delete_stale(self):
        """Delete files under the download dir that no active source produced."""
        d_files = set(chain.from_iterable((p for p, _ in f) for u, f in self._cache.items() if u in self._active))
        for p in self._root_dir.glob('**/*'):
            rel_path = str(p.relative_to(self._root_dir))
            if rel_path not in d_files and p.is_file():
                p.unlink()
                logger.info('>> %s stale and deleted', rel_path)

    def _file_unchanged(self, path: str, file_hash: str) -> bool:
        """True if ``path`` (relative to the download dir) exists and its md5 matches ``file_hash``."""
        p = self._root_dir / path
        return p.is_file() and hashlib.md5(p.read_bytes()).hexdigest() == file_hash

    @staticmethod
    def _hash_source(s: SourceModel):
        """Stable hash of a source's url, extract-map and target path; used as the cache key."""
        j = str(s.url), None if s.extract is None else {str(k): str(v) for k, v in s.extract.items()}, str(s.to)
        return hashlib.md5(json.dumps(j).encode()).hexdigest()
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,869
|
samuelcolvin/sasstastic
|
refs/heads/master
|
/tests/test_cli.py
|
from typer.testing import CliRunner
from sasstastic.cli import cli
runner = CliRunner()
def test_print_commands():
    """`sasstastic --help` exits 0 and shows the command's summary line."""
    result = runner.invoke(cli, ['--help'])
    assert result.exit_code == 0
    assert 'Fantastic SASS and SCSS compilation' in result.output
|
{"/sasstastic/config.py": ["/sasstastic/common.py"], "/sasstastic/compile.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/sasstastic/cli.py": ["/sasstastic/config.py", "/sasstastic/logs.py", "/sasstastic/main.py"], "/sasstastic/__init__.py": ["/sasstastic/common.py", "/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py", "/sasstastic/main.py"], "/sasstastic/main.py": ["/sasstastic/compile.py", "/sasstastic/config.py", "/sasstastic/download.py"], "/sasstastic/download.py": ["/sasstastic/common.py", "/sasstastic/config.py"], "/tests/test_cli.py": ["/sasstastic/cli.py"]}
|
8,870
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/scripts/init_db.py
|
# -*- coding: utf8 -*-
import sys
import os
from pyramid.paster import bootstrap
from pymongo import MongoClient
import workdays_calendar.users
import workdays_calendar.tags
import workdays_calendar.days_calendar
# module-level database handle, populated by setup() before run() is called
db = None


def setup(settings):
    """Connect to MongoDB (settings['mongo.uri']) and store the db handle in module-level ``db``."""
    global db
    db = MongoClient(settings['mongo.uri'])[settings['mongo.db']]


def usage(argv):
    """Print command usage and exit with status 1."""
    cmd = os.path.basename(argv[0])
    print('usage: %s <config_uri>\n'
          '(example: "%s development.ini")' % (cmd, cmd))
    sys.exit(1)


def run(settings):
    """Initialise the collections each sub-module needs."""
    workdays_calendar.users.init_db(db, settings)
    workdays_calendar.tags.init_db(db, settings)
    workdays_calendar.days_calendar.init_db(db, settings)


def main():
    """Entry point: bootstrap the Pyramid environment from the config file and init the DB."""
    if 2 != len(sys.argv):
        usage(sys.argv)
    env = bootstrap(sys.argv[1])
    settings = env['registry'].settings
    setup(settings)
    run(settings)


if __name__ == '__main__':
    main()
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,871
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/tags.py
|
# -*- coding: utf8 -*-
import workdays_calendar.api as api
import colander
from bson import ObjectId
from workdays_calendar.collection_names import TAGS_COLLECTION
HOLIDAY_TAG='holiday'
def init_db(db, settings):
    """Ensure the required built-in tags (currently only the holiday tag) exist in the tags collection."""
    required_tags = [HOLIDAY_TAG]
    for tag in required_tags:
        if db[TAGS_COLLECTION].find_one({'name': tag}) is None:
            # NOTE(review): Collection.insert() is removed in modern pymongo
            # (insert_one); left as-is since the pinned driver version is unknown
            db[TAGS_COLLECTION].insert({
                'name': tag,
                'color': 'red'
            })
@colander.deferred
def name_validator(node, kw):
    """
    Deferred colander validator: enforce a max length of 50 and tag-name uniqueness.

    Bound with kw={'db': ..., 'tagid': ...}; ``tagid`` is excluded from the
    uniqueness query so updating a tag does not conflict with itself.
    """
    db = kw['db']
    tagid = kw['tagid']

    def validator(form, value):
        colander.Length(max=50)(form, value)
        if db[TAGS_COLLECTION].find_one({'name': value, '_id': {'$ne': tagid}}):
            raise colander.Invalid(
                form,
                u'Тег с таким именем уже есть'
            )
    return validator
class TagsSchema(colander.Schema):
    """Validation schema for a tag: a unique name (max 50 chars) and a colour string."""
    name = colander.SchemaNode(
        colander.String(),
        validator=name_validator,
    )
    color = colander.SchemaNode(
        colander.String(),
    )
class TagsViews(api.BaseViews):
    """REST views for the tags collection: list (GET), create (PUT), update (POST)."""

    @api.view(path='tags', method='GET')
    def view_tags(self):
        """Return all tags sorted by name."""
        # NOTE(review): the {'password': 0} projection looks copied from the users
        # views — tag documents have no password field; harmless but worth confirming
        result = list(self.db[TAGS_COLLECTION].find({}, {'password': 0}).sort('name'))
        return result

    @api.view(path='tags', method='PUT')
    def view_tag_create(self):
        """Validate the request body and insert a new tag."""
        # tagid=None: the uniqueness check must not exclude any existing tag
        schema = TagsSchema().bind(
            db=self.db,
            tagid=None
        )
        data = self.validated_data(schema)
        self.db[TAGS_COLLECTION].insert(
            data
        )
        return {
            'message': u'Тег создан'
        }

    @api.view(path='tags/{tag_id}', method='POST')
    def view_tag_update(self):
        """Validate the request body and apply the changes to the tag given by the tag_id path param."""
        tagid = ObjectId(self.params['tag_id'])
        schema = TagsSchema().bind(
            db=self.db,
            tagid=tagid
        )
        data = self.validated_data(schema)
        self.db[TAGS_COLLECTION].update(
            {'_id': tagid},
            {'$set': data}
        )
        return {
            'message': u'Тег изменен'
        }
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,872
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/scripts/gen_year.py
|
# -*- coding: utf8 -*-
import sys
import os
from pyramid.paster import bootstrap
from pymongo import MongoClient
from workdays_calendar.days_calendar import gen_year
# module-level database handle, populated by setup() before run() is called
db = None


def setup(settings):
    """Connect to MongoDB (settings['mongo.uri']) and store the db handle in module-level ``db``."""
    global db
    db = MongoClient(settings['mongo.uri'])[settings['mongo.db']]


def usage(argv):
    """Print command usage and exit with status 1."""
    cmd = os.path.basename(argv[0])
    print('usage: %s <year> <config_uri>\n'
          '(example: "%s 2017 development.ini")' % (cmd, cmd))
    sys.exit(1)


def run(settings, year):
    """Generate the calendar entries for the given year."""
    gen_year(db, year)


def main():
    """Entry point: expects <year> and <config_uri> command-line arguments."""
    if 3 != len(sys.argv):
        usage(sys.argv)
    env = bootstrap(sys.argv[2])
    settings = env['registry'].settings
    setup(settings)
    year = int(sys.argv[1])
    run(settings, year)


if __name__ == '__main__':
    main()
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,873
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/api.py
|
# -*- coding: utf8 -*-
import colander
from pyramid.security import NO_PERMISSION_REQUIRED
from pyramid.view import view_config
from pyramid.decorator import reify
from pyramid.i18n import get_localizer
def translate(request):
    """
    Build a translator callable for validation error messages.

    Plain values pass through unchanged; translation-string objects (those
    with an ``interpolate`` attribute) are translated via the request's
    localizer.
    """
    def translator(msg):
        if msg == "":
            return ""
        if hasattr(msg, 'interpolate'):
            return get_localizer(request).translate(msg)
        return msg
    return translator
API_VERSION='1'
API_PREFIX='api/v%s/'%API_VERSION
class view(view_config):
    """
    Extension of pyramid's ``view_config`` that also registers a route:
    each decorated method gets a route named "<class>_<method>" with the
    pattern API_PREFIX + ``path``, rendered as JSON.
    """

    def __call__(self, wrapped):
        settings = self.__dict__.copy()
        depth = settings.pop('_depth', 0)

        def callback(context, name, ob):
            config = context.config.with_package(info.module)
            # ===========our part start============
            route_name = "%s_%s" % (name, wrapped.__name__)
            # mapping of decorator kwargs -> add_view() kwargs to forward
            view_keys = {
                'permission': 'permission',
            }
            view_settings = dict((key2, settings[key1]) for key1, key2 in view_keys.items() if key1 in settings)
            if config.registry.settings.get('check_xsrf') == 'true':
                # xsrf checking defaults on per-view unless the decorator disables it
                view_settings['check_xsrf'] = settings.get('check_xsrf', True)
            # mapping of decorator kwargs -> add_route() kwargs to forward
            route_keys = {
                'method': 'request_method'
            }
            route_settings = dict((key2, settings[key1]) for key1, key2 in route_keys.items() if key1 in settings)
            route_settings['pattern'] = "%s%s" % (API_PREFIX, settings['path'])
            config.add_route(route_name, **route_settings)
            config.add_view(view=ob, attr=wrapped.__name__, route_name=route_name, renderer='json', **view_settings)
            # ===========our part end==============
        info = self.venusian.attach(wrapped, callback, category='pyramid',
                                    depth=depth + 1)
        if info.scope == 'class':
            if settings.get('attr') is None:
                settings['attr'] = wrapped.__name__
        settings['_info'] = info.codeinfo  # fbo "action_method"
        return wrapped
class BaseViews(object):
    """
    Base class for API view classes: stores context/request/db and provides
    lazy access to query modifiers, route params and the JSON request body.
    """

    def __init__(self, context, request):
        self.context = context
        self.request = request
        # per-request database handle attached to the request (request.db)
        self.db = request.db
        self.initialize()

    def initialize(self):
        """Hook for subclasses; called at the end of __init__."""
        pass

    @reify
    def modifers(self):
        # query-string parameters
        # NOTE(review): name looks like a typo for "modifiers", but renaming
        # could break subclasses — left as-is
        return self.request.GET

    @reify
    def params(self):
        # values captured from the route pattern (matchdict)
        return self.request.matchdict

    @reify
    def data(self):
        # an empty request body yields {} instead of a JSON decode error
        return self.request.body and self.request.json_body or {}

    def validate(self, schema, data):
        """Deserialize ``data`` with ``schema``; raise ValidationFailure (-> HTTP 422) on error."""
        try:
            return schema.deserialize(data)
        except colander.Invalid as e:
            raise ValidationFailure(e.asdict(translate(self.request)))

    def validated_data(self, schema):
        """Validate the JSON request body against ``schema``."""
        return self.validate(schema, self.data)
class ValidationFailure(Exception):
    """Raised when request data fails schema validation; ``data`` maps field names to error messages."""

    def __init__(self, data):
        self.data = data
@view_config(context=ValidationFailure, renderer='json', permission=NO_PERMISSION_REQUIRED)
def failed_validation(exc, request):
    """Exception view: render validation errors as JSON with HTTP status 422."""
    request.response.status_int = 422
    return {'errors': exc.data}
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,874
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/app_api.py
|
# -*- coding: utf8 -*-
from workdays_calendar.collection_names import CALENDAR_COLLECTION,TAGS_COLLECTION
from workdays_calendar.days_calendar import get_day_int
from workdays_calendar.tags import HOLIDAY_TAG
import datetime
def get_tag(db, tag_name):
    """Return the tag document named *tag_name*, or None when absent."""
    query = {'name': tag_name}
    return db[TAGS_COLLECTION].find_one(query)
def day_has_tag(db, day, tag):
    """True if *day* (a date) carries *tag* in the calendar collection.

    A falsy *tag* (e.g. the tag document was never created) always
    yields False.
    """
    if not tag:
        return False
    record = db[CALENDAR_COLLECTION].find_one({'day_int': get_day_int(day)})
    if record is None:
        return False
    # Tags are stored as a list of stringified tag ObjectIds.
    return str(tag['_id']) in record['tags']
def is_holiday_today(db):
    """True when today's date is marked with the holiday tag."""
    today = datetime.date.today()
    return day_has_tag(db, today, get_tag(db, HOLIDAY_TAG))
def get_workdays_interval(db, start, num_days):
    """Compute the calendar span covering *num_days* working days.

    Leading holidays before *start* are skipped first (and counted into
    the total). Returns a dict with the final date (``end``), the number
    of calendar days consumed (``total_days``) and whether any holiday
    was encountered (``holidays_used``).
    """
    one_day = datetime.timedelta(days=1)
    tag = get_tag(db, HOLIDAY_TAG)
    used_holidays = False
    elapsed = 0
    # Advance past any holidays at the beginning of the interval.
    while day_has_tag(db, start, tag):
        start += one_day
        used_holidays = True
        elapsed += 1
    current = start
    remaining = num_days
    # Walk forward one calendar day at a time; only non-holidays count
    # against the requested number of working days.
    while remaining > 0:
        current += one_day
        elapsed += 1
        if day_has_tag(db, current, tag):
            used_holidays = True
        else:
            remaining -= 1
    return {
        'end': current,
        'total_days': elapsed,
        'holidays_used': used_holidays
    }
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,875
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/auth.py
|
# -*- coding: utf8 -*-
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.security import Allow
import hashlib
from bson import ObjectId
# Single permission name used both as the role principal suffix and as the
# app-wide default view permission (see includeme below).
LOGGED_IN_PERMISSION='admin'
class Hasher:
    # Salted password-hash helpers (Python 2 code: relies on the builtin
    # ``unicode``).
    # NOTE(review): hashes are MD5-based, which is weak for password storage;
    # consider a dedicated KDF (bcrypt/PBKDF2) -- changing this invalidates
    # stored hashes, so confirm a migration plan first.
    @classmethod
    def salt(cls):
        # Fresh salt: a new ObjectId rendered as a 24-hex-char string.
        return unicode(ObjectId())
    @classmethod
    def generate(cls,pw,salt=None):
        # Returns salt + md5(salt + password).hexdigest(). The hexdigest is
        # always exactly 32 chars, which check() relies on to split the salt
        # back off the stored value.
        if salt==None:
            salt=ObjectId()
        return unicode(salt).encode('utf-8')+hashlib.md5(unicode(salt).encode('utf-8')+unicode(pw).encode('utf-8')).hexdigest()
    @classmethod
    def check(cls,pw_with_salt,pw):
        # Everything before the trailing 32 hexdigest chars is the salt.
        salt=pw_with_salt[:-32]
        return pw_with_salt==cls.generate(pw,salt)
def add_role_principals(userid, request):
    """JWT callback: grant every authenticated user the admin role principal."""
    role = 'role:%s' % LOGGED_IN_PERMISSION
    return [role]
class RootFactory(object):
    """Default root context: a single ACL entry granting the logged-in
    permission to holders of the matching role principal."""
    def __init__(self, request):
        pass
    # Principals carrying 'role:admin' are allowed the 'admin' permission.
    __acl__ = [
        (Allow, 'role:%s'%LOGGED_IN_PERMISSION, LOGGED_IN_PERMISSION),
    ]
def userid(request):
    """Return the authenticated user id, or None when authentication fails.

    ``request.authenticated_userid`` may raise (e.g. a malformed or expired
    JWT); any such failure is treated as "not logged in".
    """
    try:
        return request.authenticated_userid
    except Exception:
        # BUGFIX: narrowed from a bare ``except:`` so that SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        return None
def has_perm(request):
    """Request-method factory: ``request.has_perm(perm, context=None)``.

    The returned checker falls back to ``request.context`` when no
    explicit context is supplied.
    """
    def has_perm(perm, context=None):
        target = request.context if context is None else context
        return request.has_permission(perm, target)
    return has_perm
def includeme(config):
    """Pyramid includeme: wire up ACL authorization and JWT authentication."""
    config.set_root_factory(RootFactory)
    config.set_authorization_policy(ACLAuthorizationPolicy())
    config.include('pyramid_jwt')
    # NOTE(review): the JWT secret is hard-coded; it should come from
    # deployment settings.
    config.set_jwt_authentication_policy(
        'secret',
        http_header='X-Token',
        callback=add_role_principals
    )
    # Every view requires login unless it explicitly opts out.
    config.set_default_permission(LOGGED_IN_PERMISSION)
    # Expose userid / has_perm as cached request properties.
    config.add_request_method(userid,'userid', True, True)
    config.add_request_method(has_perm,'has_perm', True, True)
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,876
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/collection_names.py
|
# -*- coding: utf8 -*-
# MongoDB collection names shared across the package.
CALENDAR_COLLECTION='workdays_calendar'
TAGS_COLLECTION='workdays_tags'
USERS_COLLECTION='workdays_users'
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,877
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/days_calendar.py
|
# -*- coding: utf8 -*-
import workdays_calendar.api as api
import calendar
import colander
import datetime
from workdays_calendar.collection_names import CALENDAR_COLLECTION,TAGS_COLLECTION
def init_db(db,settings):
    # Seed the calendar with weekend/holiday records for the current year.
    gen_year(db,datetime.datetime.now().year)
def chunkify(lst, n):
    """Split *lst* into consecutive chunks of exactly *n* items.

    Any trailing partial chunk (len(lst) % n leftover items) is dropped,
    matching the original behavior; the caller passes calendar months whose
    length is always a multiple of 7.
    """
    # BUGFIX/portability: ``range`` and ``//`` replace the Python-2-only
    # ``xrange`` and true division ``/`` -- identical results on Python 2,
    # and no TypeError on Python 3.
    return [lst[i*n:i*n+n] for i in range(len(lst)//n)]
def get_day_int(date):
    """Encode a date as the integer YYYYMMDD."""
    return (date.year * 100 + date.month) * 100 + date.day
def gen_year(db,year):
    """Mark every Saturday and Sunday of *year* with the 'holiday' tag.

    Existing calendar records are left untouched; only missing day records
    are inserted. Requires the 'holiday' tag to exist already.
    """
    c=calendar.Calendar(0)
    holiday_tag=db[TAGS_COLLECTION].find_one({'name':'holiday'})
    if holiday_tag is None:
        # print() form runs identically on Python 2 and 3 (was a py2-only
        # print statement).
        print("Tag not found")
        return
    # Hoisted out of the loop: the tag id never changes.
    tag_id = str(holiday_tag['_id'])
    for m in range(1,13):
        for d,wd in c.itermonthdays2(year,m):
            # d == 0 marks padding days outside the month; wd >= 5 is Sat/Sun.
            if d and wd>=5:
                day_int=year*10000+m*100+d
                if not db[CALENDAR_COLLECTION].find_one({'day_int': day_int}):
                    db[CALENDAR_COLLECTION].insert({
                        'day_int': day_int,
                        'tags': [tag_id],
                    })
class StringList(colander.SequenceSchema):
    # A plain JSON array of strings (used for a day's tag-id list).
    items = colander.SchemaNode(
        colander.String()
    )
class dayUpdataSchema(colander.Schema):
    # Payload for updating a single day's tags.
    # NOTE(review): name looks like a typo of "dayUpdateSchema"; kept as-is
    # because it is referenced by the views below.
    tags = StringList()
class CalendarViews(api.BaseViews):
    """API views for reading and editing the workdays calendar."""
    @api.view(path='calendar/{year}', method='GET')
    def view_calendar(self):
        """Return twelve months of week-chunked days with their tag lists."""
        c=calendar.Calendar(0)
        year=int(self.params['year'])
        result=[]
        # One query for the whole year, then index records by day_int.
        days=self.db[CALENDAR_COLLECTION].find({'$and':[
            {'day_int': {'$gte':year*10000}},
            {'day_int': {'$lte':(year+1)*10000}}
        ]})
        days=dict((x['day_int'],x.get('tags',[])) for x in days)
        # range replaces Python-2-only xrange (identical behavior on py2).
        for m in range(1,13):
            month=[]
            for d in c.itermonthdays(year,m):
                # d == 0 denotes padding days outside the month; they still
                # get an entry so weeks stay 7 cells wide.
                day_int=year*10000+m*100+d
                month.append({
                    'day': d,
                    'day_int': day_int,
                    'tags': days.get(day_int,[])
                })
            result.append({
                'month': m,
                'weeks': chunkify(month,7)
            })
        return result
    @api.view(path='calendar/day/{day_int}', method='POST')
    def view_day_change(self):
        """Replace one day's tag list and mark the record as manually set."""
        schema=dayUpdataSchema()
        data=self.validated_data(schema)
        day_int=int(self.params['day_int'])
        self.db[CALENDAR_COLLECTION].update(
            {'day_int': day_int},
            {'$set': {
                'tags': data['tags'],
                'manual': True
            }},
            upsert=True
        )
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,878
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/users.py
|
# -*- coding: utf8 -*-
import workdays_calendar.api as api
import colander
from pyramid.httpexceptions import HTTPNotFound,HTTPForbidden
from bson import ObjectId
from time import sleep
from workdays_calendar.auth import Hasher
from pyramid.security import NO_PERMISSION_REQUIRED
from workdays_calendar.collection_names import USERS_COLLECTION
def init_db(db,settings):
    """Seed the users collection with a default admin/admin account.

    Runs only when the collection is completely empty.
    """
    if db[USERS_COLLECTION].find_one() is None:
        new_user={
            'name': 'admin',
            'login': 'admin',
            'disabled': False,
            'password': Hasher.generate('admin')
        }
        # BUGFIX: "passord" typo in the console message; print() form runs
        # on both Python 2 and 3.
        print("Creating new user admin with password admin")
        db[USERS_COLLECTION].insert(new_user)
def authenticate(request,login,password):
    """Check login/password against the users collection.

    Returns ``(userid, message)``: ``userid`` is the user's id string on
    success (None otherwise); ``message`` is a human-readable error
    (Russian, shown to the client).
    """
    userid=None
    message=''
    # NOTE(review): this message is overwritten below whenever login and
    # password are supplied -- confirm the intended behavior.
    if request.userid:
        message='Already logged in'
    if login and password:
        user=request.db[USERS_COLLECTION].find_one({'login':login})
        if user and Hasher.check(user['password'],password):
            if user['disabled']:
                message = u'Аккаунт заблокирован'
            else:
                userid=str(user['_id'])
                message=''
        else:
            message = u'Ошибка! Проверьте правильнось ввода логина и пароля'
            sleep(1) # slow down brute-force attempts
    else:
        message=u'Введите логин и пароль'
    return userid,message
class updatePasswordSchema(colander.Schema):
    # Password-change payload: a single password of at least 5 characters.
    password = colander.SchemaNode(
        colander.String(),
        validator=colander.Length(min=5)
    )
@colander.deferred
def login_validator(node,kw):
    """Deferred validator: enforce max length and login uniqueness.

    Bind the schema with ``db`` and ``userid`` (the user being edited, or
    None on creation) so the uniqueness check ignores the user's own record.
    """
    db=kw['db']
    userid=kw['userid']
    def validator(form, value):
        colander.Length(max=50)(form, value)
        """Проверяем не занят ли логин"""
        # Reject the login if any *other* user already has it.
        if db[USERS_COLLECTION].find_one({'login':value,'_id':{'$ne':userid}}):
            raise colander.Invalid(
                form,
                u'Этот логин уже зарегистрирован'
            )
    return validator
class updateUserSchema(colander.Schema):
    """Editable user profile fields (the password is handled separately)."""
    name = colander.SchemaNode(
        colander.String(),
    )
    # Length and uniqueness enforced by the deferred login_validator.
    login = colander.SchemaNode(
        colander.String(),
        validator=login_validator,
    )
    disabled = colander.SchemaNode(
        colander.Bool(),
    )
class createUserSchema(updateUserSchema,updatePasswordSchema):
    """User-creation payload: profile fields plus an initial password."""
    pass
class UsersViews(api.BaseViews):
    """REST endpoints for authentication and user management."""
    @api.view(path='login', method='POST',permission=NO_PERMISSION_REQUIRED)
    def login(self):
        """Issue a JWT for valid credentials; 401 plus message otherwise."""
        login = self.data['login']
        password = self.data['password']
        message=''
        user_id,message = authenticate(self.request, login, password)
        if user_id:
            return {
                'token': self.request.create_jwt_token(user_id)
            }
        if message:
            self.request.response.status=401
        return {
            'message': message
        }
    @api.view(path='profile', method='GET')
    def profile(self):
        """Return the current user's id and display name."""
        userid=self.request.userid
        user=self.db[USERS_COLLECTION].find_one({'_id':ObjectId(userid)})
        if user is None:
            # The token refers to a user that no longer exists.
            raise HTTPForbidden()
        return {
            'userid': userid,
            'name': user['name']
        }
    @api.view(path='users', method='GET')
    def view_users(self):
        """List all users (password hashes excluded), sorted by name."""
        result=list(self.db[USERS_COLLECTION].find({},{'password':0}).sort('name'))
        return result
    @api.view(path='users/{user_id}', method='GET')
    def view_user(self):
        """Return a single user with the password hash blanked out."""
        user=self.db[USERS_COLLECTION].find_one({'_id': ObjectId(self.params['user_id'])})
        if user is None:
            # BUGFIX: the None check now precedes the password blanking;
            # previously a missing user raised TypeError instead of 404.
            raise HTTPNotFound()
        user['password']=''
        return user
    @api.view(path='users', method='PUT')
    def view_user_create(self):
        """Create a user; the plaintext password is hashed before storage."""
        schema=createUserSchema().bind(
            db=self.db,
            userid=None
        )
        data=self.validated_data(schema)
        data['password']=Hasher.generate(data['password'])
        self.db[USERS_COLLECTION].insert(
            data
        )
        return {
            'message': u'Пользователь создан'
        }
    @api.view(path='users/{user_id}', method='POST')
    def view_user_update(self):
        """Update a user's profile fields (password untouched)."""
        userid=ObjectId(self.params['user_id'])
        schema=updateUserSchema().bind(
            db=self.db,
            userid=userid
        )
        data=self.validated_data(schema)
        self.db[USERS_COLLECTION].update(
            {'_id': userid},
            {'$set': data}
        )
        return {
            'message': u'Пользователь изменен'
        }
    @api.view(path='users/{user_id}/change_password', method='POST')
    def view_user_change_pasword(self):
        # NOTE(review): method name has a "pasword" typo; kept because the
        # generated route name derives from it -- the URL path is correct.
        """Set a new password for the given user."""
        userid=ObjectId(self.params['user_id'])
        schema=updatePasswordSchema()
        data=self.validated_data(schema)
        data['password']=Hasher.generate(data['password'])
        self.db[USERS_COLLECTION].update(
            {'_id': userid},
            {'$set': data}
        )
        return {
            'message': u'Пароль изменен'
        }
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,879
|
aleksandr-rakov/workdays_calendar
|
refs/heads/master
|
/workdays_calendar/__init__.py
|
# -*- coding: utf8 -*-
from pyramid.config import Configurator
from bson.objectid import ObjectId
import datetime
from pyramid.renderers import JSON
from pyramid.events import NewRequest
from pymongo import MongoClient
def add_request_properties(event):
    """NewRequest subscriber: expose the registry's Mongo db as request.db."""
    request = event.request
    request.db = request.registry.db
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    config = Configurator(settings=settings)
    # One MongoClient per process; the database handle lives on the registry
    # and is exposed per-request by add_request_properties.
    mongo_conn = MongoClient(settings['mongo.uri'])
    mongo_db=mongo_conn[settings['mongo.db']]
    config.registry.db=mongo_db
    config.add_subscriber(add_request_properties,NewRequest)
    # JSON renderer adapters: datetimes as ISO-8601 strings with a "Z"
    # suffix, ObjectIds as plain strings.
    def datetime_adapter(obj, request):
        return obj.isoformat()+'Z'
    def objectid_adapter(obj, request):
        return str(obj)
    renderer = JSON(ensure_ascii=False,indent=4)
    renderer.add_adapter(datetime.datetime, datetime_adapter)
    renderer.add_adapter(ObjectId, objectid_adapter)
    config.add_renderer('json', renderer)
    config.include('workdays_calendar.auth')
    config.scan()
    return config.make_wsgi_app()
|
{"/workdays_calendar/scripts/init_db.py": ["/workdays_calendar/users.py", "/workdays_calendar/tags.py", "/workdays_calendar/days_calendar.py"], "/workdays_calendar/tags.py": ["/workdays_calendar/api.py", "/workdays_calendar/collection_names.py"], "/workdays_calendar/scripts/gen_year.py": ["/workdays_calendar/days_calendar.py"], "/workdays_calendar/app_api.py": ["/workdays_calendar/collection_names.py", "/workdays_calendar/days_calendar.py", "/workdays_calendar/tags.py"]}
|
8,881
|
martincalvert/MongoDB-3-New-Relic-Plugin
|
refs/heads/master
|
/newrelic_plugin_agent/plugins/__init__.py
|
"""
Plugins are responsible for fetching and parsing the stats from the service
being profiled.
"""
# Registry mapping plugin name -> dotted path of the implementing class.
available = {
    'mongodb': 'newrelic_plugin_agent.plugins.mongodb.MongoDB',
}
|
{"/newrelic_plugin_agent/plugins/mongodb.py": ["/newrelic_plugin_agent/plugins/__init__.py"]}
|
8,882
|
martincalvert/MongoDB-3-New-Relic-Plugin
|
refs/heads/master
|
/newrelic_plugin_agent/plugins/mongodb.py
|
"""
MongoDB Support
"""
import datetime
from pymongo import errors
import logging
import pymongo
import re
from newrelic_plugin_agent.plugins import base
LOGGER = logging.getLogger(__name__)
class MongoDB(base.Plugin):
    """New Relic agent plugin that polls MongoDB server and per-database stats."""
    # SETUP: Set this GUID to something unique.
    GUID = 'CHANGE.ME'
    def add_datapoints(self, name, stats):
        """Add all of the data points for a database
        :param str name: The name of the database for the stats
        :param dict stats: The stats data to add
        """
        # SETUP: You can add some database name manipulation here to rework bad named DBs
        base_key = name
        self.add_gauge_value('Database/Extents/{0}'.format(base_key), 'extents', stats.get('numExtents', 0))
        self.add_gauge_value('Database/Size/{0}'.format(base_key), 'bytes', stats.get('dataSize', 0))
        self.add_gauge_value('Database/FileSize/{0}'.format(base_key), 'bytes', stats.get('storageSize', 0))
        self.add_gauge_value('Database/ObjectsCount/{0}'.format(base_key), 'objects', stats.get('objects', 0))
        self.add_gauge_value('Database/ObjectsAverageSize/{0}'.format(base_key), 'bytes', stats.get('avgObjSize', 0))
        self.add_gauge_value('Database/Collections/{0}'.format(base_key), 'collections', stats.get('collections', 0))
        self.add_gauge_value('Database/IndexCount/{0}'.format(base_key), 'indexes', stats.get('indexes', 0))
        self.add_gauge_value('Database/IndexSize/{0}'.format(base_key), 'bytes', stats.get('indexSize', 0))
    def add_server_datapoints(self, stats):
        """Add all of the data points for a server
        :param dict stats: The stats data to add (output of serverStatus)
        """
        host = stats.get('host','no_host')
        asserts = stats.get('asserts', dict())
        self.add_derive_value('Asserts/Regular/{0}'.format(host), 'asserts', asserts.get('regular', 0))
        self.add_derive_value('Asserts/Warning/{0}'.format(host), 'asserts', asserts.get('warning', 0))
        self.add_derive_value('Asserts/Message/{0}'.format(host), 'asserts', asserts.get('msg', 0))
        self.add_derive_value('Asserts/User/{0}'.format(host), 'asserts', asserts.get('user', 0))
        self.add_derive_value('Asserts/Rollovers/{0}'.format(host), 'asserts', asserts.get('rollovers', 0))
        flush = stats.get('backgroundFlushing', dict())
        self.add_derive_timing_value('BackgroundFlushes/{0}'.format(host), 'ms', flush.get('flushes', 0), flush.get('total_ms', 0), flush.get('last_ms', 0))
        self.add_gauge_value('SecondsSinceLastFlush/{0}'.format(host), 'seconds', (datetime.datetime.now() - flush.get('last_finished', datetime.datetime.now())).seconds)
        conn = stats.get('connections', dict())
        self.add_gauge_value('Connections/Available/{0}'.format(host), 'connections', conn.get('available', 0))
        self.add_gauge_value('Connections/Current/{0}'.format(host), 'connections', conn.get('current', 0))
        metrics = stats.get('metrics', dict())
        commands = metrics.get('commands', dict())
        self.add_derive_value('Metrics/Commands/Find/{0}'.format(host), 'ops', commands.get('find',dict()).get('total',0))
        self.add_derive_value('Metrics/Commands/Count/{0}'.format(host), 'ops', commands.get('count',dict()).get('total',0))
        self.add_derive_value('Metrics/Commands/CreateIndexes/{0}'.format(host), 'ops', commands.get('createIndexes',dict()).get('total',0))
        self.add_derive_value('Metrics/Commands/MoveChunks/{0}'.format(host), 'ops', commands.get('moveChunk',dict()).get('total',0))
        self.add_derive_value('Metrics/Commands/Update/{0}'.format(host), 'ops', commands.get('update',dict()).get('total',0))
        self.add_derive_value('Metrics/Commands/Distinct/{0}'.format(host), 'ops', commands.get('distinct',dict()).get('total',0))
        self.add_derive_value('Metrics/Commands/GetMore/{0}'.format(host), 'ops', commands.get('getMore',dict()).get('total',0))
        document = metrics.get('document', dict())
        self.add_derive_value('Metrics/Documents/Deleted/{0}'.format(host), 'ops', document.get('deleted', 0))
        self.add_derive_value('Metrics/Documents/Returned/{0}'.format(host), 'ops', document.get('returned', 0))
        self.add_derive_value('Metrics/Documents/Inserted/{0}'.format(host), 'ops', document.get('inserted', 0))
        self.add_derive_value('Metrics/Documents/Updated/{0}'.format(host), 'ops', document.get('updated', 0))
        operations = metrics.get('operation', dict())
        self.add_derive_value('Metrics/NonIndex/Ordering/{0}'.format(host), 'ops', operations.get('scanAndOrder', 0))
        repl = metrics.get('repl', dict())
        repl_network = repl.get('network', dict())
        # BUGFIX: default to an empty dict -- .get('getmores') returned None
        # when the key was missing, crashing on the .get() calls below.
        repl_network_get_mores = repl_network.get('getmores', dict())
        self.add_derive_value('Metrics/Repl/GetMores/count/{0}'.format(host), 'ops', repl_network_get_mores.get('num', 0))
        self.add_derive_value('Metrics/Repl/GetMores/total/{0}'.format(host), 'ops', repl_network_get_mores.get('totalMillis', 0))
        # metrics.repl.network.getmores.num GetMore that cross shards
        cursors = metrics.get('cursors', dict())
        self.add_gauge_value('Cursors/On/{0}'.format(host), 'cursors', cursors.get('open', 0))
        self.add_derive_value('Cursors/TimedOut/{0}'.format(host), 'cursors', cursors.get('timedOut', 0))
        dur = stats.get('dur', dict())
        self.add_gauge_value('Durability/CommitsInWriteLock/{0}'.format(host), 'commits', dur.get('commitsInWriteLock', 0))
        self.add_gauge_value('Durability/EarlyCommits/{0}'.format(host), 'commits', dur.get('earlyCommits', 0))
        self.add_gauge_value('Durability/JournalCommits/{0}'.format(host), 'commits', dur.get('commits', 0))
        # BUGFIX: journaledMB / writeToDataFilesMB are reported in megabytes;
        # converting to bytes requires multiplying, not dividing.
        self.add_gauge_value('Durability/JournalBytesWritten/{0}'.format(host), 'bytes', dur.get('journaledMB', 0) * 1048576)
        self.add_gauge_value('Durability/DataFileBytesWritten/{0}'.format(host), 'bytes', dur.get('writeToDataFilesMB', 0) * 1048576)
        timems = dur.get('timeMs', dict())
        self.add_gauge_value('Durability/Timings/DurationMeasured/{0}'.format(host), 'ms', timems.get('dt', 0))
        self.add_gauge_value('Durability/Timings/LogBufferPreparation/{0}'.format(host), 'ms', timems.get('prepLogBuffer', 0))
        self.add_gauge_value('Durability/Timings/WriteToJournal/{0}'.format(host), 'ms', timems.get('writeToJournal', 0))
        self.add_gauge_value('Durability/Timings/WriteToDataFiles/{0}'.format(host), 'ms', timems.get('writeToDataFiles', 0))
        self.add_gauge_value('Durability/Timings/RemapingPrivateView/{0}'.format(host), 'ms', timems.get('remapPrivateView', 0))
        locks = stats.get('globalLock', dict())
        # lockTime presumably reported in microseconds -> ms; confirm against
        # the serverStatus documentation for the deployed MongoDB version.
        self.add_derive_value('GlobalLocks/Held/{0}'.format(host), 'ms', locks.get('lockTime', 0) / 1000)
        self.add_derive_value('GlobalLocks/Ratio/{0}'.format(host), 'ratio', locks.get('ratio', 0))
        active = locks.get('activeClients', dict())
        self.add_derive_value('GlobalLocks/ActiveClients/Total/{0}'.format(host), 'clients', active.get('total', 0))
        self.add_derive_value('GlobalLocks/ActiveClients/Readers/{0}'.format(host), 'clients', active.get('readers', 0))
        self.add_derive_value('GlobalLocks/ActiveClients/Writers/{0}'.format(host), 'clients', active.get('writers', 0))
        queue = locks.get('currentQueue', dict())
        self.add_derive_value('GlobalLocks/Queue/Total/{0}'.format(host), 'locks', queue.get('total', 0))
        self.add_derive_value('GlobalLocks/Queue/Readers/{0}'.format(host), 'readers', queue.get('readers', 0))
        self.add_derive_value('GlobalLocks/Queue/Writers/{0}'.format(host), 'writers', queue.get('writers', 0))
        mem = stats.get('mem', dict())
        self.add_gauge_value('Memory/Resident/{0}'.format(host), 'megabytes', mem.get('resident', 0))
        self.add_gauge_value('Memory/Virtual/{0}'.format(host), 'megabytes', mem.get('virtual', 0))
        net = stats.get('network', dict())
        self.add_derive_value('Network/Requests/{0}'.format(host), 'requests', net.get('numRequests', 0))
        self.add_derive_value('Network/Transfer/In/{0}'.format(host), 'bytes', net.get('bytesIn', 0))
        self.add_derive_value('Network/Transfer/Out/{0}'.format(host), 'bytes', net.get('bytesOut', 0))
        ops = stats.get('opcounters', dict())
        self.add_derive_value('Operations/Insert/{0}'.format(host), 'ops', ops.get('insert', 0))
        self.add_derive_value('Operations/Query/{0}'.format(host), 'ops', ops.get('query', 0))
        self.add_derive_value('Operations/Update/{0}'.format(host), 'ops', ops.get('update', 0))
        self.add_derive_value('Operations/Delete/{0}'.format(host), 'ops', ops.get('delete', 0))
        self.add_derive_value('Operations/GetMore/{0}'.format(host), 'ops', ops.get('getmore', 0))
        self.add_derive_value('Operations/Command/{0}'.format(host), 'ops', ops.get('command', 0))
        extra = stats.get('extra_info', dict())
        self.add_gauge_value('System/HeapUsage/{0}'.format(host), 'bytes', extra.get('heap_usage_bytes', 0))
        self.add_derive_value('System/PageFaults/{0}'.format(host), 'faults', extra.get('page_faults', 0))
        wt = stats.get('wiredTiger', dict())
        wt_cache = wt.get('cache', dict())
        self.add_derive_value('WiredTiger/Cache/BytesReadInto/Derived/{0}'.format(host), 'bytes', wt_cache.get('bytes read into cache', 0))
        self.add_derive_value('WiredTiger/Cache/BytesIn/Derived/{0}'.format(host), 'bytes', wt_cache.get('bytes currently in the cache', 0))
        self.add_gauge_value('WiredTiger/Cache/DirtyBytes/{0}'.format(host), 'bytes', wt_cache.get('tracked dirty bytes in the cache', 0))
        self.add_gauge_value('WiredTiger/Cache/BytesReadInto/Gauge/{0}'.format(host), 'bytes', wt_cache.get('bytes read into cache', 0))
        self.add_gauge_value('WiredTiger/Cache/BytesIn/Gauge/{0}'.format(host), 'bytes', wt_cache.get('bytes currently in the cache', 0))
        wt_concurrent = wt.get('concurrentTransactions', dict())
        wt_write = wt_concurrent.get('write', dict())
        self.add_gauge_value('WiredTiger/concurrentTransactions/WritesAvailable/{0}'.format(host), 'tickets', wt_write.get('available', 0))
        wt_read = wt_concurrent.get('read', dict())
        self.add_gauge_value('WiredTiger/concurrentTransactions/ReadsAvailable/{0}'.format(host), 'tickets', wt_read.get('available', 0))
        top_repl = stats.get('repl', dict())
        if top_repl.get('ismaster', False):
            # Primary: report the replication buffer gauges.
            repl_buffer = repl.get('buffer', dict())
            self.add_derive_value('Repl/Buffer/Count/{0}'.format(host), 'ops', repl_buffer.get('count', 0))
            self.add_derive_value('Repl/Buffer/SizeBytes/{0}'.format(host), 'bytes', repl_buffer.get('sizeBytes', 0))
            self.add_derive_value('Repl/Buffer/MaxSizeBytes/{0}'.format(host), 'bytes', repl_buffer.get('maxSizeBytes', 0))
        else:
            # Secondary: report oplog application metrics.
            repl_apply = repl.get('apply', dict())
            repl_apply_batches = repl_apply.get('batches', dict())
            self.add_derive_value('Repl/Apply/Ops/{0}'.format(host), 'ops', repl_apply.get('ops', 0))
            self.add_derive_value('Repl/Apply/BatchesNum/{0}'.format(host), 'bytes', repl_apply_batches.get('num', 0))
            self.add_derive_value('Repl/Apply/BatchesTotalMillis/{0}'.format(host), 'bytes', repl_apply_batches.get('totalMillis', 0))
    def connect(self):
        """Build a MongoClient from the config; authenticate when an
        'admin' section with a username is configured.

        Returns the client, or None when the connection fails.
        """
        kwargs = {'host': self.config.get('host', 'localhost'), 'port': self.config.get('port', 27017)}
        for key in ['ssl', 'ssl_keyfile', 'ssl_certfile', 'ssl_cert_reqs', 'ssl_ca_certs']:
            if key in self.config:
                kwargs[key] = self.config[key]
        try:
            client = pymongo.MongoClient(**kwargs)
            admin = self.config.get('admin', {})
            if 'username' in admin.keys():
                username = admin.get('username', 'root')
                password = admin.get('password', 'password')
                auth_db = admin.get('auth_db', 'admin')
                client[auth_db].authenticate(username, password)
            return client
        except pymongo.errors.ConnectionFailure as error:
            LOGGER.error('Could not connect to MongoDB: %s', error)
    def get_and_add_db_dict(self, databases=None):
        """Collect dbStats for every configured database.

        :param dict/list databases: ignored -- kept for interface
            compatibility; the value is always re-read from config below.
            BUGFIX: the parameter now has a default because poll() calls
            this method without arguments (previously a TypeError).
        """
        LOGGER.debug('Processing mongo databases')
        client = self.connect()
        if not client:
            return
        databases = self.config.get('databases', list())
        try:
            if isinstance(databases, dict):
                # Dict form: values may carry per-database credentials.
                for database in databases.keys():
                    db = client[database]
                    logged_in = False
                    if 'username' in databases[database]:
                        db.authenticate(databases[database]['username'], databases[database].get('password'))
                        logged_in = True
                    self.add_datapoints(database, db.command('dbStats'))
                    if logged_in:
                        db.logout()
            else:
                # List form: plain database names, no authentication.
                for database in databases:
                    db = client[database]
                    self.add_datapoints(database, db.command('dbStats'))
        except errors.OperationFailure as error:
            LOGGER.critical('Could not fetch stats: %s', error)
    def get_and_add_server_stats(self):
        """Collect and record serverStatus metrics."""
        LOGGER.debug('Fetching server stats')
        client = self.connect()
        if not client:
            return
        # NOTE(review): ``client.db`` selects a database literally named
        # "db"; serverStatus is server-wide so any database works -- confirm
        # the name is intentional.
        self.add_server_datapoints(client.db.command('serverStatus'))
        client.close()
    def poll(self):
        """Agent entry point: gather server stats then per-database stats."""
        self.initialize()
        self.get_and_add_server_stats()
        self.get_and_add_db_dict()
        self.finish()
|
{"/newrelic_plugin_agent/plugins/mongodb.py": ["/newrelic_plugin_agent/plugins/__init__.py"]}
|
8,885
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/spiders/cntv.py
|
# -*- coding: utf-8 -*-
import scrapy
class CntvSpider(scrapy.Spider):
    """Exploratory spider for CNTV video-set listings."""
    name = "cntv"
    # BUGFIX: allowed_domains entries must be bare domain names, not URLs;
    # a scheme-prefixed value defeats the offsite middleware's matching.
    allowed_domains = ["tv.cntv.cn"]
    videoset_search='http://tv.cntv.cn/videoset/search'
    def __init__(self, *args, **kwargs):
        # BUGFIX: forward to Spider.__init__ so standard spider attributes
        # are initialized (the original silently dropped all arguments).
        super(CntvSpider, self).__init__(*args, **kwargs)
    #start request
    def start_requests(self):
        """Begin crawling at the video-set search page."""
        return [scrapy.http.Request(url=self.videoset_search)]
    #parse code
    def parse(self, response):
        """Dump the <dd code=...> nodes (debugging output only)."""
        # print() form runs on both Python 2 and 3.
        print(response.xpath('//dd[@code]'))
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,886
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/spiders/tudou.py
|
#coding=utf-8
from scrapy.spiders import CrawlSpider, Spider
from scrapy.loader import ItemLoader
from scrapy.loader.processors import TakeFirst, MapCompose, Join
from scrapy.exceptions import CloseSpider
from scrapy.selector import Selector
from scrapy.http import FormRequest
from scrapy.http import Request
from Scrapy.items import *
from urlparse import urlparse,parse_qs
import json
import pymongo
from datetime import datetime, date, time
class TudouSpider(CrawlSpider):
    """Crawler for tudou.com channel listings (Python 2 code: uses ``unicode``)."""
    name = 'tudou'
    # NOTE(review): attribute name is misspelled -- Scrapy reads
    # "allowed_domains", so this value is silently ignored (no offsite
    # filtering); the value would also need to be a bare domain, not a URL.
    allowed_domins = ['http://www.tudou.com']
    list_url = 'http://www.tudou.com/list/index.html'
    # Target request rate in requests/second (1000 requests per hour).
    rate=float(1000)/3600
    def __init__(self, category = None, *args, **kwargs):
        # Derive the per-request delay (seconds) from the rate above.
        # NOTE(review): CrawlSpider.__init__ is never invoked -- verify the
        # spider still initializes correctly without it.
        if hasattr(self, 'rate'):
            self.download_delay=1/getattr(self, 'rate')
        if category:
            self.category=unicode(category, 'utf-8')
    def start_requests(self):
        # Entry point: the channel list page, parsed by parseList.
        return [Request(getattr(self, 'list_url'), callback=self.parseList)]
    def parseList(self, response):
        # Extract channel id/url/name from the secondary menu.
        # NOTE(review): the extracted values are unused and nothing is
        # yielded -- this parser looks unfinished.
        channels=response.xpath('//*[@id="secMenu"]/ul/li')
        for channel in channels:
            id=channel.xpath('@data-id').extract()
            url=channel.xpath('.//a/@href').extract()
            name=channel.xpath('.//a/text()').extract()
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,887
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/spiders/windj007.py
|
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from Scrapy.items import ProxyItem
import re
class Windj007Spider(CrawlSpider):
    """Scrapes proxy ip:port pairs from Google results for proxy-list files."""
    name = 'Windj007'
    start_urls = ['http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&d=1',
                  'http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&start=10',
                  'http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&start=20',
                  'http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&start=30',
                  'http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&start=40',
                  ]
    # Dotted-quad IP followed by a port number (separator is any non-digit run).
    _address_re = re.compile(r'(\d{1,4}\.\d{1,4}\.\d{1,4}\.\d{1,4})[^0-9]+(\d+)')
    rules = (
        Rule(LinkExtractor(restrict_xpaths = '//h3[@class="r"]'),
            callback = 'parse_proxylist',
            follow = True
        ),
    )
    def parse_proxylist(self, response):
        """Yield a ProxyItem for every ip:port pair found in the body."""
        if response.status >= 400:
            return
        # BUGFIX: the regex was referenced via the nonexistent class name
        # "ProxySpider", raising NameError at runtime; use this class's own.
        addresses_parsed = self._address_re.finditer(response.body)
        for row in addresses_parsed:
            res = ProxyItem()
            res['ip'] = '%s:%s' % tuple(row.groups())
            yield res
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,888
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/spiders/Baidupan.py
|
# -*- coding: utf-8 -*-
from scrapy.spiders import CrawlSpider
from scrapy.http import Request
from Scrapy.items import *
import json
class BaidupanSpider(CrawlSpider):
    """Crawl public-share metadata from Baidu Pan.

    The spider seeds itself from the hot-user listing (plus any explicit
    `uks`), then for every discovered user walks the user's share list,
    fans list and follow list, yielding one item per record and scheduling
    the newly found users recursively.
    """
    name = 'baidupan'
    # Extra seed user ids (uk) to crawl besides the hot-user listing.
    uks = []
    # NOTE(review): misspelt `allowed_domains`; as written Scrapy ignores it.
    allowed_domins = ['https://pan.baidu.com']
    # Hot-user listing, paginated 24 records at a time.
    URL_HOT = 'https://pan.baidu.com/pcloud/friend/gethotuserlist?start={start}&limit=24'
    # Single-user profile endpoint (uname, fans_count, pubshare_count, ...).
    URL_INFO = 'https://pan.baidu.com/pcloud/user/getinfo?&query_uk={uk}'
    # Album listing endpoint is still missing (was: 缺少专辑列表).
    URL_SHARE_LIMIT = 100
    # Public share list of one user; the page size is fixed at 100 in the URL.
    URL_SHARE = 'https://pan.baidu.com/pcloud/feed/getsharelist?&auth_type=1&start={start}&limit=100&query_uk={uk}'
    URL_FOLLOW_LIMIT = 24
    # Users that {uk} subscribes to.
    URL_FOLLOW = 'https://pan.baidu.com/pcloud/friend/getfollowlist?query_uk={uk}&limit={limit}&start={start}'
    URL_FANS_LIMIT = 24
    # Users subscribed to {uk}.
    URL_FANS = 'https://pan.baidu.com/pcloud/friend/getfanslist?query_uk={uk}&limit={limit}&start={start}'
    # Crawl rate in requests/second; __init__ derives download_delay from it.
    rate = 360.0 / 60.0
    # Feature switches: follow lists are always crawled, fans and share
    # lists are optional.
    parse_fans = False
    parse_share_list = True
    parse_share_priority = 0

    def __init__(self, *args, **kwargs):
        """Let any -a spider argument override the matching class attribute."""
        # NOTE(review): CrawlSpider.__init__ is deliberately not called here,
        # matching the original behaviour.
        for key, value in kwargs.items():
            setattr(self, key, value)
        if hasattr(self, 'rate'):
            self.download_delay = 1 / getattr(self, 'rate')

    def start_requests(self):
        """Seed the crawl: page 0 of the hot-user list plus every uk in self.uks."""
        requests = []
        start = 0
        requests.append(Request(
            url=self.URL_HOT.format(start=start),
            callback=self.parseHotUserList,
            meta={'start': start},
            dont_filter=True
        ))
        for uk in self.uks:
            share_request = Request(
                url=self.URL_SHARE.format(uk=uk, start=start, limit=self.URL_SHARE_LIMIT),
                callback=self.parseShareList,
                headers={'Referer': 'https://pan.baidu.com/share/home'},
                meta={'uk': uk, 'start': start, 'limit': self.URL_SHARE_LIMIT},
                priority=self.parse_share_priority
            )
            fans_request = Request(
                url=self.URL_FANS.format(uk=uk, start=start, limit=self.URL_FANS_LIMIT),
                callback=self.parseFans,
                meta={'uk': uk, 'start': start, 'limit': self.URL_FANS_LIMIT}
            )
            follow_request = Request(
                url=self.URL_FOLLOW.format(uk=uk, start=start, limit=self.URL_FOLLOW_LIMIT),
                callback=self.parseFollow,
                meta={'uk': uk, 'start': start, 'limit': self.URL_FOLLOW_LIMIT}
            )
            if self.parse_share_list:
                requests.append(share_request)
            if self.parse_fans:
                requests.append(fans_request)
            requests.append(follow_request)
        return requests

    def parseHotUserList(self, response):
        """Parse the hot-user listing: yield each user, fan out to their lists."""
        payload = json.loads(response.body_as_unicode())
        if payload['errno'] == 0:
            for record in payload['hotuser_list']:
                yield BaidupanHotUserItem(record)
                uk = record['hot_uk']
                if (record['pubshare_count'] > 0 or record['album_count'] > 0) and self.parse_share_list:
                    yield Request(
                        url=self.URL_SHARE.format(uk=uk, start=0, limit=self.URL_SHARE_LIMIT),
                        callback=self.parseShareList,
                        headers={'Referer': 'https://pan.baidu.com/share/home'},
                        meta={'uk': uk, 'start': 0, 'limit': self.URL_SHARE_LIMIT},
                        priority=self.parse_share_priority
                    )
                if record['fans_count'] > 0 and self.parse_fans:
                    yield Request(
                        url=self.URL_FANS.format(uk=uk, start=0, limit=self.URL_FANS_LIMIT),
                        callback=self.parseFans,
                        meta={'uk': uk, 'start': 0, 'limit': self.URL_FANS_LIMIT}
                    )
                if record['follow_count'] > 0:
                    yield Request(
                        url=self.URL_FOLLOW.format(uk=uk, start=0, limit=self.URL_FOLLOW_LIMIT),
                        callback=self.parseFollow,
                        meta={'uk': uk, 'start': 0, 'limit': self.URL_FOLLOW_LIMIT},
                        dont_filter=True
                    )
            # A parsed response dict is always non-empty, so pagination only
            # stops once the endpoint reports errno != 0 (kept as original).
            if len(payload) > 0:
                start = response.meta['start'] + 24
                yield Request(
                    url=self.URL_HOT.format(start=start),
                    callback=self.parseHotUserList,
                    meta={'start': start},
                    dont_filter=True
                )

    def parseShareList(self, response):
        """Yield one BaiduPanShareItem per record, then request the next page."""
        payload = json.loads(response.body_as_unicode())
        if payload['errno'] == 0:
            for record in payload['records']:
                yield BaiduPanShareItem(record)
            # next page
            start = response.meta['start']
            total_count = int(payload['total_count'])
            if (start + 1) < total_count and self.parse_share_list:
                uk = response.meta['uk']
                start = start + self.URL_SHARE_LIMIT
                limit = self.URL_SHARE_LIMIT
                yield Request(
                    url=self.URL_SHARE.format(uk=uk, start=start, limit=limit),
                    headers={'Referer': 'https://pan.baidu.com/share/home'},
                    callback=self.parseShareList,
                    meta={'uk': uk, 'start': start, 'limit': limit},
                    priority=self.parse_share_priority
                )

    def parseFans(self, response):
        """Yield one BaiduPanFansItem per fan, fan out per user, then paginate."""
        payload = json.loads(response.body_as_unicode())
        if payload['errno'] == 0:
            for record in payload['fans_list']:
                yield BaiduPanFansItem(record)
                uk = record['fans_uk']
                if (record['pubshare_count'] > 0 or record['album_count'] > 0) and self.parse_share_list:
                    yield Request(
                        url=self.URL_SHARE.format(uk=uk, start=0, limit=self.URL_SHARE_LIMIT),
                        callback=self.parseShareList,
                        headers={'Referer': 'https://pan.baidu.com/share/home'},
                        meta={'uk': uk, 'start': 0, 'limit': self.URL_SHARE_LIMIT},
                        priority=self.parse_share_priority
                    )
                if record['fans_count'] > 0 and self.parse_fans:
                    yield Request(
                        url=self.URL_FANS.format(uk=uk, start=0, limit=self.URL_FANS_LIMIT),
                        callback=self.parseFans,
                        meta={'uk': uk, 'start': 0, 'limit': self.URL_FANS_LIMIT}
                    )
                if record['follow_count'] > 0:
                    yield Request(
                        url=self.URL_FOLLOW.format(uk=uk, start=0, limit=self.URL_FOLLOW_LIMIT),
                        callback=self.parseFollow,
                        meta={'uk': uk, 'start': 0, 'limit': self.URL_FOLLOW_LIMIT}
                    )
            # next page
            start = response.meta['start']
            total_count = int(payload['total_count'])
            if (start + 1) < total_count and self.parse_fans:
                uk = response.meta['uk']
                start = start + self.URL_FANS_LIMIT
                yield Request(
                    url=self.URL_FANS.format(uk=uk, start=start, limit=self.URL_FANS_LIMIT),
                    callback=self.parseFans,
                    meta={'uk': uk, 'start': start, 'limit': self.URL_FANS_LIMIT}
                )

    def parseFollow(self, response):
        """Yield one BaiduPanFollwItem per followee, fan out, then paginate."""
        payload = json.loads(response.body_as_unicode())
        if payload['errno'] == 0:
            for record in payload['follow_list']:
                yield BaiduPanFollwItem(record)
                uk = record['follow_uk']
                # Request the followee's share list.
                if (record['pubshare_count'] > 0 or record['album_count'] > 0) and self.parse_share_list:
                    yield Request(
                        url=self.URL_SHARE.format(uk=uk, start=0, limit=self.URL_SHARE_LIMIT),
                        callback=self.parseShareList,
                        headers={'Referer': 'https://pan.baidu.com/share/home'},
                        meta={'uk': uk, 'start': 0, 'limit': self.URL_SHARE_LIMIT},
                        priority=self.parse_share_priority
                    )
                if record['fans_count'] > 0 and self.parse_fans:
                    yield Request(
                        url=self.URL_FANS.format(uk=uk, start=0, limit=self.URL_FANS_LIMIT),
                        callback=self.parseFans,
                        meta={'uk': uk, 'start': 0, 'limit': self.URL_FANS_LIMIT}
                    )
                if record['follow_count'] > 0:
                    yield Request(
                        url=self.URL_FOLLOW.format(uk=uk, start=0, limit=self.URL_FOLLOW_LIMIT),
                        callback=self.parseFollow,
                        meta={'uk': uk, 'start': 0, 'limit': self.URL_FOLLOW_LIMIT}
                    )
            # next page
            start = response.meta['start']
            total_count = int(payload['total_count'])
            # BUGFIX: pagination was gated on `self.parse_fans` (copy-paste from
            # parseFans); follow lists are always crawled, so paginate whenever
            # more records remain.
            if (start + 1) < total_count:
                uk = response.meta['uk']
                start = start + self.URL_FOLLOW_LIMIT
                yield Request(
                    url=self.URL_FOLLOW.format(uk=uk, start=start, limit=self.URL_FOLLOW_LIMIT),
                    callback=self.parseFollow,
                    meta={'uk': uk, 'start': start, 'limit': self.URL_FOLLOW_LIMIT}
                )
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,889
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/settings.py
|
# Scrapy settings for Scrapy project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'Scrapy'
SPIDER_MODULES = ['Scrapy.spiders']
NEWSPIDER_MODULE = 'Scrapy.spiders'
ITEM_PIPELINES = {
    'Scrapy.pipelines.MongoPipeline': 100,
}
# Retry first, choose a random proxy, let the stock HttpProxyMiddleware apply
# it; BaiduyunMiddleware runs last so it can inspect the final responses.
DOWNLOADER_MIDDLEWARES = {
    'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90,
    'Scrapy.middlewares.ProxyMiddleware': 99,
    'scrapy_proxies.RandomProxy': 100,
    'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 110,
    'Scrapy.middlewares.BaiduyunMiddleware': 560,
}
SPIDER_MIDDLEWARES = {
}
# Concurrency. BUGFIX: CONCURRENT_ITEMS was assigned twice (100, then 1000);
# only the final 1000 ever took effect, so the duplicate is removed.
CONCURRENT_ITEMS = 1000
CONCURRENT_REQUESTS = 16
CONCURRENT_REQUESTS_PER_DOMAIN = 8
REACTOR_THREADPOOL_MAXSIZE = 10
COOKIES_ENABLED = False
LOG_ENABLED = True
#LOG_FILE = 'ScrapyCrawl.log'
#LOG_LEVEL = 'INFO'
DOWNLOAD_DELAY = 0.25
# Graphite stats collection (currently disabled below).
GRAPHITE_HOST = '127.0.0.1'
GRAPHITE_PORT = 2003
#STATS_CLASS = 'Scrapy.graphite.RedisGraphiteStatsCollector'
# Unbounded depth with a positive DEPTH_PRIORITY (breadth-first-ish crawl).
DEPTH_LIMIT = 0
DEPTH_PRIORITY = 1
DEPTH_STATS = True
AUTOTHROTTLE_ENABLED = True
AUTOTHROTTLE_DEBUG = True
AUTOTHROTTLE_START_DELAY = 1
AUTOTHROTTLE_MAX_DELAY = 300
RETRY_ENABLED = True
RETRY_TIMES = 3
# Proxy pool file consumed by scrapy_proxies.RandomProxy.
PROXY_LIST = '/tmp/ip-good.txt'
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,890
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/spiders/proxy.py
|
from scrapy.spiders import Spider, Request
from scrapy.selector import Selector
from Scrapy.items import ProxyItem
from selenium import webdriver
from scrapy.selector import HtmlXPathSelector
import time
class ProxySpider(Spider):
    """Harvest HTTP proxies from public proxy-list sites and verify each
    candidate with a HEAD request through itself (see parseProxy)."""
    name = 'proxy'
    # Consumed by project pipelines/middlewares to opt in per spider.
    pipelines = ['ProxySpider']
    middlewares = ['Selenium']
    # Any reachable page works; it is only used as a probe target.
    start_urls = ['http://www.baidu.com']
    # Source name -> listing URL; 'parse' + name must be a method below.
    urls = {
        'Youdaili':'http://www.youdaili.cn/Daili/http/',
        'Hidemyass':'https://hidemyass.com/proxy-list/',
        #'Cnproxy':'http://www.cnproxy.com/proxy1.html'
    }
    def __init__(self, *args, **kwargs):
        # Intentionally skips Spider.__init__; no per-run configuration.
        pass
    def parse(self, response):
        """Remember the probe URL, then dispatch one request per source site."""
        if response.status == 200:
            self.url = response.url
            # NOTE(review): dict.iteritems() is Python-2 only.
            for proxy, url in self.urls.iteritems():
                yield Request(url = url, callback = getattr(self, 'parse' + proxy))
    def parseCnproxyDetail(self, response):
        pass
    def parseCnproxy(self, response):
        """Render the cnproxy listing with PhantomJS and probe each ip:port."""
        dr=webdriver.PhantomJS()
        dr.get(response.url)
        pageSource = dr.page_source
        dr.close()
        sel = Selector(text = pageSource, type='html')
        trs = sel.xpath('//*[@id="proxylisttb"]/table[3]//tr[1]/following-sibling::tr')
        for key, tr in enumerate(trs):
            # Two capture groups: the IP and the ':port' suffix.
            result = tr.re(r'(\d+(?:\.\d+){3})(?:.*)(:\d+)')
            if len(result) == 2:
                proxy = result[0] + result[1]
                yield Request(url=self.url + '?' + proxy, method="HEAD", meta={"proxy":'http://' + proxy, "download_timeout":10}, callback=self.parseProxy)
    def parseHidemyass(self, response):
        # Not implemented.
        return
    def parseYoudaili(self, response):
        # NOTE(review): the early return deliberately disables this source;
        # everything below it is unreachable.
        return
        sel = Selector(response)
        links = sel.xpath('//ul[@class="newslist_line"]/li/a/@href').extract()
        for key, link in enumerate(links):
            yield Request(url = link, callback = self.parseYoudailiDetail)
        return
    def parseYoudailiDetail(self, response):
        """Extract ip:port strings from a youdaili article and probe each one."""
        sel = Selector(response)
        proxys = sel.xpath('//div[@class="cont_font"]/p').re(r"\d+.\d+.\d+.\d+:\d+")
        for proxy in proxys:
            yield Request(url=self.url + '?' + proxy, method="HEAD", meta={"proxy":'http://' + proxy, "download_timeout":10}, callback=self.parseProxy)
    def parseProxy(self, response):
        """Record the measured latency/status of a probed proxy.

        NOTE(review): 'startTime'/'endTime' are assumed to be stamped into
        request.meta by a downloader middleware — confirm; they are not set
        anywhere in this file.
        """
        proxyItem = ProxyItem()
        proxyItem['ip'] = response.meta['proxy']
        proxyItem['delay'] = response.meta['endTime'] - response.meta['startTime']
        proxyItem['status'] = response.status
        proxyItem['time'] = time.time()
        yield proxyItem
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,891
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/middlewares.py
|
from urlparse import urlparse,parse_qs
from pymongo import MongoClient
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware
from selenium import webdriver
import time
import random
import json
class ProxyMiddleware(object):
    """Strip any assigned proxy from Baidu Pan friend-list requests so they
    are fetched directly."""

    # Endpoint paths that must bypass the proxy pool.
    _DIRECT_PATHS = (
        '/pcloud/friend/gethotuserlist',
        '/pcloud/friend/getfollowlist',
    )

    def process_request(self, request, spider):
        parsed = urlparse(request.url)
        if parsed.path in self._DIRECT_PATHS:
            # Missing key is fine: pop with a default never raises.
            request.meta.pop('proxy', None)
class DoubanMiddleware(object):
    """Inject the douban API key into DoubanSpider's HTTPS API requests.

    A pool of known-good proxies is loaded from MongoDB at start-up; it is
    currently unused by process_request but kept for compatibility.
    """
    def __init__(self):
        mongo = MongoClient().scrapy
        self.proxys = list(mongo.proxy.find({'status':200}))
    def process_request(self, request, spider):
        if spider.__class__.__name__ == 'DoubanSpider':
            url = urlparse(request.url)
            params = parse_qs(url.query)
            if url.scheme == 'https':
                # BUGFIX: the original assigned request.replace(...) to a local
                # variable and returned None, so the apikey was never actually
                # applied. Returning the replaced request hands it back to the
                # scheduler, as the downloader-middleware contract requires.
                if len(url.query) == 0:
                    return request.replace(url = "%s?apikey=0d58236c3758bc2928086a44a60a347b" % request.url)
                elif 'apikey' not in params:
                    return request.replace(url = "%s&apikey=0d58236c3758bc2928086a44a60a347b" % request.url)
            elif url.scheme == 'http':
                pass
        elif 'Selenium' in getattr(spider, 'middlewares', []):
            # Selenium rendering hook left disabled, as in the original.
            pass
            #browser = webdriver.Firefox()
            #browser.get(request.url)
    def process_response(self, request, response, spider):
        # Responses pass through untouched; non-200s are left to Scrapy.
        if response.status != 200:
            pass
        return response
    def process_exception(self, request, exception, spider):
        pass
class BaiduyunMiddleware(object):
    """Downgrade throttled Baidu Pan API responses so the retry middleware
    reschedules them."""

    def process_response(self, request, response, spider):
        # Only the baidupan spider's JSON responses are inspected.
        if spider.__class__.__name__ != 'BaidupanSpider':
            return response
        payload = json.loads(response.body_as_unicode())
        if payload['errno'] != 0:
            # Back off hard when the API signals an error (likely a ban),
            # then report a 500 so Scrapy retries the request later.
            time.sleep(300)
            return response.replace(status=500)
        return response
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,892
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/items.py
|
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
from scrapy.item import Item, Field
from scrapy.loader import ItemLoader
from scrapy.loader.processors import Join, MapCompose, TakeFirst,Identity,Compose
from datetime import datetime
class VideoItem(Item):
    """Base item for crawled videos; `source` names the originating site."""
    source=Field()
class BaidupanHotUserItem(Item):
    """One record from Baidu Pan's hot-user listing (gethotuserlist)."""
    uk = Field()
    uname = Field()
    type = Field()
    hot_uname = Field()
    avatar_url = Field()
    intro = Field()
    user_type = Field()
    is_vip = Field()
    follow_count = Field()
    fans_count = Field()
    follow_time = Field()
    pubshare_count = Field()
    hot_uk = Field()
    album_count = Field()
class BaiduPanFansItem(Item):
    """One record from a user's fans list (getfanslist)."""
    uk = Field()
    uname = Field()
    type = Field()
    fans_uname = Field()
    avatar_url = Field()
    intro = Field()
    user_type = Field()
    is_vip = Field()
    follow_count = Field()
    fans_count = Field()
    follow_time = Field()
    pubshare_count = Field()
    fans_uk = Field()
    album_count = Field()
class BaiduPanFollwItem(Item):
    """One record from a user's follow list (getfollowlist).

    NOTE(review): class name keeps the original 'Follw' typo because other
    modules import it by this name.
    """
    uk = Field()
    uname = Field()
    type = Field()
    follow_uname = Field()
    avatar_url = Field()
    intro = Field()
    user_type = Field()
    is_vip = Field()
    follow_count = Field()
    fans_count = Field()
    follow_time = Field()
    pubshare_count = Field()
    follow_uk = Field()
    album_count = Field()
class BaiduPanShareItem(Item):
    """One public share (or album) record from getsharelist."""
    cover_thumb = Field()
    operation = Field()
    album_id = Field()
    feed_type = Field()
    category = Field()
    public = Field()
    shareid = Field()
    data_id = Field()
    title = Field()
    third = Field()
    clienttype = Field()
    filecount = Field()
    uk = Field()
    username = Field()
    feed_time = Field()
    desc = Field()
    avatar_url = Field()
    # Per-category file counts as returned by the API.
    category_1_cnt = Field()
    category_2_cnt = Field()
    category_3_cnt = Field()
    category_4_cnt = Field()
    category_5_cnt = Field()
    category_6_cnt = Field()
    category_7_cnt = Field()
    category_8_cnt = Field()
    category_9_cnt = Field()
    dir_cnt = Field()
    filelist = Field()
    source_uid = Field()
    source_id = Field()
    shorturl = Field()
    vCnt = Field()
    dCnt = Field()
    tCnt = Field()
    like_status = Field()
    like_count = Field()
    comment_count = Field()
class CelebrityItem(Item):
    """A douban movie celebrity (actor/director) from the v2 API."""
    mobile_url = Field()
    aka_en = Field()
    name = Field()
    works = Field()
    gender = Field()
    avatars = Field()
    id = Field()
    aka = Field()
    name_en = Field()
    born_place = Field()
    alt = Field()
class Person(Item):
    """A douban user with collect/wish counters."""
    id = Field()
    name = Field()
    icon = Field()
    collect = Field()
    wish = Field()
class MovieItem(VideoItem):
    """Douban movie/TV subject: v2-API fields plus extras scraped from the
    subject page (writers, imdb_id, tags, recommendations, comments, reviews).

    BUGFIX: `title`, `collect_count`, `original_title` and `subtype` were each
    declared twice; the redundant duplicates are removed (the field set is
    unchanged).
    """
    rating = Field()
    title = Field()
    collect_count = Field()
    original_title = Field()
    subtype = Field()
    year = Field()
    images = Field()
    alt = Field()
    id = Field()
    reviews_count = Field()
    wish_count = Field()
    douban_site = Field()
    mobile_url = Field()
    do_count = Field()
    seasons_count = Field()
    schedule_url = Field()
    episodes_count = Field()
    countries = Field()
    genres = Field()
    casts = Field()
    current_season = Field()
    summary = Field()
    directors = Field()
    comments_count = Field()
    ratings_count = Field()
    aka = Field()
    writers = Field()
    imdb_id = Field()
    tags = Field()
    recommendations = Field()
    comments = Field()
    reviews = Field()
class ProxyItem(Item):
    """A probed HTTP proxy with its measured latency and status."""
    ip = Field()
    delay = Field()
    type = Field()
    anonymity = Field()
    status = Field()
    time = Field()
class streamtypes(Item):
    """Youku stream-quality flags.

    NOTE(review): lowercase class name breaks PEP 8 but is kept — it is part
    of the importable interface.
    """
    hd2=Field()
    flv=Field()
    hd=Field()
    hd3gp=Field()
    hd3=Field()
class ShowItem(VideoItem):
    """A Youku show as returned by the v2 shows/by_category API."""
    id=Field()
    name=Field()
    link=Field()
    play_link=Field()
    last_play_link=Field()
    poster=Field()
    thumbnail=Field()
    streamtypes=Field()
    hasvideotype=Field()
    completed=Field()
    # Integer-serialized counters (see also ShowLoader input processors).
    episode_count=Field(serializer=int)
    episode_updated=Field()
    category=Field()
    view_count=Field(serializer=int)
    source=Field()
    paid=Field()
    published=Field()
    released=Field()
    comment_count=Field(serializer=int)
    favorite_count=Field(serializer=int)
    lastupdate=Field()
    dma=Field()
    type=Field()
    dct=Field()
    algInfo=Field()
    related=Field()
class ShowLoader(ItemLoader):
    """ItemLoader for ShowItem: scalar outputs, int-coerced counters, but
    list-valued streamtypes/hasvideotype kept intact."""
    default_output_processor=TakeFirst()
    streamtypes_out=Identity()
    hasvideotype_out=Identity()
    #published_out=Compose(lambda s:datetime.strptime(s[0], '%Y-%m-%d'))
    favorite_count_in=MapCompose(int)
    episode_count_in=MapCompose(int)
    view_count_in=MapCompose(int)
    comment_count_in=MapCompose(int)
class ShowVideoItem(Item):
    """A single video (episode/trailer/...) belonging to a Youku show."""
    # Foreign key back to the owning ShowItem.
    show_id=Field()
    id=Field()
    title=Field()
    link=Field()
    thumbnail=Field()
    duration=Field()
    category=Field()
    view_count=Field()
    favorite_count=Field()
    comment_count=Field()
    up_count=Field()
    down_count=Field()
    stage=Field()
    seq=Field()
    published=Field()
    operation_limit=Field()
    streamtypes=Field()
    state=Field()
    rc_title=Field()
class UncomplatedItem(Item):
    """Marker item holding only the id of a show whose crawl is incomplete."""
    id=Field()
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,893
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/spiders/youku.py
|
#coding=utf-8
from scrapy.spiders import CrawlSpider, Spider
from scrapy.loader import ItemLoader
from scrapy.loader.processors import TakeFirst, MapCompose, Join
from scrapy.exceptions import CloseSpider
from scrapy.selector import Selector
from scrapy.http import FormRequest
from scrapy.http import Request
from Scrapy.items import *
from urlparse import urlparse,parse_qs
import scrapy
import json
import pymongo
from datetime import datetime, date, time
class YoukuSpider(CrawlSpider):
    """Crawl Youku show metadata (and optionally each show's video list)
    through the openapi.youku.com v2 API."""
    name = 'youku'
    #download_delay=3600/1000
    # NOTE(review): misspelt `allowed_domains`; Scrapy ignores this as written.
    allowed_domins = ['http://www.youku.com', 'https://openapi.youku.com']
    start_urls = []
    """
    config of youku
    """
    client_id='696c961ded023528'
    count='100'
    # The by_category API only exposes the first max_matches results per query.
    max_matches=1500
    parse_videos_after_show=False
    category=u"电影"
    #1000/hour
    #@link http://open.youku.com/docs/newbieguide.html#id4
    rate=float(1000)/3600
    """
    Apis
    """
    shows_by_category_url='https://openapi.youku.com/v2/shows/by_category.json'
    show_category_url='https://openapi.youku.com/v2/schemas/show/category.json'
    shows_show_url='https://openapi.youku.com/v2/shows/show.json'
    shows_videos_url='https://openapi.youku.com/v2/shows/videos.json'
    def __init__(self, category = None, *args, **kwargs):
        # download_delay is derived from `rate` (requests/second); spider
        # arguments may add attributes but never override existing ones.
        self.mongo=pymongo.MongoClient()
        if hasattr(self, 'rate'):
            self.download_delay=1/getattr(self, 'rate')
        if category:
            self.category=unicode(category, 'utf-8')
        for k,v in enumerate(kwargs):
            if not hasattr(self, v):
                setattr(self, v, kwargs[v])
    def start_requests(self):
        # Three entry modes: re-check uncompleted shows from MongoDB, refresh
        # one show's videos, or walk the full category tree.
        if hasattr(self, 'type') and getattr(self, 'type') == 'uncompleted videos':
            requests=[]
            for show in self.mongo.scrapy.videos.find({'completed':0}):
                requests.append(self.queryShowsVideos({'show_id':show['id']}))
            return requests
        elif hasattr(self, 'show_id') and hasattr(self, 'videos'):
            #update videos of show which id is `show_id`
            return [self.queryShowsVideos({'show_id':getattr(self, 'show_id')})]
        else:
            #update all
            return [Request(self.show_category_url, callback=self.parseCategory)]
    def parseCategory(self, response):
        # Fan out one by_category query per (category, genre) pair, restricted
        # to self.category when it is set.
        categories=json.loads(response.body)
        if 'categories' in categories:
            for category in categories['categories']:
                if hasattr(self, 'category') and self.category != category['label']:
                    continue
                data={'client_id':self.client_id, 'category':category['label'], 'page':'1', 'count':'100'}
                if 'genre' in category:
                    if hasattr(self, 'year'):
                        pass
                        #data['release_year']=getattr(self, 'year')
                    if hasattr(self, 'area'):
                        pass
                        #data['area']=getattr(self, 'area')
                    if hasattr(self, 'orderby'):
                        pass
                        #data['orderby']=getattr(self, 'orderby')
                    for genre in category['genre']:
                        data['genre']=genre['label']
                        yield self.queryShowsByCategory(data)
                else:
                    yield self.queryShowsByCategory(data)
    def parseShowsByCategory(self, response):
        # Handles one page of by_category results: split oversized result sets
        # by release_year, yield ShowItems, then paginate.
        shows=json.loads(response.body)
        if 'total' in shows:
            shows_total=int(shows['total'])
            if shows_total == 0:
                return
            # add subclass(area, release_year),if total of shows greater than max_matches
            elif shows_total > self.max_matches:
                data=response.meta['formdata']
                #if 'area' not in response.meta['formdata']:
                #    for area in self.schemas_unit:
                #        data['area']=area
                #        yield self.queryShowsByCategory(data)
                if 'release_year' not in response.meta['formdata']:
                    years=range(2008, datetime.now().year+1)
                    years.append(9999)
                    for year in years:
                        data['release_year']=str(year)
                        yield self.queryShowsByCategory(data)
                return
        if 'shows' in shows:
            # NOTE(review): this loop and the one below both yield an item per
            # show, so every show appears to be emitted twice; the Mongo
            # pipeline upserts by id, which masks the duplication — confirm
            # whether this first loop is leftover code.
            for show in shows['shows']:
                if 'id' in show:
                    pass
                    #yield self.queryShowsVideos({'client_id':self.client_id, 'show_id':str(show['id'])})
                else:
                    print show
                    continue
                showItem=ShowItem(source='youku')
                itemLoader = ShowLoader(item=showItem)
                for k in show:
                    if k in showItem.fields:
                        showItem[k]=show[k]
                        itemLoader.add_value(k, show[k])
                yield itemLoader.load_item()
        else:
            # NOTE(review): bare `raise` with no active exception is itself an
            # error at runtime — presumably meant to raise something specific.
            raise
        # add subclass(area, release_year),if total of shows greater than max_matches
        for show in shows['shows']:
            #parse videos of show
            if 'id' in show and getattr(self, 'parse_videos_after_show'):
                yield self.queryShowsVideos({'client_id':self.client_id, 'show_id':str(show['id'])})
            showItem=ShowItem(source='youku')
            itemLoader = ShowLoader(item=showItem)
            for k in show:
                if k in showItem.fields:
                    showItem[k]=show[k]
                    itemLoader.add_value(k, show[k])
            yield itemLoader.load_item()
        #next page
        if "formdata" in response.meta and all(key in response.meta['formdata'] for key in ['page', 'count', 'category']):
            page=int(response.meta['formdata']['page'])
            next_page=page+1
            count=int(response.meta['formdata']['count'])
            # Stop at the API's max_matches window or at the reported total.
            if next_page*count < self.max_matches and page*count < shows_total:
                data=response.meta['formdata']
                data['page']=str(next_page)
                print data
                yield self.queryShowsByCategory(data)
    def queryShowsByCategory(self, formdata):
        # Builds the POST for shows/by_category; scrapy.log.msg is deprecated
        # in modern Scrapy but kept for byte-compatibility.
        scrapy.log.msg(formdata, level=scrapy.log.INFO)
        #check necessary keys
        if all(key in formdata for key in ['client_id', 'category']): return FormRequest(self.shows_by_category_url, formdata=formdata, callback=self.parseShowsByCategory, meta={'formdata':formdata})
    def queryShowsVideos(self, formdata):
        # Builds the POST for shows/videos, filling in paging defaults.
        #check necessary keys
        if all(key in formdata for key in ['show_id']):
            formdata['count']=str(formdata['count']) if 'count' in formdata else '100'
            formdata['page']=str(formdata['page']) if 'page' in formdata else '1'
            formdata['client_id']=str(formdata['client_id']) if 'client_id' in formdata else self.client_id
            #formdata['show_videotype']=str(formdata['show_videotype']) if 'show_videotype' in formdata else '正片,预告片,花絮,MV,资讯,首映式'
            formdata['orderby']=str(formdata['orderby']) if 'orderby' in formdata else 'videoseq-asc'
            return FormRequest(self.shows_videos_url, formdata=formdata, callback=self.parseShowsVideos, meta={'formdata':formdata})
        else:
            pass
    def parseShowsVideos(self, response):
        # Yields one ShowVideoItem per video of a show, then paginates.
        if 'formdata' not in response.meta or 'show_id' not in response.meta['formdata']:
            return
        #init variables
        formdata=response.meta['formdata']
        videos=json.loads(response.body)
        count=int(formdata['count']) if 'count' in formdata else 20
        page=int(formdata['page']) if 'page' in formdata else 1
        total=int(videos['total']) if 'total' in videos else False
        show_id=response.meta['formdata']['show_id']
        #videos
        if 'videos' in videos:
            for video in videos['videos']:
                showVideoItem=ShowVideoItem({'show_id':show_id})
                for k in video:
                    if k in showVideoItem.fields:
                        showVideoItem[k]=video[k]
                yield showVideoItem
        #next page
        if total > page*count:
            formdata['page']=str(page+1)
            yield self.queryShowsVideos(formdata)
    def parseShow(self, response):
        # Not implemented.
        pass
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,894
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/pipelines.py
|
#coding=utf-8
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import os
import pymongo
from Scrapy.items import *
from os import path
from datetime import datetime
from scrapy.exporters import BaseItemExporter
from elasticsearch import Elasticsearch
class BasePipeline(object):
    """Common base for all project pipelines; performs no setup of its own."""

    def __init__(self):
        # Intentionally empty: subclasses override with real initialisation.
        pass
"""
Serializer
"""
class SerializerPipeline(BasePipeline):
    """Serialize every populated field of an item in place using the
    exporter's per-field serializers."""
    def process_item(self, item, spider):
        exporter = BaseItemExporter()
        # Items iterate over their populated field names, dict-style; the
        # original used `enumerate(item)` and discarded the index.
        for field_name in item:
            item[field_name] = exporter.serialize_field(item.fields[field_name], field_name, item[field_name])
        # BUGFIX: Scrapy drops any item a pipeline does not return; the
        # original returned None, silently discarding every item.
        return item
"""
MongoDB
"""
class MongoPipeline(BasePipeline):
    """Persist items into MongoDB (youku/douban data) and Elasticsearch
    (baidupan users and shares)."""
    def __init__(self):
        # Default local connections; the ES index is created idempotently
        # (ignore=400 swallows "index already exists").
        self.mongo = pymongo.MongoClient()
        self.es = Elasticsearch([
            {'host': '127.0.0.1'},
        ])
        self.es.indices.create(index='baidupan', ignore=400)
    def process_item(self, item, spider):
        #upsert youku show
        if isinstance(item, ShowItem) and 'id' in item:
            result=self.mongo.scrapy.videos.update({'id':item['id']}, {'$set':dict(item)}, upsert=True)
        #upsert youku videos when 'ShowVideoItem' == item.__class__.__name__
        if isinstance(item, ShowVideoItem) and 'id' in item and 'show_id' in item:
            # Try to update the matching embedded video first; fall back to
            # appending it to the show's `videos` array.
            result = self.mongo.scrapy.videos.update({'id':item['show_id'], 'videos.id':item['id']}, {'$set':{'videos.$':dict(item)}}, False, True)
            if result['updatedExisting'] == False:
                self.mongo.scrapy.videos.update({'id':item['show_id']}, {'$addToSet':{'videos':dict(item)}}, False, True)
        if 'ProxyItem' == item.__class__.__name__:
            self.mongo.scrapy.proxy.save(dict(item))
        #upsert douban movie
        if isinstance(item, MovieItem):
            # Comments are appended separately so repeated crawls accumulate
            # them instead of overwriting the array.
            if 'comments' in item:
                self.mongo.scrapy.videos.update({'id' : item['id']}, {'$push':{'comments': {'$each': item['comments']}}})
                del(item['comments'])
            self.mongo.scrapy.videos.update({'id' : item['id']}, {'$set':dict(item)}, upsert = True)
        if isinstance(item, CelebrityItem):
            self.mongo.scrapy.celebritys.update({'id' : item['id']}, {'$set':dict(item)}, upsert = True)
        # Baidu Pan items go to Elasticsearch, keyed by their natural ids.
        if isinstance(item, BaiduPanShareItem):
            if 'shareid' in item:
                self.es.update('baidupan', 'sharelist', item['shareid'], {
                    'doc': dict(item),
                    'doc_as_upsert': True
                    }
                )
            elif 'album_id' in item:
                self.es.update('baidupan', 'album', item['album_id'], {
                    'doc': dict(item),
                    'doc_as_upsert': True
                    }
                )
        # Fans/follow/hot-user records are normalised onto uk/uname before
        # being upserted into the shared 'user' type.
        if isinstance(item, BaiduPanFansItem):
            item['uk'] = item['fans_uk']
            item['uname'] = item['fans_uname']
            item.pop('fans_uk', None)
            item.pop('fans_uname', None)
            self.es.update('baidupan', 'user', item['uk'], {
                'doc': dict(item),
                'doc_as_upsert': True
                }
            )
        if isinstance(item, BaiduPanFollwItem):
            item['uk'] = item['follow_uk']
            item['uname'] = item['follow_uname']
            item.pop('follow_uk', None)
            item.pop('follow_uname', None)
            self.es.update('baidupan', 'user', item['uk'], {
                'doc': dict(item),
                'doc_as_upsert': True
                }
            )
        if isinstance(item, BaidupanHotUserItem):
            item['uk'] = item['hot_uk']
            item['uname'] = item['hot_uname']
            item.pop('hot_uk', None)
            item.pop('hot_uname', None)
            self.es.update('baidupan', 'user', item['uk'], {
                'doc': dict(item),
                'doc_as_upsert': True
                }
            )
        return item
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,895
|
yanpeipan/scrapy
|
refs/heads/master
|
/Scrapy/spiders/douban.py
|
# -*- coding: utf-8 -*-
from scrapy.spiders import CrawlSpider, Spider
from scrapy.exceptions import CloseSpider
from scrapy .selector import Selector
from pymongo import MongoClient
from scrapy.http import Request
from Scrapy.items import *
import urlparse
import urllib
import json
from datetime import datetime, date, time
from scrapy.loader import ItemLoader
class DoubanSpider(CrawlSpider):
name = 'douban'
allowed_domins = ['http://www.douban.com', 'https://api.douban.com']
start_urls = ['http://movie.douban.com/tag/']
movie_tag_url = 'http://movie.douban.com/tag/'
movie_search_url = 'https://api.douban.com/v2/movie/search'
movei_subject_url = 'https://api.douban.com/v2/movie/subject/'
# parse movei subject after search movie
parse_movie_subject = False
# rate: 40page/min
rate = 40.0 / 60.0
def __init__(self, *args, **kwargs):
for k, v in enumerate(kwargs):
setattr(self, v, kwargs[v])
if hasattr(self, 'rate'):
self.download_delay = 1 / getattr(self, 'rate')
def start_requests(self):
return [Request(self.movie_tag_url, callback=self.parseMovieTag)]
    def parseCollect(self, response):
        """Walk a user's collection pages, following the "next" paginator.

        NOTE(review): `movieId` is extracted per row but never used and
        nothing is yielded per row — presumably work in progress.
        """
        sel = Selector(response)
        links = sel.xpath('//div[@class="grid-view"]/div')
        links.extract()
        for index, link in enumerate(links):
            movieId = link.xpath('div[@class="info"]//a[contains(@href, "http://movie.douban.com/subject/")]').re(
                r"http://movie.douban.com/subject/(\d+)/")
        nextLink = sel.xpath(
            '//div[@class="paginator"]/span[@class="next"]/a/@href').extract()
        if len(nextLink) > 0:
            yield Request(url=nextLink.pop(), callback=self.parseCollect)
def parseCelebrity(self, response):
celebrity = json.loads(response.body_as_unicode())
if len(celebrity) > 0:
celebrityItem = CelebrityItem()
for k, v in celebrity.iteritems():
celebrityItem[k] = v
yield celebrityItem
def parseComment(self, response):
sel = Selector(response)
movieItem = MovieItem()
movieItem['id'] = response.meta['id']
commentLinks = sel.xpath(
'//div[@id="comments"]/div[contains(@class, "comment-item")]')
commentLinks.extract()
comments = []
for index, commentLink in enumerate(commentLinks):
comment = {}
comment['avatar'] = commentLink.xpath(
'div[@class="avatar"]/a/img/@src').extract().pop()
comment['uid'] = commentLink.xpath('div[@class="comment"]//span[@class="comment-info"]/a/@href').re(
r"http://movie.douban.com/people/(.*)/").pop()
comment['name'] = commentLink.xpath(
'div[@class="comment"]//span[@class="comment-info"]/a/text()').extract().pop()
comment['comment'] = commentLink.xpath(
'div[@class="comment"]/p/text()').extract().pop()
dateStr = commentLink.xpath(
'div[@class="comment"]/h3/span[@class="comment-info"]/span/text()').re(r'\d+-\d+-\d+').pop()
comment['date'] = datetime.strptime(dateStr, "%Y-%m-%d")
comment['vote'] = int(
commentLink.xpath('div[@class="comment"]//span[@class="comment-vote"]/span[contains(@class, "votes")]/text()').extract().pop())
comments.append(comment)
movieItem['comments'] = comments
yield movieItem
paginator = sel.xpath(
'//div[@id="paginator"]/a[@class="next"]/@href').extract()
parsedUrl = urlparse(response.url)
return # yan dd
yield Request(url=parsedUrl.scheme + '://' + parsedUrl.netloc + parsedUrl.path + paginator.pop(), callback=self.parseComment, meta={'id': response.meta['id']})
    def parseReview(self, response):
        """Placeholder callback: review parsing is not implemented yet."""
        pass
    def parseSubject(self, response):
        """Parse a movie's HTML subject page: writers, imdb id, tags and
        recommendations; schedules an API request for a recommended movie."""
        sel = Selector(response)
        movieItem = MovieItem()
        movieItem['id'] = response.meta['id']
        # parse writers
        writerLinks = sel.xpath('//*[@id="info"]/span[2]/a')
        writerLinks.extract()
        writers = []
        for index, link in enumerate(writerLinks):
            writerId = link.xpath('@href').re(r"/celebrity/(\d+)/")
            # Some writers have no celebrity page; store None for those.
            if len(writerId) > 0:
                celebrity = writerId.pop()
            else:
                celebrity = None
            writer = {'id': celebrity, 'name':
                      link.xpath('text()').extract().pop()}
            writers.append(writer)
        movieItem['writers'] = writers
        # parse imdb_id
        imdbId = sel.xpath('//*[@id="info"]/a').re(
            r"http://www.imdb.com/title/(tt\d+)")
        if len(imdbId) > 0:
            movieItem['imdb_id'] = imdbId.pop()
        else:
            movieItem['imdb_id'] = None
        # parse tags
        tagLinks = sel.xpath("//div[contains(@class, 'tags-body')]/a")
        tags = []
        for i, tagLink in enumerate(tagLinks):
            tagItem = TagItem()  # NOTE(review): created but never populated or yielded
            tag = tagLink.xpath('text()').extract().pop()
            num = tagLink.xpath('span/text()').re(r"\((\d+)\)").pop()
            tags.append({'tag': tag, 'num': num})
        movieItem['tags'] = tags
        # yield tagItem
        # parse recommendations
        links = sel.xpath('//*[@id="recommendations"]/div/dl/dd/a')
        links.extract()
        recommendations = []
        for index, recommend in enumerate(links):
            movieId = recommend.xpath('@href').re(r"/subject/(\d+)").pop()
            movieTitle = recommend.xpath('text()').extract().pop()
            recommendations.append({'id': movieId, 'title': movieTitle})
        movieItem['recommendations'] = recommendations
        # NOTE(review): movieId here is whatever the LAST loop iteration left behind,
        # so only one recommendation is requested — and this raises NameError when
        # there are no recommendations. Confirm whether the Request belongs inside
        # the loop above. self.parseMovie is also not visible in this file.
        yield Request(url='https://api.douban.com/v2/movie/subject/' + movieId, callback=self.parseMovie)
        yield movieItem
def parseMovieSubject(self, response):
movie = json.loads(response.body_as_unicode())
if len(movie) > 0:
movieItem = MovieItem()
for k, v in movie.iteritems():
movieItem[k] = v
yield movieItem
for celebrity in (movie['casts'] + movie['directors']):
if id in celebrity:
yield Request(url = 'https://api.douban.com/v2/movie/celebrity/' + celebrity['id'], callback = self.parseCelebrity)
yield Request(url = 'http://movie.douban.com/subject/' + movie['id'], callback = self.parseSubject, meta = {'id':movie['id']})
yield Request(url = 'http://movie.douban.com/subject/' + movie['id'] + '/comments', callback = self.parseComment, meta = {'id':movie['id']})
yield Request(url = 'http://movie.douban.com/subject/' + movie['id'] + '/reviews', callback = self.parseReview, meta = {'id':movie['id']})
    def parseMovieList(self, response):
        """Parse one page of the movie-search API, yield a MovieItem per subject,
        optionally fetch each subject's detail API, then follow the next page by
        advancing the `start` query parameter by 20."""
        movies = json.loads(response.body_as_unicode())
        for movie in movies['subjects']:
            movieItem = MovieItem(source='douban')
            #itemLoader = ItemLoader(item=movieItem, default_output_processor=TakeFirst())
            # Copy only the keys the item actually declares.
            for key in movie:
                if key in movieItem.fields:
                    movieItem[key] = movie[key]
            yield movieItem
            # parse movie subject, when self.parse_movie_subject == True
            # NOTE(review): getattr without a default raises AttributeError if the
            # spider does not define parse_movie_subject — confirm it is always set.
            if getattr(self, 'parse_movie_subject'):
                yield Request(url='https://api.douban.com/v2/movie/subject/' + movie['id'], callback=self.parseMovieSubject)
        # An empty result page means we ran past the last page: stop paginating.
        if len(movies['subjects']) <= 0:
            return
        # next page (urlparse.urlparse / urllib.urlencode — Python 2 module layout)
        url_parts = list(urlparse.urlparse(response.url))
        query = dict(urlparse.parse_qsl(url_parts[4]))
        if 'start' in query:
            query['start'] = (int)(query['start']) + 20
        else:
            query['start'] = 20
        url_parts[4] = urllib.urlencode(query)
        nextUrl = urlparse.urlunparse(url_parts)
        yield Request(url=nextUrl, callback=self.parseMovieList)
    def parseMovieTag(self, response):
        """Parse the tag index page and kick off one movie search per tag."""
        sel = Selector(response)
        items = sel.xpath('//table[@class="tagCol"]//td')
        for item in items:
            tag = item.xpath('a/text()').extract().pop()
            #num=item.xpath('b/text()').re(r"\d+").pop()
            yield Request(url=getattr(self, 'movie_search_url') + '?tag=' + tag, callback=self.parseMovieList)
|
{"/Scrapy/spiders/windj007.py": ["/Scrapy/items.py"], "/Scrapy/spiders/Baidupan.py": ["/Scrapy/items.py"], "/Scrapy/spiders/proxy.py": ["/Scrapy/items.py"], "/Scrapy/pipelines.py": ["/Scrapy/items.py"], "/Scrapy/spiders/douban.py": ["/Scrapy/items.py"]}
|
8,936
|
Chalermwutst/basicpython
|
refs/heads/master
|
/Car.py
|
class Car:
    """A minimal car model: color, brand, wheel/seat counts and a top speed."""

    # Class-level defaults; every instance overrides them in __init__.
    color = ""
    brand = ""
    number_of_wheels = 4
    number_of_seats = 4
    maxspeed = 0

    def __init__(self, color, brand, number_of_wheels, number_of_seats, maxspeed):
        """Store all supplied attributes on the new instance."""
        self.color = color
        self.brand = brand
        self.number_of_wheels = number_of_wheels
        self.number_of_seats = number_of_seats
        self.maxspeed = maxspeed

    def setcolor(self, x):
        """Replace the car's color."""
        self.color = x

    def setbrand(self, x):
        """Replace the car's brand."""
        self.brand = x

    def setspeed(self, x):
        """Replace the car's maximum speed."""
        self.maxspeed = x

    def printdata(self):
        """Print color, brand and max speed, one line each."""
        for label, value in (("Color of this car is", self.color),
                             ("Brand of this car is", self.brand),
                             ("Max speed of this car is", self.maxspeed)):
            print(label, value)

    def __del__(self):
        # Matches the original: emit a blank line when the object is destroyed.
        print()
|
{"/usecar.py": ["/Car.py"]}
|
8,937
|
Chalermwutst/basicpython
|
refs/heads/master
|
/ifcondition.py
|
# Simple if/else demo: age is 15, so the else branch runs.
age = 15
if age == 18:  # fixed: the original `if(age==18)` was missing the colon (SyntaxError)
    print("Allow to access")
else:
    print("Not allow")

# Credentials used by later examples.
user = "admin"
pwd = "1234"
|
{"/usecar.py": ["/Car.py"]}
|
8,938
|
Chalermwutst/basicpython
|
refs/heads/master
|
/Number/calculate.py
|
def plus(number1=0, number2=0):
    """Return the sum of the two numbers (both default to 0)."""
    return number1 + number2
def fibonacci(n):
    """Return the Fibonacci numbers (starting at 1) strictly less than n."""
    series = []
    prev, curr = 0, 1
    while curr < n:
        series.append(curr)
        prev, curr = curr, prev + curr
    return series
|
{"/usecar.py": ["/Car.py"]}
|
8,939
|
Chalermwutst/basicpython
|
refs/heads/master
|
/usecar.py
|
from Car import*
# Create objects (instances) of the Car class and print their details.
objcar1 = Car('red','Toyota',4,4,180)
objcar1.printdata()
print()
objcar2 = Car('Yellow','Honda',4,4,180)
# NOTE(review): the commented lines below would REBIND the bound methods
# (e.g. objcar2.setbrand = "Honda") instead of calling them — they were
# correctly left disabled.
# objcar2.setbrand="Honda"
# objcar2.setcolor="Yellow"
# objcar2.setspeed=200
objcar2.printdata()
|
{"/usecar.py": ["/Car.py"]}
|
8,940
|
Chalermwutst/basicpython
|
refs/heads/master
|
/variable.py
|
# Basic variable assignment and printing.
a = 3
b = 4.92
c = "itgenius"
print(a)
print(b)
print(c)
print(a, b, c)
# Chained assignment and tuple unpacking.
x = y = z = 10
j, k = 5, 15
print(x, y, z)
print(j, k)
# Boolean
status = True
msg = False
print(status, msg)
# Displaying variables together with text (original comment was in Thai).
|
{"/usecar.py": ["/Car.py"]}
|
8,941
|
Chalermwutst/basicpython
|
refs/heads/master
|
/main.py
|
age = 15  # sample value used by the conditional examples in this repo
|
{"/usecar.py": ["/Car.py"]}
|
8,943
|
gulsumsr/receipt-reader
|
refs/heads/master
|
/app.py
|
import os
import numpy as np
from flask import Flask, render_template, jsonify, request
from preprocess import preprocess_image
from read_receipt_image import read_receipt
import io
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
# curl -F "file=@../data/receipt.jpg" https://receipt-reader-bk.herokuapp.com/
# curl -F "file=@../data/receipt.jpg" http://0.0.0.0:5000/
def im_to_json(im):
    """Preprocess a receipt image, OCR it, and return the parsed bill as JSON.

    Args:
        im: decoded image array (as returned by ``plt.imread``).

    Returns:
        A Flask JSON response with the receipt contents produced by
        ``read_receipt`` (items / subtotal / tax / total keys).
    """
    # Removed: a hard-coded placeholder bill (beer items + fake tax) that was
    # computed and then unconditionally overwritten by read_receipt() — dead code.
    # Straighten/enhance the photo before OCR.
    preprocessed_im = preprocess_image(im)
    # Re-encode the preprocessed image to raw bytes for the OCR backend.
    buf = io.BytesIO()
    plt.imsave(buf, preprocessed_im)
    im_data = buf.getvalue()
    bill_dict = read_receipt(im_data)
    return jsonify(bill_dict)
# Flask application object; routes are registered on it below.
app = Flask(__name__)
@app.route("/", methods=["POST", "GET"])
def home():
    """Accept an image uploaded under form field 'file' and return the parsed bill."""
    if 'file' in request.files:
        im = plt.imread(request.files['file'])
        return im_to_json(im)
    else:
        # return im_to_json(None)
        print('Received request with no image attached')
        return 'Request should have an image file attached'
if __name__ == '__main__':
    # Debug server for local development only (not for production).
    app.run(debug=True)
|
{"/app.py": ["/preprocess.py", "/read_receipt_image.py"]}
|
8,944
|
gulsumsr/receipt-reader
|
refs/heads/master
|
/read_receipt_image.py
|
import sys
import http.client, urllib.request, urllib.parse, urllib.error, base64
import json
import os
import numpy as np
from skimage.transform import rescale
# from skimage.color import rgb2gray
from skimage.measure import LineModelND, ransac
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
# https://azure.microsoft.com/en-us/try/cognitive-services/my-apis/
# Subscription key for the Azure Computer Vision API (see URL above).
API_KEY = os.getenv('AZURE_CV_KEY')
if not API_KEY:
    print('You need to get an azure CV api key and assign it to the environmental variable AZURE_CV_KEY')
def azure_ocr(im_data, key=API_KEY):
    """
    Send image bytes to the Azure Computer Vision OCR endpoint.

    Args:
        im_data: raw image bytes (POST body).
        key: Azure subscription key (defaults to the AZURE_CV_KEY env var).

    Returns:
        The decoded JSON response dict, or None if the request failed.
    """
    headers = {
        'Content-Type': 'application/octet-stream',
        # Fixed: the `key` parameter was ignored — a subscription key was
        # hard-coded here (also a leaked secret). Use the caller-supplied key.
        'Ocp-Apim-Subscription-Key': key,
    }
    params = urllib.parse.urlencode({
        'language': 'unk',
        'detectOrientation ': 'true',  # trailing space preserved from original
    })
    body = im_data
    try:
        conn = http.client.HTTPSConnection('westus.api.cognitive.microsoft.com')
        conn.request("POST", "/vision/v1.0/ocr?%s" % params, body, headers)
        response = conn.getresponse()
        data = response.read()
        response_dict = json.loads(data.decode("utf-8"))
        conn.close()
        return response_dict
    except Exception as e:
        # Fixed: the original formatted e.errno/e.strerror, which most
        # exceptions (e.g. json.JSONDecodeError) do not have and which would
        # raise AttributeError inside the handler. Print the exception itself.
        print("Azure OCR request failed: {}".format(e))
def convert_to_float(str_):
    """Fix common OCR mis-reads (B->8, s->5, o->0 in cents) and parse a float."""
    cleaned = str_.replace(' ', '').replace('B', '8').lower().replace('s', '5')
    # Repair cent digits mis-read as the letter o.
    for garbled in ('.oo', '.o0', '.0o'):
        cleaned = cleaned.replace(garbled, '.00')
    return float(cleaned)
def is_possibly_numeric_word(word_str):
    """True if the word contains a digit after fixing o->0 cent mis-reads."""
    for garbled in ('.oo', '.o0', '.0o'):
        word_str = word_str.replace(garbled, '.00')
    # Only ASCII digits count, matching the original explicit numeral list.
    return any(ch in '0123456789' for ch in word_str)
def mean_line_height(line):
    """Average y-origin (second boundingBox field) of the words in a line."""
    heights = [float(word['boundingBox'].split(',')[1]) for word in line]
    return sum(heights) / len(heights)
def read_receipt(im_data):
    """OCR a receipt image (raw bytes) via Azure and parse it into a dict with
    an 'items' list of {name, price, quantity} plus 'subtotal'/'tax'/'total'
    keys when those lines are recognized.

    Raises:
        ValueError: when the OCR call returns nothing.
    """
    # Hit the Azure API
    response_dict = azure_ocr(im_data)
    if not response_dict:
        raise ValueError('There was a problem reading your photo')
    # Flatten all OCR words, then keep those that look numeric.
    words = []
    for region in response_dict['regions']:
        for line in region['lines']:
            words.extend(line['words'])
    item_price_candidates = [word for word in words if is_possibly_numeric_word(word['text'])]
    # print([price['text'] for price in item_price_candidates])
    # Prices are usually right aligned. Get coords of right bbox edges
    right_pts = [] # centers of bboxs' right sides
    for item_price_candidate in item_price_candidates:
        bbox = list(map(int,(item_price_candidate['boundingBox'].split(','))))
        x0, y0, w, h = bbox
        right_pts.append([x0+w, y0+h/2])
    right_pts = np.array(right_pts)
    # The right margin is a line connecting several of these points;
    # RANSAC keeps only the right-aligned (inlier) candidates as real prices.
    ransac_model, inliers = ransac(right_pts.copy(), LineModelND, min_samples=5, residual_threshold=3, max_trials=50)
    item_prices = [c for c, inlier in zip(item_price_candidates, inliers) if inlier]
    # Compute the y-coord of every word's center
    y_dict = []
    for word in words:
        bbox = list(map(int,(word['boundingBox'].split(','))))
        x0, y0, w, h = bbox
        y_dict.append([(y0 + h/2), word])
    # For each price-word, get words on the same line
    price_lines = []
    for item_price in item_prices:
        bbox = list(map(int,(item_price['boundingBox'].split(','))))
        x0, y0, w, h = bbox
        y = y0 + h/2
        ordered_word_dict = sorted(y_dict, key=lambda x: abs(x[0] - y)) # all words, ordered by y-dist from this word
        # Select words with similar y-coord (within half the price's height).
        margin = h/2
        same_line_words = [word for word_y, word in ordered_word_dict
                           if abs(word_y-y) < margin and
                           word['text'] != item_price['text']]
        price_lines.append([item_price] + same_line_words)
    price_lines = sorted(price_lines, key=mean_line_height)
    # Figure out what the prices are
    items = []
    for line in price_lines:
        price = line[0]
        other_words = line[1:]
        # Left-to-right order so joined words read naturally.
        other_words = sorted(other_words,
                             key=lambda x: int(x['boundingBox'].split(',')[0]))
        if len(price['text']) > 3: # Got the whole price in one word
            try:
                price = convert_to_float(price['text'])
            except ValueError:
                # this probably means a non-price numerical word in the price column,
                # e.g., a date
                continue
            item_name = ' '.join([word['text'] for word in other_words])
        elif len(price['text']) == 2: # Probably got cents, dollars in different word
            try:
                cents = convert_to_float(price['text'].replace(',','').replace('.',''))
            except ValueError:
                # raise NotImplementedError
                continue
            # Get other number-words on the same line
            number_words = [word for word in other_words
                            if is_possibly_numeric_word(word['text'])]
            if not number_words: # No other numbers on this line other than 'price'
                # raise NotImplementedError
                continue
            dollar_word = number_words[-1]
            try:
                dollars = int(dollar_word['text'].replace(',','').replace('.',''))
            except ValueError:
                # raise NotImplementedError
                continue
            price = dollars + 0.01 * cents
            item_name = ' '.join([w['text'] for w in other_words if w != dollar_word])
        # NOTE(review): when len(price['text']) is 1 or 3, neither branch above
        # runs and item_name/price carry over from a previous iteration (or
        # NameError on the first) — confirm this is intended.
        item_name = item_name.replace(',', '') # remove spurious commas (there are a lot)
        item_name = item_name.strip()
        # Sanity bound: drop negative or implausibly large prices.
        if price < 5000 and price >= 0:
            items.append((item_name, price))
    # Adapt for API/json
    receipt_contents = {'items': []}
    for item, price in items:
        if item.lower().startswith('total'): # TODO: include common misspellings
            receipt_contents['total'] = price
        elif 'subtotal' in item.lower().replace('-','').replace(' ',''):
            receipt_contents['subtotal'] = price
        elif 'tax' in item.lower():
            receipt_contents['tax'] = price
        else:
            item_dict = {'name': item, 'price': price, 'quantity': 1}
            receipt_contents['items'].append(item_dict)
    # Handle cases where required fields are absent (all currently no-ops).
    if 'total' not in receipt_contents.keys():
        # the total often has the largest font
        # the total is usually the largest numerical word
        # maybe it's called "due" or something
        pass
    if 'subtotal' not in receipt_contents.keys():
        pass
    if 'tax' not in receipt_contents.keys():
        pass
    if not receipt_contents['items']:
        pass # no items
    return receipt_contents
if __name__=='__main__':
    import io
    # Smoke test: OCR a preprocessed receipt from disk and print the result.
    fn = '../data/receipt_preprocessed.jpg'
    # buf = io.BytesIO()
    # plt.imsave(buf, im)
    # im_data = buf.getvalue()
    with open(fn, 'rb') as f:
        im_data = f.read()
    receipt_dict = read_receipt(im_data)
    for key, val in receipt_dict.items():
        print(key, val)
        print()
|
{"/app.py": ["/preprocess.py", "/read_receipt_image.py"]}
|
8,945
|
gulsumsr/receipt-reader
|
refs/heads/master
|
/preprocess.py
|
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from skimage.color import rgb2gray
from skimage.morphology import disk
from skimage.filters import rank
from skimage.transform import ProjectiveTransform, warp
from skimage.filters import gabor_kernel
from scipy.signal import fftconvolve
from skimage.filters import threshold_otsu
from skimage import measure
from skimage.transform import hough_line, hough_line_peaks
from skimage.morphology import skeletonize
from skimage.exposure import adjust_log
from skimage.util import img_as_ubyte
from skimage.transform import resize
import os
# Input photos are downscaled so their longest side is this many pixels.
MAX_SIDE_LENGTH = 1024
def resize_im(im, max_side):
    """Rescale *im* so its longest side equals *max_side*, preserving aspect."""
    h, w = im.shape[:2]
    scale = float(max_side) / max(h, w)
    return resize(im, (int(round(scale * h)), int(round(scale * w))))
def dist_angle_to_slope_interept(line):
    """Convert a Hough (angle, dist) line to (slope, y_intercept) form."""
    theta, rho = line
    # Line normal makes angle theta with the x-axis; the line itself is
    # perpendicular to it, hence the pi/2 shift.
    return np.tan(theta - np.pi / 2), rho / np.sin(theta)
def line_intersection(line1, line2):
    """
    Compute the point of intersection between two lines
    (in dist-angle representation).

    Returns:
        (x, y) of the intersection, or None when the lines are parallel
        or coincident.
    """
    # Removed: leftover debug prints for the angle==0 ("vertical") cases and a
    # duplicated conversion of line1 that ran inside the debug branch.
    # Slope/intercept form: y = a*x + c and y = b*x + d.
    a, c = dist_angle_to_slope_interept(line1)
    b, d = dist_angle_to_slope_interept(line2)
    if a == b:  # lines are parallel or coincident
        return None
    x = (d - c) / (a - b)
    y = (a*d - b*c) / (a - b)
    return x, y
def line_invrot90(line, im_shape):
    """
    Rotate a line in dist-angle representation in the same sense as np.rot90,
    i.e. map a line detected in a rotated image back toward the original frame.

    Args:
        line: (angle, dist) Hough parameters in the rotated image.
        im_shape: shape of the rotated image the line was detected in.

    Returns:
        (angle, dist) of the equivalent line after undoing one 90-degree turn.
    """
    theta1, d1 = line
    theta2 = np.pi/2 - theta1
    # Degenerate case: the rotated line is exactly vertical.
    if abs(theta2) == 0 :
        return 0, im_shape[0] - d1
    x1, y1 = d1 * np.cos(theta1), d1 * np.sin(theta1)  # NOTE(review): unused
    h1 = d1 / np.cos(theta2)
    h2 = im_shape[0] - h1
    d2 = h2 * np.cos(theta2)
    return -theta2, d2
def edge_response(im, sigma, thetas=np.linspace(-np.pi/10, np.pi/10, 5)):
    """Compute the edge response max-pooled over a range of orientations.

    Args:
        im: 2-D grayscale image.
        sigma: scale of the Gabor kernels.
        thetas: kernel orientations (radians) to pool over.

    Returns:
        Array shaped like *im* holding the per-pixel maximum kernel response.
    """
    kernels = []
    for theta in thetas:
        # Imaginary (odd-symmetric) part of the Gabor filter responds to edges.
        kern = gabor_kernel(.1/sigma, theta=theta, sigma_x=sigma, sigma_y=2*sigma, n_stds=2).imag
        kern = np.rot90(kern, 3)
        # Pre-flip so fftconvolve (true convolution) applies the kernel as-is.
        kernels.append(np.fliplr(np.flipud(kern)))
    # kernel responses, max pooled over orientations
    resp_im = np.zeros_like(im)
    for kern in kernels:
        resp = fftconvolve(im, kern, mode='same')
        resp_im = np.maximum(resp, resp_im)
    return resp_im
def brightest_object_mask(gray):
    """
    Threshold a grayscale response image and return a mask of the brightest
    object, i.e. the connected component with the largest summed response.

    Raises:
        ValueError: if thresholding produces no connected components.
    """
    edges = gray > threshold_otsu(gray)
    # create a mask containing the object with the strongest response
    label_im = measure.label(edges)
    regions = measure.regionprops(label_im)
    if len(regions) == 0:
        raise ValueError('mask must have at least one object')
    max_resp = 0
    for region in regions:
        lbl = region.label
        mask = label_im == lbl
        region_resp = gray[mask].sum()
        if region_resp > max_resp:
            max_resp = region_resp
            max_region = region
    # NOTE(review): if every region's summed response is <= 0, max_region is
    # never assigned and the next line raises UnboundLocalError — confirm
    # the input is always a non-negative response map.
    largest_object_mask = label_im == max_region.label
    return largest_object_mask
def best_horizontal_line(im, theta_range=np.pi/10, n_theta=5):
    """Find the dominant horizontal (dark-above-bright) line in an image.

    Returns:
        The strongest near-horizontal Hough line as (angle, dist), or None
        when no near-horizontal candidate exists.
    """
    # Compute horizontal edges, get biggest outline
    resp_im = edge_response(im, sigma=5, thetas=np.linspace(-theta_range, theta_range, n_theta))
    outline_mask = brightest_object_mask(resp_im)
    # FIXME: if outline map overlaps the top of im, this means that the receipt
    # is not centered correctly (receipt edge outside frame)
    # Hough transform
    h, theta, d = hough_line(skeletonize(outline_mask), theta=np.linspace(-np.pi/2, np.pi/2, 180))
    _, angles, dists = hough_line_peaks(h, theta, d, threshold=0.1 * h.max(), num_peaks=10)
    # Compute gradient strength along each Hough line
    line_strength_dict = {}
    for angle, dist in zip(angles, dists):
        # Intersect the line with the left/right image borders, clamp the y's
        # to the image height, and average the response along that profile.
        y0 = (dist - 0 * np.cos(angle)) / np.sin(angle)
        y1 = (dist - outline_mask.shape[1] * np.cos(angle)) / np.sin(angle)
        y0 = min([y0, outline_mask.shape[0]])
        y1 = min([y1, outline_mask.shape[0]])
        pt1 = (y0, 0)
        pt2 = (y1, outline_mask.shape[1])
        line_profile = measure.profile_line(resp_im, pt1, pt2)
        line_strength_dict[(angle, dist)] = line_profile.mean()
    # Keep only lines whose angle is within theta_range of horizontal.
    horizontal_lines = [line for line in line_strength_dict if np.pi/2 - abs(line[0]) < theta_range]
    try:
        best_line = max(horizontal_lines, key=lambda x: line_strength_dict[x])
    except ValueError:
        # max() raised on an empty candidate list: no horizontal line found.
        return None
    return best_line
def get_receipt_edges(gray):
    """The (straight-line) edges of a centered 4-sided white object.

    Splits the image into halves, rotates each so the sought edge is
    horizontal with the receipt interior below, finds the best line, then
    rotates the lines back and corrects for the half-image offsets.

    Returns:
        (top_line, right_line, bottom_line, left_line), each in (angle, dist) form.
    """
    # TODO: disk/selem size should be fraction of input size
    median = rank.median(gray, disk(11))
    # Divide into halves
    top_im = median[:int(median.shape[0]/2), :]
    bottom_im = median[int(median.shape[0]/2):, :]
    left_im = median[:, :int(median.shape[1]/2)]
    right_im = median[:, int(median.shape[1]/2):]
    # Rotate so center is down, detect best horizontal line
    # NOTE(review): best_horizontal_line may return None; the unrotation calls
    # below would then fail — confirm inputs always contain detectable edges.
    top_line = best_horizontal_line(top_im)
    left_line = best_horizontal_line(np.rot90(left_im,3))
    right_line = best_horizontal_line(np.rot90(right_im))
    bottom_line = best_horizontal_line(np.rot90(bottom_im,2))
    # Rotate back to original orientation (one inverse rotation per 90° applied)
    right_line = line_invrot90(right_line, np.rot90(right_im).shape)
    bottom_line = line_invrot90(line_invrot90(bottom_line, np.rot90(bottom_im,2).shape), np.rot90(bottom_im).shape)
    left_line = line_invrot90(line_invrot90(line_invrot90(left_line, np.rot90(left_im).shape), np.rot90(left_im,2).shape), np.rot90(left_im,3).shape)
    # Correct for offset/cropping
    right_line = right_line[0], right_line[1] + np.cos(right_line[0])*gray.shape[1]/2
    bottom_line = bottom_line[0], + bottom_line[1] + np.sin(bottom_line[0]) * gray.shape[0]/2
    return top_line, right_line, bottom_line, left_line
def preprocess_image(im):
    """Straighten and enhance a photographed receipt.

    Detects the receipt's four edges, warps the quadrilateral onto a
    rectangle, and log-adjusts contrast.

    Args:
        im: RGB image array.

    Returns:
        uint8 grayscale image of the rectified receipt.
    """
    im = resize_im(im, MAX_SIDE_LENGTH)
    gray = rgb2gray(im)
    # Get the edges of the receipt
    top_line, right_line, bottom_line, left_line = get_receipt_edges(gray)
    # Intersect to get corners
    TR = line_intersection(top_line, right_line)
    TL = line_intersection(top_line, left_line)
    BR = line_intersection(bottom_line, right_line)
    BL = line_intersection(bottom_line, left_line)
    # Warp so receipt corners are image corners
    transform = ProjectiveTransform()
    height = max([BL[1] - TL[1], BR[1] - TR[1]])
    # NOTE(review): the second candidate mixes y-coordinates (BR[1] - BL[1])
    # into a width computation — it looks like it should be BR[0] - BL[0];
    # confirm against the corner convention used by line_intersection.
    width = max([TR[0] - TL[0], BR[1] - BL[1]])
    src_pts = np.array([TL, TR, BL, BR])
    dest_pts = np.array([[0, 0],
                         [width, 0],
                         [0, height],
                         [width, height]
                         ])
    success = transform.estimate(src_pts, dest_pts)  # NOTE(review): result unchecked
    warped_im = warp(gray, transform.inverse)[:int(height), :int(width)]
    # NOTE(review): warped_im is already 2-D grayscale here, so this rgb2gray
    # call appears redundant — confirm.
    warped_gray = rgb2gray(warped_im)
    enhanced_gray = img_as_ubyte(adjust_log(warped_gray))
    return enhanced_gray
if __name__=='__main__':
    # Smoke test: rectify a sample receipt and save it next to the original
    # with a '_preprocessed' suffix.
    in_fn = '../data/receipt.jpg'
    im = plt.imread(in_fn)
    print(im.shape)
    enhanced_gray = preprocess_image(im)
    fname, ext = os.path.splitext(in_fn)
    out_fn = fname + '_preprocessed' + ext
    print(out_fn)
    plt.imsave(out_fn, enhanced_gray)
|
{"/app.py": ["/preprocess.py", "/read_receipt_image.py"]}
|
8,950
|
spidezad/filesdownloader
|
refs/heads/master
|
/filesdownloader/dl_files_to_dir.py
|
"""
filesdownloader
Function to download files from web to target directory.
Enable async download of multiple files.
Required: requests, grequests
"""
import os, sys, re
import string
import random
import requests, grequests
from functools import partial
# Impersonate a desktop Chrome browser so servers do not block the downloads.
USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36'
headers = { 'User-Agent': USER_AGENT }
def dl_files_to_dir(urllist, tgt_folder, job_size = 100):
    """
    Download all files from list of url link to tgt dir
    asynchronously via grequests. NOTE: Python 2 code (print statements).
    Args:
        urllist: list of the url retrieved from the google image search
        tgt_folder: dir at which the files to be stored
    Kwargs:
        job_size: (int) number of downloads to spawn.
    """
    if len(urllist) == 0:
        print "No links in urllist"
        return
    def dl_file(r, folder_dir, filename, *args, **kwargs):
        # grequests response hook: stream the response body to folder_dir/filename.
        fname = os.path.join(folder_dir, filename)
        with open(fname, 'wb') as my_file:
            # Read by 4KB chunks
            for byte_chunk in r.iter_content(chunk_size=1024*10):#4096?
                if byte_chunk:
                    my_file.write(byte_chunk)
                    my_file.flush()
                    os.fsync(my_file)
        r.close()
    do_stuff = []
    for run_num, tgt_url in enumerate(urllist):
        print tgt_url
        # handle the tgt url to be use as basename
        basename = os.path.basename(tgt_url)
        file_name = re.sub('[^A-Za-z0-9.]+', '_', basename ) #prevent special characters in filename
        # Build the async request; the hook writes the file as bytes stream in.
        action_item = grequests.get(tgt_url, hooks={'response': partial(dl_file, folder_dir = tgt_folder, filename=file_name)}, headers= headers, stream=True)
        do_stuff.append(action_item)
    grequests.map(do_stuff, size=job_size)
    print "All downloads completed"
if __name__ == "__main__":
    # Example usage (Python 2): fill urllist with the URLs to fetch.
    urllist = []
    tgt_folder = r'C:\data\temp\strip_site\webdev4'
    dl_files_to_dir(urllist, tgt_folder, job_size = 100)
|
{"/filesdownloader/__init__.py": ["/filesdownloader/dl_files_to_dir.py"]}
|
8,951
|
spidezad/filesdownloader
|
refs/heads/master
|
/filesdownloader/__init__.py
|
from .dl_files_to_dir import dl_files_to_dir
|
{"/filesdownloader/__init__.py": ["/filesdownloader/dl_files_to_dir.py"]}
|
8,956
|
dorajam/few-shot-link-prediction
|
refs/heads/main
|
/models/rgcn_model.py
|
"""
File based off of dgl tutorial on RGCN
Source: https://github.com/dmlc/dgl/tree/master/examples/pytorch/rgcn
"""
import abc
import torch
import torch.nn as nn
import torch.nn.functional as F
import dgl
class RGCN(nn.Module):
    """Relational GCN encoder over a DGL graph using basis-decomposition
    layers; adapted from the dgl RGCN example (see module docstring)."""

    def __init__(self, parameter, is_module=True):
        super(RGCN, self).__init__()
        self.inp_dim = parameter['embed_dim']
        self.emb_dim = parameter['rgcn_embed_dim']
        self.attn_rel_emb_dim = parameter['rgcn_embed_dim']
        self.num_rels = parameter['num_relations']
        self.num_bases = parameter['num_bases']
        self.num_hidden_layers = parameter['num_gcn_layers']
        self.dropout = parameter['dropout']
        self.edge_dropout = parameter['edge_dropout']
        self.neighborhood_sample_rate = parameter['neighborhood_sample_rate']
        self.has_attn = parameter['has_attn']
        # When False, forward() detaches final embeddings and caches them on
        # the parent graph under 'repr'.
        self.is_module = is_module
        self.device = parameter['device']
        if self.has_attn:
            # Relation embeddings used only to score edge attention.
            self.attn_rel_emb = nn.Embedding(self.num_rels, self.attn_rel_emb_dim, sparse=False)
        else:
            self.attn_rel_emb = None
        # initialize aggregators for input and hidden layers
        if parameter['gnn_agg_type'] == "sum":
            self.aggregator = SumAggregator(self.emb_dim)
        elif parameter['gnn_agg_type'] == "mlp":
            self.aggregator = MLPAggregator(self.emb_dim)
        elif parameter['gnn_agg_type'] == "gru":
            self.aggregator = GRUAggregator(self.emb_dim)
        # create rgcn layers
        self.build_model()

    def build_model(self):
        """Stack one input layer plus (num_gcn_layers - 1) hidden layers."""
        self.layers = nn.ModuleList()
        # i2h
        i2h = self.build_input_layer()
        if i2h is not None:
            self.layers.append(i2h)
        # h2h
        for idx in range(self.num_hidden_layers - 1):
            h2h = self.build_hidden_layer(idx)
            self.layers.append(h2h)

    def build_input_layer(self):
        """Layer mapping raw node features ('feat') into the embedding space."""
        return RGCNBasisLayer(self.inp_dim,
                              self.emb_dim,
                              self.aggregator,
                              self.attn_rel_emb_dim,
                              self.num_rels,
                              self.num_bases,
                              activation=F.relu,
                              dropout=self.dropout,
                              edge_dropout=self.edge_dropout,
                              is_input_layer=True,
                              has_attn=self.has_attn,
                              attn_rel_emb=self.attn_rel_emb)

    def build_hidden_layer(self, idx):
        """Hidden layer operating on the 'h' embeddings (emb_dim -> emb_dim)."""
        return RGCNBasisLayer(self.emb_dim,
                              self.emb_dim,
                              self.aggregator,
                              self.attn_rel_emb_dim,
                              self.num_rels,
                              self.num_bases,
                              activation=F.relu,
                              dropout=self.dropout,
                              edge_dropout=self.edge_dropout,
                              has_attn=self.has_attn,
                              attn_rel_emb=self.attn_rel_emb)

    def forward(self, batch_nodes, g):
        """Encode *batch_nodes* by sampled message passing over graph *g*.

        Returns:
            Tuple of (final-layer node embeddings 'h', parent node ids of
            that layer in *g*).
        """
        g.readonly(True)
        # batch_size == len(batch_nodes), so a single NodeFlow covers the batch.
        for j, nf in enumerate(dgl.contrib.sampling.NeighborSampler(g, batch_size=len(batch_nodes),
                                                                    expand_factor=self.neighborhood_sample_rate,
                                                                    num_hops=self.num_hidden_layers,
                                                                    seed_nodes=batch_nodes)):
            nf.copy_from_parent()
            # Propagate each layer across its slice of the flow.
            for i, layer in enumerate(self.layers):
                nf.prop_flow(message_funcs=layer.msg_func,
                             reduce_funcs=layer.aggregator,
                             apply_node_funcs=layer.node_update,
                             flow_range=slice(i, self.num_hidden_layers, 1))
            if not self.is_module:
                # Cache detached final representations back on the parent graph.
                nf.layers[-1].data['repr'] = nf.layers[-1].data['h'].detach()
                nf.copy_to_parent(node_embed_names=[[], [], ['repr']], edge_embed_names=None)
        return nf.layers[-1].data.pop('h'), nf.layer_parent_nid(-1)
class RGCNBasisLayer(nn.Module):
    """One RGCN layer with basis-decomposed relation weights, a self-loop
    weight, optional edge dropout and optional per-edge attention."""

    def __init__(self, inp_dim, out_dim, aggregator, attn_rel_emb_dim, num_rels, num_bases=-1, bias=None,
                 activation=None, dropout=0.0, edge_dropout=0.0, is_input_layer=False, has_attn=False, attn_rel_emb=None):
        super(RGCNBasisLayer, self).__init__()
        self.bias = bias
        self.activation = activation
        # NOTE(review): once self.bias is replaced by a Parameter below, later
        # `if self.bias:` tests evaluate tensor truthiness (RuntimeError for
        # out_dim > 1), and xavier init on a 1-D tensor raises — the bias path
        # appears unused (default None); confirm before enabling it.
        if self.bias:
            self.bias = nn.Parameter(torch.Tensor(out_dim))
            nn.init.xavier_uniform_(self.bias,
                                    gain=nn.init.calculate_gain('relu'))
        self.aggregator = aggregator
        if dropout:
            self.dropout = nn.Dropout(dropout)
        else:
            self.dropout = None
        if edge_dropout:
            self.edge_dropout = nn.Dropout(edge_dropout)
        else:
            self.edge_dropout = nn.Identity()
        self.inp_dim = inp_dim
        self.out_dim = out_dim
        self.attn_rel_emb_dim = attn_rel_emb_dim
        self.num_rels = num_rels
        self.num_bases = num_bases
        self.is_input_layer = is_input_layer
        self.has_attn = has_attn
        # add basis weights
        # self.weight = basis_weights
        self.weight = nn.Parameter(torch.Tensor(self.num_bases, self.inp_dim, self.out_dim))
        self.w_comp = nn.Parameter(torch.Tensor(self.num_rels, self.num_bases))
        self.self_loop_weight = nn.Parameter(torch.Tensor(self.inp_dim, self.out_dim))
        if self.has_attn:
            self.attn_rel_emb = attn_rel_emb
            # Two-layer MLP scoring (src, dst, relation-embedding) triples.
            self.A = nn.Linear(2 * self.inp_dim + self.attn_rel_emb_dim, inp_dim)
            self.B = nn.Linear(inp_dim, 1)
        nn.init.xavier_uniform_(self.self_loop_weight, gain=nn.init.calculate_gain('relu'))
        nn.init.xavier_uniform_(self.weight, gain=nn.init.calculate_gain('relu'))
        nn.init.xavier_uniform_(self.w_comp, gain=nn.init.calculate_gain('relu'))

    def msg_func(self, edges):
        """DGL message function: per-edge relation-specific message, the
        destination's self-loop embedding, and an attention weight alpha."""
        # Recompose per-relation weights from the shared bases.
        weight = self.weight.view(self.num_bases, self.inp_dim * self.out_dim)
        weight = torch.matmul(self.w_comp, weight).view(self.num_rels, self.inp_dim, self.out_dim)
        w = weight.index_select(0, edges.data['type'])
        # Input layer reads raw 'feat'; hidden layers read 'h'.
        input_ = 'feat' if self.is_input_layer else 'h'
        msg = self.edge_dropout(torch.ones(len(edges), 1).to(self.weight.device)) * torch.bmm(edges.src[input_].unsqueeze(1), w).squeeze(1)
        curr_emb = torch.mm(edges.dst[input_], self.self_loop_weight) # (B, F)
        if self.has_attn:
            e = torch.cat([edges.src[input_], edges.dst[input_], self.attn_rel_emb(edges.data['type'])], dim=1)
            a = torch.sigmoid(self.B(F.relu(self.A(e))))
        else:
            # Attention disabled: weight all messages equally.
            a = torch.ones((len(edges), 1)).to(device=w.device)
        return {'curr_emb': curr_emb, 'msg': msg, 'alpha': a}

    def node_update(self, node):
        """DGL apply-node function: optional bias and activation on 'h'."""
        # apply bias and activation
        node_repr = node.data['h']
        if self.bias:
            node_repr = node_repr + self.bias
        if self.activation:
            node_repr = self.activation(node_repr)
        # if self.dropout:
        #     node_repr = self.dropout(node_repr)
        node.data['h'] = node_repr

    # def forward(self, g, attn_rel_emb=None):
    #     self.propagate(g, attn_rel_emb)
    #     # apply bias and activation
    #     node_repr = g.ndata['h']
    #     if self.bias:
    #         node_repr = node_repr + self.bias
    #     if self.activation:
    #         node_repr = self.activation(node_repr)
    #     # if self.dropout:
    #     #     node_repr = self.dropout(node_repr)
    #     g.ndata['h'] = node_repr
    #     if self.is_input_layer:
    #         g.ndata['repr'] = g.ndata['h'].unsqueeze(1)
    #     else:
    #         g.ndata['repr'] = torch.cat([g.ndata['repr'], g.ndata['h'].unsqueeze(1)], dim=1)
class Aggregator(nn.Module):
    """Base message aggregator: combines a node's self-loop embedding with the
    attention-weighted sum of its neighbour messages into a new embedding.
    Subclasses implement update_embedding()."""

    def __init__(self, emb_dim):
        super(Aggregator, self).__init__()

    def forward(self, node):
        # First row of the 'curr_emb' mailbox carries the node's self-loop
        # embedding (identical across incoming edges).
        curr_emb = node.mailbox['curr_emb'][:, 0, :]  # (B, F)
        # Attention-weighted sum of incoming messages.
        nei_msg = torch.bmm(node.mailbox['alpha'].transpose(1, 2), node.mailbox['msg']).squeeze(1)  # (B, F)
        # nei_msg, _ = torch.max(node.mailbox['msg'], 1)  # (B, F)
        new_emb = self.update_embedding(curr_emb, nei_msg)
        return {'h': new_emb}

    @abc.abstractmethod
    def update_embedding(self, curr_emb, nei_msg):
        # Fixed: the abstract signature was missing `self`, so any bound call
        # mis-bound the instance into curr_emb (TypeError with both args given).
        raise NotImplementedError
class SumAggregator(Aggregator):
    """Combine by elementwise sum of self-embedding and neighbour message."""

    def __init__(self, emb_dim):
        super(SumAggregator, self).__init__(emb_dim)

    def update_embedding(self, curr_emb, nei_msg):
        return curr_emb + nei_msg
class MLPAggregator(Aggregator):
    """Combine via one ReLU-activated linear layer over the concatenation
    of neighbour message and self-embedding."""

    def __init__(self, emb_dim):
        super(MLPAggregator, self).__init__(emb_dim)
        self.linear = nn.Linear(2 * emb_dim, emb_dim)

    def update_embedding(self, curr_emb, nei_msg):
        combined = torch.cat((nei_msg, curr_emb), 1)
        return F.relu(self.linear(combined))
class GRUAggregator(Aggregator):
    """Combine via a GRU cell: neighbour message as input, self-embedding as state."""

    def __init__(self, emb_dim):
        super(GRUAggregator, self).__init__(emb_dim)
        self.gru = nn.GRUCell(emb_dim, emb_dim)

    def update_embedding(self, curr_emb, nei_msg):
        return self.gru(nei_msg, curr_emb)
|
{"/models/relation_meta_learner.py": ["/models/modules.py"], "/models/meta_learner.py": ["/models/relation_meta_learner.py", "/models/rgcn_model.py"], "/trainer.py": ["/utils.py"], "/models/modules.py": ["/models/rgcn_model.py"], "/main.py": ["/data_loader.py", "/trainer.py", "/models/meta_learner.py", "/utils.py"]}
|
8,957
|
dorajam/few-shot-link-prediction
|
refs/heads/main
|
/data_loader.py
|
import copy
import json
import random
import dgl
import numpy as np
import pandas as pd
import torch
from networkx.algorithms.components import connected_components
import community as community_louvain
def read_dataset(dataset, data_mode, add_inverse_edges):
    """Load a few-shot link-prediction dataset from ./data/<dataset>.

    Args:
        dataset: dataset directory name under ./data.
        data_mode: 'In-Train' switches to the *_in_train task/candidate files;
            'Pre-Train' additionally loads pretrained entity embeddings.
        add_inverse_edges: add a reversed edge (with shifted relation id)
            for every triple in the background graph.

    Returns:
        Dict with task splits, candidates, e1rel_e2 lookups, ent/rel id maps
        and the DGL background graph (plus 'ent2emb' in Pre-Train mode).
    """
    data_path = "./data/" + dataset
    data_dir = {
        'train_tasks_in_train': '/train_tasks_in_train.json',
        'train_tasks': '/train_tasks.json',
        'test_tasks': '/test_tasks.json',
        'dev_tasks': '/dev_tasks.json',
        'rel2candidates_in_train': '/rel2candidates_in_train.json',
        'rel2candidates': '/rel2candidates.json',
        'e1rel_e2_in_train': '/e1rel_e2_in_train.json',
        'e1rel_e2': '/e1rel_e2.json',
        'ent2ids': '/ent2ids',
        'ent2vec': '/ent2vec.npy',
    }
    # Resolve each relative file name against the dataset directory.
    for k, v in data_dir.items():
        data_dir[k] = data_path + v
    tail = ''
    if data_mode == 'In-Train':
        tail = '_in_train'
    dataset = dict()
    print("loading train_tasks{} ... ...".format(tail))
    dataset['train_tasks'] = json.load(open(data_dir['train_tasks' + tail]))
    print("loading test_tasks ... ...")
    dataset['test_tasks'] = json.load(open(data_dir['test_tasks']))
    print("loading dev_tasks ... ...")
    dataset['dev_tasks'] = json.load(open(data_dir['dev_tasks']))
    print("loading rel2candidates{} ... ...".format(tail))
    dataset['rel2candidates'] = json.load(open(data_dir['rel2candidates' + tail]))
    print("loading e1rel_e2{} ... ...".format(tail))
    dataset['e1rel_e2'] = json.load(open(data_dir['e1rel_e2' + tail]))
    # load background graph
    print("preparing background graph {} ... ...".format(tail))
    whole_graph = json.load(open(data_dir['train_tasks_in_train']))
    if data_mode == 'Pre-Train':
        print('loading embedding ... ...')
        dataset['ent2emb'] = np.load(data_dir['ent2vec'])
    # additional params: relation ids cover train + dev + test relations.
    all_tasks = copy.deepcopy(whole_graph)
    all_tasks.update(dataset['dev_tasks'])
    all_tasks.update(dataset['test_tasks'])
    rel2id = get_relation_ids(all_tasks, data_path)
    dataset['ent2id'] = json.load(open(data_dir['ent2ids']))
    dataset['rel2id'] = rel2id
    dataset['background'] = build_background_graph(whole_graph, dataset['ent2id'], dataset['rel2id'], add_transpose=add_inverse_edges)
    return dataset
def get_relation_ids(all_tasks, data_path):
    """Assign a dense integer id to every relation and persist both mappings.

    Writes <data_path>/rel2idx.json and <data_path>/idx2rel.json as a side
    effect, then returns the relation -> id dict (ids follow dict insertion
    order of `all_tasks`).
    """
    all_relations = all_tasks.keys()
    rel2idx = {rel: idx for idx, rel in enumerate(all_relations)}
    idx2rel = {idx: rel for idx, rel in enumerate(all_relations)}
    # `with` guarantees flush + close; the original leaked both file handles.
    with open(data_path + '/rel2idx.json', 'w') as fout:
        json.dump(rel2idx, fout)
    with open(data_path + '/idx2rel.json', 'w') as fout:
        json.dump(idx2rel, fout)
    return rel2idx
def build_background_graph(whole_graph, ent2id, rel2id, add_transpose=False):
    """Build a DGL multigraph over all entities with one edge per triple.

    Args:
        whole_graph: dict relation-name -> list of (h, r, t) string triples.
        ent2id / rel2id: mappings from entity / relation names to int ids.
        add_transpose: if True, also add a reversed edge per triple whose
            'type' id is offset by len(rels), i.e. inverse relations live in
            their own id range above the forward ones.

    Returns:
        dgl.DGLGraph whose edges carry an integer 'type' feature (relation id).
    """
    background_graph = dgl.DGLGraph(multigraph=True)
    # One node per known entity, indexed by its ent2id value.
    background_graph.add_nodes(len(ent2id.keys()))
    rels = set(whole_graph.keys())
    for rel in rels:
        background_triples = whole_graph[rel]
        # Rows of [head_id, rel_id, tail_id] for this relation's triples.
        background_triple_ids = np.array([[ent2id[h], rel2id[r], ent2id[t]] for (h, r, t) in background_triples])
        background_graph.add_edges(background_triple_ids[:, 0], background_triple_ids[:, 2], {'type': torch.tensor(background_triple_ids[:, 1])})
        if add_transpose:
            # Inverse edge tail->head with relation id shifted past all forward ids.
            background_graph.add_edges(background_triple_ids[:, 2], background_triple_ids[:, 0], {'type': torch.tensor(len(rels) + background_triple_ids[:, 1])})
    # NOTE(review): max(type)+1 assumes relation ids are contiguous from 0 —
    # holds for ids produced by get_relation_ids; confirm for other sources.
    print('Background graph created for {} relations.'.format(max(background_graph.edata['type']) + 1))
    return background_graph
class SyntheticDataLoader(object):
    """Eval loader over SYNTHETIC relations probing relational patterns.

    For every original eval relation it fabricates one symmetric, one
    transitive and one positional (community-based) relation with a
    comparable number of triples, so a trained model can be tested for
    specific inductive patterns. Interface mirrors DataLoader's eval path
    (next_one_on_eval).
    """

    def __init__(self, dataset, parameter, step='train'):
        self.ent2id = dataset['ent2id']
        self.id2ent = {str(idx): ent for ent, idx in self.ent2id.items()}
        # Louvain communities over the largest connected component of the
        # training background graph; drive the "positional" relations.
        self.communities, self.entities = self.get_communities(dataset)
        self.num_entities = len(self.entities)
        self.curr_rel_idx = 0
        original_test_tasks = dataset[step + '_tasks']
        print('Generating synthetic relations ...')
        # symmetric
        symmetric_rels = ['symmetric_rel_' + str(idx) for idx in range(len(original_test_tasks))]
        symmetric_tasks = self.generate_symmetric_triples(symmetric_rels, original_test_tasks)
        # transitive
        transitive_rels = ['transitive_rel_' + str(idx) for idx in range(len(original_test_tasks))]
        transitive_tasks = self.generate_transitive_triples(transitive_rels, original_test_tasks)
        # positional
        positional_rels = ['positional_rel_' + str(idx) for idx in range(len(original_test_tasks))]
        positional_tasks = self.generate_positional_triples(positional_rels, original_test_tasks, dataset)
        # merge tasks
        self.tasks = symmetric_tasks
        self.tasks.update(transitive_tasks)
        self.tasks.update(positional_tasks)
        self.all_rels = self.tasks.keys()
        self.num_rels = len(self.tasks)
        print('Finished generating {} synthetic relations'.format(self.num_rels))
        # Negative ids keep synthetic relations disjoint from real ones.
        # TODO: confirm these negative ids are not problematic (note that
        # -0 == 0, so the first synthetic relation shares id 0 with a real one).
        self.rel2id = {rel: -idx for idx, rel in enumerate(self.all_rels)}
        self.id2rel = {str(idx): rel for rel, idx in self.rel2id.items()}
        # build dict for synthetic "observed" triples: (head+rel) -> list of true tails.
        # BUG FIX: the original mapped each key to a single tail STRING, so the
        # later `negative not in self.e1rel_e2[...]` filter was a substring test
        # and earlier tails for the same (head, rel) were silently dropped.
        all_triples = [tri for task in self.tasks.values() for tri in task]
        e1rel_e2 = {}
        for e1, rel, e2 in all_triples:
            e1rel_e2.setdefault(e1 + rel, []).append(e2)
        self.e1rel_e2 = e1rel_e2
        # new uniformly sampled candidate tails
        original_rel2candidates = dataset['rel2candidates']
        original_test_rel2candidates = [original_rel2candidates[rel] for rel in original_test_tasks.keys()]
        self.rel2candidates = self.generate_synthetic_candidates(self.all_rels, original_test_rel2candidates)
        # parameters
        self.few = parameter['few']
        self.bs = parameter['batch_size']
        self.nq = parameter['num_query']
        self.seed = parameter['seed']
        # for each rel: use test triples[k_shot:] as query, and test triples[:k_shot] as support
        if step != 'train':
            self.eval_triples = []
            for rel in self.all_rels:
                self.eval_triples.extend(self.tasks[rel][self.few:])
            self.num_tris = len(self.eval_triples)
            self.curr_tri_idx = 0

    def get_communities(self, dataset):
        """Cluster the training background graph with Louvain.

        Returns (node_clusters, nodes): a list of node-id lists (one per
        community) and an np.array of all node ids in the largest connected
        component.
        """
        train_tasks = dataset['train_tasks']
        rel2id = dataset['rel2id']  # for non-synthetic rels
        nell_graph = build_background_graph(train_tasks, self.ent2id, rel2id)
        print('Converting to a networkx graph ....')
        nx_nell_g = nell_graph.to_networkx().to_undirected()
        print('Generating communities ....')
        components = list(connected_components(nx_nell_g))
        largest_component = sorted(components, key=lambda r: -len(r))[0]
        # Operating on subgraph of largest component
        new_graph = nx_nell_g.subgraph(largest_component)
        new_nodes = new_graph.nodes
        print('Taking largest component of size: ', len(new_nodes))
        # Fixed seed keeps the community assignment reproducible across runs.
        communities = community_louvain.best_partition(new_graph, random_state=42)
        community_df = [(node, community_id) for node, community_id in communities.items()]
        community_df = pd.DataFrame(community_df, columns=['node', 'community_id'])
        print('Generating {} number of communities...'.format(community_df['community_id'].max() + 1))
        community_sizes = community_df.groupby('community_id').agg(count=('node', 'count'))
        print('Mean community size: ', community_sizes['count'].mean())
        print('Median community size: ', community_sizes['count'].median())
        print('Min community size: ', community_sizes['count'].min())
        print('Max community size: ', community_sizes['count'].max())
        node_clusters = community_df.groupby('community_id').apply(lambda c: list(c['node'])).values.tolist()
        assert len(new_nodes) == len([e for ls in node_clusters for e in ls]), 'Clusters should only contain nodes in the largest component.'
        return node_clusters, np.array(new_nodes)

    def generate_symmetric_triples(self, relations, original_test_tasks):
        """Create symmetric relations: every sampled (h, r, t) also adds (t, r, h)."""
        # only sample half of the required entities, as we add its symmetric counterpart
        num_triples_to_sample = [r for r in map(lambda r: len(r) // 2, original_test_tasks.values())]
        tasks = {rel: [] for rel in relations}
        for idx, rel in enumerate(relations):
            num_triples = num_triples_to_sample[idx]
            for _ in range(num_triples):
                # sample H,T for synthetic triples; rejection loop enforces
                # two DISTINCT entities (np.random.choice without replacement
                # was too slow at this scale).
                while True:
                    ids = np.random.randint(0, self.num_entities, size=2)
                    if len(set(ids)) == 2:
                        break
                entities = self.entities[ids]
                # adds symmetric triples
                tasks[rel].append([self.id2ent[str(entities[0])], rel, self.id2ent[str(entities[1])]])
                tasks[rel].append([self.id2ent[str(entities[1])], rel, self.id2ent[str(entities[0])]])
        return tasks

    def generate_positional_triples(self, relations, original_test_tasks, dataset):
        """Create positional relations: head and tail are drawn from the SAME
        Louvain community, so the relation encodes graph locality."""
        num_triples_to_sample = [r for r in map(lambda r: len(r), original_test_tasks.values())]
        tasks = {rel: [] for rel in relations}
        for idx, rel in enumerate(relations):
            num_triples = num_triples_to_sample[idx]
            # sample a community per triple (uniform over communities).
            # FIX: np.random.choice over a ragged list-of-lists builds an
            # object array, which newer NumPy rejects — sample indices instead.
            community_ids = np.random.choice(len(self.communities), size=num_triples, replace=True)
            sampled_communities = [self.communities[c] for c in community_ids]
            # sample h,t
            for c_i in sampled_communities:
                while True:
                    ids = np.random.randint(0, len(c_i), size=2)  # rejection loop: need 2 distinct
                    if len(set(ids)) == 2:
                        break
                entities = np.array(c_i)[ids]
                # add triple
                tasks[rel].append([self.id2ent[str(entities[0])], rel, self.id2ent[str(entities[1])]])
        return tasks

    def generate_transitive_triples(self, relations, original_test_tasks):
        """Create transitive relations: for distinct (a, b, c) add
        (a,r,b), (b,r,c) and the entailed (a,r,c)."""
        # only sample 1/3 of the required entities, as we add its transitive counterparts
        num_triples_to_sample = [r for r in map(lambda r: len(r) // 3, original_test_tasks.values())]
        tasks = {rel: [] for rel in relations}
        for idx, rel in enumerate(relations):
            num_triples = num_triples_to_sample[idx]
            for _ in range(num_triples):
                # sample h,t — rejection loop enforces 3 distinct entities
                while True:
                    ids = np.random.randint(0, self.num_entities, size=3)
                    if len(set(ids)) == 3:
                        break
                entities = self.entities[ids]
                # adds transitive triples
                if len(entities) == 3:
                    tasks[rel].append([self.id2ent[str(entities[0])], rel, self.id2ent[str(entities[1])]])
                    tasks[rel].append([self.id2ent[str(entities[1])], rel, self.id2ent[str(entities[2])]])
                    tasks[rel].append([self.id2ent[str(entities[0])], rel, self.id2ent[str(entities[2])]])
        return tasks

    def generate_synthetic_candidates(self, relations, original_rel2candidates):
        """Uniformly sample candidate tails per synthetic relation, mirroring
        the candidate-set sizes of the original eval relations."""
        # tile samples to generate for each synthetic type (symmetric/transitive/positional)
        num_candidates_to_sample = np.tile([len(cands) for cands in original_rel2candidates], 3)
        rel2candidates = {}
        for idx, rel in enumerate(relations):
            num_cands = num_candidates_to_sample[idx]
            curr = num_cands
            final_ids = np.array([])
            while True:
                ids = np.random.randint(0, self.num_entities, size=curr)  # not without replacement
                curr = curr - len(set(ids))
                final_ids = np.append(final_ids, ids)
                if curr == 0:
                    break
            # NOTE(review): final_ids can exceed num_cands and contain
            # duplicates across rounds; kept as-is to preserve behavior.
            # FIX: `np.int` was removed in NumPy 1.24 — the builtin `int`
            # is the documented replacement.
            sampled_entities = self.entities[final_ids.astype(int)]
            rel2candidates[rel] = [self.id2ent[str(ent)] for ent in sampled_entities]
        return rel2candidates

    def get_id(self, triplets):
        """Map (head, rel, tail) string triples to [h_id, r_id, t_id] lists."""
        return [[self.ent2id[h], self.rel2id[r], self.ent2id[t]] for (h, r, t) in triplets]

    def next_one_on_eval(self):
        """Yield one eval episode: support (+negatives) for the query's
        relation and ALL valid negatives for the query. Returns
        ("EOT", "EOT") once every query has been served."""
        if self.curr_tri_idx == self.num_tris:
            print('Finished evaluating {} queries.'.format(self.num_tris))
            return "EOT", "EOT"
        # get current triple
        query_triple = self.eval_triples[self.curr_tri_idx]
        self.curr_tri_idx += 1
        curr_rel = query_triple[1]
        curr_cand = self.rel2candidates[curr_rel]
        curr_task = self.tasks[curr_rel]
        # get support triples
        support_triples = curr_task[:self.few]
        # construct support negative: walk the candidate list until one is
        # neither a known true tail nor the gold tail.
        support_negative_triples = []
        shift = 0
        for triple in support_triples:
            e1, rel, e2 = triple
            while True:
                negative = curr_cand[shift]
                if (negative not in self.e1rel_e2[e1 + rel]) \
                        and negative != e2:
                    break
                else:
                    shift += 1
            support_negative_triples.append([e1, rel, negative])
        # construct negative triples: every candidate that is not a true tail.
        negative_triples = []
        e1, rel, e2 = query_triple
        for negative in curr_cand:
            if (negative not in self.e1rel_e2[e1 + rel]) \
                    and negative != e2:
                negative_triples.append([e1, rel, negative])
        return [[self.get_id(support_triples)], [self.get_id(support_negative_triples)], [self.get_id([query_triple])], [self.get_id(negative_triples)]], self.rel2id[curr_rel]
class DataLoader(object):
    """Episode sampler over the real few-shot tasks.

    next_one()/next_batch() build training episodes of `few` support and
    `nq` query triples with one sampled negative per positive;
    next_one_on_eval*() serves one query at a time together with ALL valid
    negative candidates for ranking.
    """
    def __init__(self, dataset, parameter, step='train'):
        self.ent2id = dataset['ent2id']
        self.rel2id = dataset['rel2id']
        self.curr_rel_idx = 0
        self.tasks = dataset[step + '_tasks']
        self.rel2candidates = dataset['rel2candidates']
        # (head+rel) -> known true tails; used to reject false negatives.
        self.e1rel_e2 = dataset['e1rel_e2']
        self.all_rels = sorted(list(self.tasks.keys()))
        self.num_rels = len(self.all_rels)
        self.few = parameter['few']          # support triples per episode
        self.bs = parameter['batch_size']    # episodes per batch
        self.nq = parameter['num_query']     # query triples per episode
        self.seed = parameter['seed']
        # For eval: everything beyond the first `few` triples of a task is a query.
        if step != 'train':
            self.eval_triples = []
            for rel in self.all_rels:
                self.eval_triples.extend(self.tasks[rel][self.few:])
            self.num_tris = len(self.eval_triples)
            self.curr_tri_idx = 0
    def get_id(self, triplets):
        """Map (head, rel, tail) string triples to [h_id, r_id, t_id] lists."""
        return [[self.ent2id[h], self.rel2id[r], self.ent2id[t]] for (h, r, t) in triplets]
    def next_one(self):
        """Sample one training episode for the next relation in round-robin
        order; returns (support, support_neg, query, query_neg, rel_id)."""
        # shift curr_rel_idx to 0 after one circle of all relations
        if self.curr_rel_idx % self.num_rels == 0:
            random.shuffle(self.all_rels)
            self.curr_rel_idx = 0
        # get current relation and current candidates
        curr_rel = self.all_rels[self.curr_rel_idx]
        self.curr_rel_idx = (self.curr_rel_idx + 1) % self.num_rels # shift current relation idx to next
        curr_cand = self.rel2candidates[curr_rel]
        # NOTE(review): if EVERY relation had <=10 candidates or <=10 triples
        # this loop would never terminate — assumes at least one big task.
        while len(curr_cand) <= 10 or len(self.tasks[curr_rel]) <= 10: # ignore the small task sets
            curr_rel = self.all_rels[self.curr_rel_idx]
            self.curr_rel_idx = (self.curr_rel_idx + 1) % self.num_rels
            curr_cand = self.rel2candidates[curr_rel]
        # get current tasks by curr_rel from all tasks and shuffle it
        curr_tasks = self.tasks[curr_rel]
        curr_tasks_idx = np.arange(0, len(curr_tasks), 1)
        # NOTE(review): np.random.choice samples WITH replacement here, so
        # support and query sets can overlap — confirm this is intended.
        curr_tasks_idx = np.random.choice(curr_tasks_idx, self.few + self.nq)
        support_triples = [curr_tasks[i] for i in curr_tasks_idx[:self.few]]
        query_triples = [curr_tasks[i] for i in curr_tasks_idx[self.few:]]
        # construct support and query negative triples
        support_negative_triples = []
        for triple in support_triples:
            e1, rel, e2 = triple
            # resample until the candidate is neither a known true tail nor the gold tail
            while True:
                negative = random.choice(curr_cand)
                if (negative not in self.e1rel_e2[e1 + rel]) \
                        and negative != e2:
                    break
            support_negative_triples.append([e1, rel, negative])
        negative_triples = []
        for triple in query_triples:
            e1, rel, e2 = triple
            while True:
                negative = random.choice(curr_cand)
                if (negative not in self.e1rel_e2[e1 + rel]) \
                        and negative != e2:
                    break
            negative_triples.append([e1, rel, negative])
        return self.get_id(support_triples), self.get_id(support_negative_triples), self.get_id(query_triples), self.get_id(negative_triples), self.rel2id[curr_rel]
    def next_batch(self):
        """Draw `bs` episodes and zip them into component-wise batches."""
        next_batch_all = [self.next_one() for _ in range(self.bs)]
        support, support_negative, query, negative, curr_rel = zip(*next_batch_all)
        return [support, support_negative, query, negative], curr_rel
    def next_one_on_eval(self):
        """Yield the next eval query with its relation's support set and ALL
        valid negatives; ("EOT", "EOT") when the eval triples are exhausted."""
        if self.curr_tri_idx == self.num_tris:
            return "EOT", "EOT"
        # get current triple
        query_triple = self.eval_triples[self.curr_tri_idx]
        self.curr_tri_idx += 1
        curr_rel = query_triple[1]
        curr_cand = self.rel2candidates[curr_rel]
        curr_task = self.tasks[curr_rel]
        # get support triples
        support_triples = curr_task[:self.few]
        # construct support negative: deterministic walk through the candidate
        # list (shift persists across support triples).
        # NOTE(review): can IndexError if the candidate list runs out of valid
        # negatives — assumes candidates are plentiful.
        support_negative_triples = []
        shift = 0
        for triple in support_triples:
            e1, rel, e2 = triple
            while True:
                negative = curr_cand[shift]
                if (negative not in self.e1rel_e2[e1 + rel]) \
                        and negative != e2:
                    break
                else:
                    shift += 1
            support_negative_triples.append([e1, rel, negative])
        # construct negative triples: every candidate that is not a true tail
        negative_triples = []
        e1, rel, e2 = query_triple
        for negative in curr_cand:
            if (negative not in self.e1rel_e2[e1 + rel]) \
                    and negative != e2:
                negative_triples.append([e1, rel, negative])
        return [[self.get_id(support_triples)], [self.get_id(support_negative_triples)], [self.get_id([query_triple])], [self.get_id(negative_triples)]], self.rel2id[curr_rel]
    def next_one_on_eval_by_relation(self, curr_rel):
        """Same as next_one_on_eval but restricted to the queries of a single
        relation; resets its cursor and returns ("EOT", "EOT") when done."""
        if self.curr_tri_idx == len(self.tasks[curr_rel][self.few:]):
            self.curr_tri_idx = 0
            return "EOT", "EOT"
        # get current triple
        query_triple = self.tasks[curr_rel][self.few:][self.curr_tri_idx]
        self.curr_tri_idx += 1
        # curr_rel = query_triple[1]
        curr_cand = self.rel2candidates[curr_rel]
        curr_task = self.tasks[curr_rel]
        # get support triples
        support_triples = curr_task[:self.few]
        # construct support negative (same walk as next_one_on_eval)
        support_negative_triples = []
        shift = 0
        for triple in support_triples:
            e1, rel, e2 = triple
            while True:
                negative = curr_cand[shift]
                if (negative not in self.e1rel_e2[e1 + rel]) \
                        and negative != e2:
                    break
                else:
                    shift += 1
            support_negative_triples.append([e1, rel, negative])
        # construct negative triples
        negative_triples = []
        e1, rel, e2 = query_triple
        for negative in curr_cand:
            if (negative not in self.e1rel_e2[e1 + rel]) \
                    and negative != e2:
                negative_triples.append([e1, rel, negative])
        return [[self.get_id(support_triples)], [self.get_id(support_negative_triples)], [self.get_id([query_triple])], [self.get_id(negative_triples)]], self.rel2id[curr_rel]
|
{"/models/relation_meta_learner.py": ["/models/modules.py"], "/models/meta_learner.py": ["/models/relation_meta_learner.py", "/models/rgcn_model.py"], "/trainer.py": ["/utils.py"], "/models/modules.py": ["/models/rgcn_model.py"], "/main.py": ["/data_loader.py", "/trainer.py", "/models/meta_learner.py", "/utils.py"]}
|
8,958
|
dorajam/few-shot-link-prediction
|
refs/heads/main
|
/utils.py
|
import os
import json
import logging
def get_lr(optimizer):
    """Return the learning rate of the optimizer's first param group
    (None if the optimizer has no param groups)."""
    first_group = next(iter(optimizer.param_groups), None)
    return None if first_group is None else first_group['lr']
def initialize_experiment(params):
    """Create the checkpoint/log/state directories for one run, persist the
    run parameters, and attach a file handler to the root logger.

    Mutates `params` in place:
      * adds     'ckpt_dir'  = <state_dir>/<experiment_name>/checkpoint
      * rewrites 'log_dir'   = <log_dir>/[synthetic_]<experiment_name>
      * rewrites 'state_dir' = <state_dir>/<experiment_name>
    """
    params['ckpt_dir'] = os.path.join(params['state_dir'], params['experiment_name'], 'checkpoint')
    if not os.path.isdir(params['ckpt_dir']):
        print("creating new ckpt_dir", params['ckpt_dir'])
    # exist_ok=True removes the check-then-create race of the original code.
    os.makedirs(params['ckpt_dir'], exist_ok=True)
    pre = 'synthetic_' if params['is_synthetic'] else ''
    params['log_dir'] = os.path.join(params['log_dir'], pre + params['experiment_name'])
    if not os.path.isdir(params['log_dir']):
        print("creating new log_dir", params['log_dir'])
    os.makedirs(params['log_dir'], exist_ok=True)
    # Must stay AFTER ckpt_dir, which is built from the ORIGINAL state_dir.
    params['state_dir'] = os.path.join(params['state_dir'], params['experiment_name'])
    if not os.path.isdir(params['state_dir']):
        print("creating new state_dir", params['state_dir'])
    os.makedirs(params['state_dir'], exist_ok=True)
    # logging
    with open(os.path.join(params['log_dir'], "params.json"), 'w') as fout:
        json.dump(params, fout)
    file_handler = logging.FileHandler(os.path.join(params['log_dir'], 'res.log'))
    logger = logging.getLogger()
    logger.addHandler(file_handler)
    logging.info('============ Initialized logger ============')
    logging.info('\n'.join('%s: %s' % (k, str(v)) for k, v
                           in sorted(params.items())))
    logging.info('============================================')
|
{"/models/relation_meta_learner.py": ["/models/modules.py"], "/models/meta_learner.py": ["/models/relation_meta_learner.py", "/models/rgcn_model.py"], "/trainer.py": ["/utils.py"], "/models/modules.py": ["/models/rgcn_model.py"], "/main.py": ["/data_loader.py", "/trainer.py", "/models/meta_learner.py", "/utils.py"]}
|
8,959
|
dorajam/few-shot-link-prediction
|
refs/heads/main
|
/models/relation_meta_learner.py
|
from collections import OrderedDict
import torch
from torch import nn
from .modules import MLPModule, RGCNModule
class SimplePrototype(nn.Module):
    """Baseline relation-meta learner: one learned prototype vector shared by
    every task. The support set and background graph are ignored."""

    def __init__(self, parameter):
        super(SimplePrototype, self).__init__()
        self.embedding_dim = parameter['embed_dim']
        self.device = parameter['device']
        # Single shared prototype, projected to the output space on each forward.
        self.prototypes = nn.Embedding(1, self.embedding_dim)
        self.prototype2out = nn.Linear(self.embedding_dim, self.embedding_dim)

    def forward(self, support_set, support_emb, background_graph):
        """Return the same relation-meta vector for every task in the batch,
        shaped [batch, 1, 1, embed_dim]."""
        n_tasks = len(support_set)
        shared = self.prototypes.weight.expand(n_tasks, -1)
        r_meta = self.prototype2out(shared)
        batch, dim = r_meta.shape[0], r_meta.shape[-1]
        return r_meta.view(batch, 1, 1, dim)
class ModularPrototypes(nn.Module):
    """Relation-meta learner with a bank of learned prototypes: one attention
    module per prototype scores the support set, and the relation vector is
    the attention-weighted sum of prototypes projected to the output space."""

    def __init__(self, parameter):
        super(ModularPrototypes, self).__init__()
        self.num_prototypes = parameter['num_prototypes']
        self.prototype_dim = parameter['prototype_dim']
        self.embedding_dim = parameter['embed_dim']
        self.num_entities = parameter['num_entities']
        self.out_dim = parameter['embed_dim']
        self.device = parameter['device']
        if parameter['module_type'] == 'MLP':
            module_class = MLPModule
        elif parameter['module_type'] == 'RGCN':
            module_class = RGCNModule
        else:
            # Fail fast: the original fell through and crashed later with an
            # opaque NameError on `module_class`.
            raise ValueError("Unknown module_type: {}".format(parameter['module_type']))
        # One independent attention module per prototype.
        module_list = []
        for idx in range(self.num_prototypes):
            module = module_class(parameter=parameter)
            module_list.append(module)
        self.module_list = nn.ModuleList(module_list)
        self.prototypes = nn.Embedding(self.num_prototypes, self.prototype_dim)
        self.prototype2out = nn.Linear(self.prototype_dim, self.out_dim)

    def forward(self, support_set, support_emb, background_graph):
        """Return one relation-meta vector per task, [batch, 1, 1, embed_dim]."""
        batch_size = len(support_set)  # support batch size
        prototype_ids = torch.arange(self.num_prototypes, device=self.device)
        prototypes = self.prototypes(prototype_ids)
        attention_scores = torch.zeros(batch_size, self.num_prototypes, device=self.device)
        # Calculate prototype attention scores
        for idx in range(self.num_prototypes):
            # there are a set of embeddings per each model - ignore them, use initial embeddings in entity lookup
            attention_score = self.module_list[idx](support_set, support_emb, background_graph)
            attention_scores[:, idx] = attention_score.squeeze(1)
        # Take weighted sum over all prototypes, outputs r_meta.
        attended_prototypes = torch.matmul(attention_scores, prototypes)
        r_meta = self.prototype2out(attended_prototypes)
        return r_meta.view(r_meta.shape[0], 1, 1, r_meta.shape[-1])
class MetaR(nn.Module):
    """Original MetaR relation-meta learner: concatenated (head, tail) support
    embeddings pass through a 3-layer MLP and are averaged over the few shots
    to yield one relation vector per task."""

    def __init__(self, parameter):
        super(MetaR, self).__init__()
        self.embedding_dim = parameter['embed_dim']
        self.out_dim = parameter['embed_dim']
        self.device = parameter['device']
        # Hidden widths are dataset-specific constants.
        if parameter['dataset'] == 'Wiki-One':
            num_hidden1 = 250
            num_hidden2 = 100
        elif parameter['dataset'] == 'NELL-One':
            num_hidden1 = 500
            num_hidden2 = 200
        else:
            # Fail fast: the original fell through and crashed below with an
            # unbound-local NameError for any other dataset name.
            raise ValueError('Unsupported dataset for MetaR: {}'.format(parameter['dataset']))
        # BatchNorm is applied over the `few` support shots (channel dim).
        self.rel_fc1 = nn.Sequential(OrderedDict([
            ('fc', nn.Linear(2 * self.embedding_dim, num_hidden1)),
            ('bn', nn.BatchNorm1d(parameter['few'])),
            ('relu', nn.LeakyReLU()),
            ('drop', nn.Dropout(p=parameter['dropout'])),
        ]))
        self.rel_fc2 = nn.Sequential(OrderedDict([
            ('fc', nn.Linear(num_hidden1, num_hidden2)),
            ('bn', nn.BatchNorm1d(parameter['few'])),
            ('relu', nn.LeakyReLU()),
            ('drop', nn.Dropout(p=parameter['dropout'])),
        ]))
        self.rel_fc3 = nn.Sequential(OrderedDict([
            ('fc', nn.Linear(num_hidden2, self.out_dim)),
            ('bn', nn.BatchNorm1d(parameter['few'])),
        ]))
        nn.init.xavier_normal_(self.rel_fc1.fc.weight)
        nn.init.xavier_normal_(self.rel_fc2.fc.weight)
        nn.init.xavier_normal_(self.rel_fc3.fc.weight)

    def forward(self, support_set, support_emb, background_graph=None):
        """support_emb: [batch, few, 2, embed_dim] -> relation meta vector of
        shape [batch, 1, 1, embed_dim]. `support_set`/`background_graph` are
        accepted for interface compatibility and unused."""
        size = support_emb.shape
        # Concatenate head and tail embeddings of each support pair.
        x = support_emb.contiguous().view(size[0], size[1], -1)
        x = self.rel_fc1(x)
        x = self.rel_fc2(x)
        x = self.rel_fc3(x)
        # Average over the few support shots.
        x = torch.mean(x, 1)
        r_meta = x.view(size[0], self.out_dim)
        return r_meta.view(r_meta.shape[0], 1, 1, r_meta.shape[-1])
|
{"/models/relation_meta_learner.py": ["/models/modules.py"], "/models/meta_learner.py": ["/models/relation_meta_learner.py", "/models/rgcn_model.py"], "/trainer.py": ["/utils.py"], "/models/modules.py": ["/models/rgcn_model.py"], "/main.py": ["/data_loader.py", "/trainer.py", "/models/meta_learner.py", "/utils.py"]}
|
8,960
|
dorajam/few-shot-link-prediction
|
refs/heads/main
|
/models/meta_learner.py
|
import functools
import numpy as np
import torch
from torch import nn
from .relation_meta_learner import MetaR, ModularPrototypes, SimplePrototype
from .rgcn_model import RGCN
class MetaLearner(nn.Module):
    """Few-shot KG meta-learner (MetaR-style): learns entity embeddings
    (optionally refined by an RGCN over the background graph), derives a
    relation-meta vector from the support set, adapts it with one gradient
    step on the support loss, and scores query triples with a TransE-like
    translation objective."""
    def __init__(self, parameter, background_graph=None):
        super(MetaLearner, self).__init__()
        self.device = parameter['device']
        self.beta = parameter['beta']        # step size of the inner relation-meta update
        self.margin = parameter['margin']    # margin of the ranking loss
        self.abla = parameter['ablation']    # if True, skip the inner gradient step
        self.use_rgcn = parameter['use_rgcn']
        self.step = parameter['step']
        # Raw learned entity embeddings (also used as RGCN input features).
        self.embeddings = nn.Parameter(torch.FloatTensor(parameter['num_entities'], parameter['embed_dim']))
        nn.init.xavier_uniform_(self.embeddings.data)
        # Cache for post-RGCN representations used at eval time.
        self.final_embeddings = nn.Parameter(torch.FloatTensor(parameter['num_entities'], parameter['embed_dim']))
        nn.init.xavier_uniform_(self.final_embeddings.data)
        self.background_graph = background_graph
        self.background_graph.edata['type'] = self.background_graph.edata['type'].to(self.device)
        self.background_graph.ndata['feat'] = self.embeddings
        self.background_graph.ndata['repr'] = self.embeddings.detach().to(self.device)
        if self.use_rgcn:
            self.rgcn = RGCN(parameter, is_module=False)
        # Select the relation-meta learner variant by config string.
        if parameter['rmeta_learner'] == 'MetaR':
            self.relation_meta_learner = MetaR(parameter=parameter)
        if parameter['rmeta_learner'] == 'Modular':
            self.relation_meta_learner = ModularPrototypes(parameter=parameter)
        if parameter['rmeta_learner'] == 'Simple':
            self.relation_meta_learner = SimplePrototype(parameter=parameter)
        self.embedding_learner = EmbeddingLearner()
        self.loss_func = nn.MarginRankingLoss(self.margin)
        # Cache of adapted relation vectors, reused across eval queries of the same relation.
        self.rel_q_sharing = dict()
    def fill_bg_with_data(self):
        """Re-attach the (re)loaded embeddings to the background graph —
        called after load_state_dict so 'feat'/'repr' track the new weights."""
        self.background_graph.ndata['feat'] = self.embeddings
        self.background_graph.ndata['repr'] = self.final_embeddings.detach().to(self.device)
    def split_concat(self, positive, negative):
        """Stack positive and negative triples along dim 1 and split the pair
        dimension into separate head (e1) and tail (e2) tensors."""
        pos_neg_e1 = torch.cat([positive[:, :, 0, :],
                                negative[:, :, 0, :]], 1).unsqueeze(2)
        pos_neg_e2 = torch.cat([positive[:, :, 1, :],
                                negative[:, :, 1, :]], 1).unsqueeze(2)
        return pos_neg_e1, pos_neg_e2
    def get_embeddings(self, tasks):
        """Look up (head, tail) embeddings for all four task components.

        tasks: [support, support_neg, query, query_neg], each a batch of
        lists of [h, r, t] id triples. Returns four tensors of shape
        [batch, n_triples, 2, embed_dim].
        """
        indices = [torch.LongTensor([[[triplet[0], triplet[2]] for triplet in batch_of_triplets] for batch_of_triplets in t]).to(self.device) for t in tasks]
        if self.use_rgcn:
            if self.training:
                # Run the RGCN only on nodes appearing in this batch, then
                # remap global entity ids onto rows of the returned table.
                batch_nodes = np.array(functools.reduce(lambda x, y: list(set(x + y)), [list(ind.flatten().cpu().numpy()) for ind in indices]))
                embeddings, nid = self.rgcn(batch_nodes, self.background_graph)
                m = dict()
                for i, n in enumerate(nid):
                    m[n.item()] = i
                indices = [torch.LongTensor([[[m[triplet[0]], m[triplet[2]]] for triplet in batch_of_triplets] for batch_of_triplets in t]).to(self.device) for t in tasks]
            else:
                # Eval: reuse the cached full-graph representations.
                # embeddings = self.background_graph.ndata['repr'].detach()
                self.final_embeddings.data = self.background_graph.ndata['repr'].detach()
                embeddings = self.final_embeddings
        else:
            embeddings = self.embeddings
        support, support_negative, query, query_negative = [embeddings[ids] for ids in indices]
        return support, support_negative, query, query_negative
    def forward(self, tasks, curr_rel=''):
        """Meta-forward: build a relation vector from the support set, adapt
        it with one gradient step on the support ranking loss (unless
        ablated), then score query positives/negatives with it.

        Returns (p_score, n_score) for the query triples.
        """
        support, support_negative, query, negative = self.get_embeddings(tasks)
        num_shots = support.shape[1]  # num of few
        num_support_negatives = support_negative.shape[1]  # num of support negative
        num_queries = query.shape[1]  # num of query
        num_query_negatives = negative.shape[1]  # num of query negative
        # because in test and dev step, same relation uses same support,
        # so it's no need to repeat the step of relation-meta learning
        if not self.training and curr_rel != '' and curr_rel in self.rel_q_sharing.keys():
            rel_q = self.rel_q_sharing[curr_rel]
        else:
            r_meta = self.relation_meta_learner(tasks[0], support, background_graph=self.background_graph)
            # retain_grad: r_meta is a non-leaf tensor but we need its .grad
            # below for the inner update.
            r_meta.retain_grad()
            # relation for support
            rel_s = r_meta.expand(-1, num_shots + num_support_negatives, -1, -1)
            # split on e1/e2 and concat on pos/neg
            sup_neg_e1, sup_neg_e2 = self.split_concat(support, support_negative)
            p_score, n_score = self.embedding_learner(sup_neg_e1, sup_neg_e2, rel_s, num_shots)
            y = torch.Tensor([1]).to(self.device)
            self.zero_grad()
            loss = self.loss_func(p_score, n_score, y)
            # retain_graph only in training, where the outer loss backprops
            # through this graph again.
            loss.backward(retain_graph=self.training)
            if not self.abla:
                # One gradient step on the relation vector (MetaR inner update).
                grad_meta = r_meta.grad
                rel_q = r_meta - self.beta * grad_meta
            else:
                rel_q = r_meta
            self.rel_q_sharing[curr_rel] = rel_q
        rel_q = rel_q.expand(-1, num_queries + num_query_negatives, -1, -1)
        que_neg_e1, que_neg_e2 = self.split_concat(query, negative)  # [bs, nq+nn, 1, es]
        p_score, n_score = self.embedding_learner(que_neg_e1, que_neg_e2, rel_q, num_queries)
        return p_score, n_score
class EmbeddingLearner(nn.Module):
    """Scores triples with the TransE criterion: -||h + r - t||_2."""

    def __init__(self):
        super(EmbeddingLearner, self).__init__()

    def forward(self, h, t, r, pos_num):
        """Score all triples and split into positives (first `pos_num`
        columns) and negatives (the rest)."""
        translation_error = h + r - t
        score = -translation_error.norm(p=2, dim=-1).squeeze(2)
        return score[:, :pos_num], score[:, pos_num:]
|
{"/models/relation_meta_learner.py": ["/models/modules.py"], "/models/meta_learner.py": ["/models/relation_meta_learner.py", "/models/rgcn_model.py"], "/trainer.py": ["/utils.py"], "/models/modules.py": ["/models/rgcn_model.py"], "/main.py": ["/data_loader.py", "/trainer.py", "/models/meta_learner.py", "/utils.py"]}
|
8,961
|
dorajam/few-shot-link-prediction
|
refs/heads/main
|
/trainer.py
|
import os
import sys
import shutil
import logging
import time
import torch
import wandb
import numpy as np
from json import load
from collections import Counter
from tensorboardX import SummaryWriter
from utils import get_lr
class Trainer:
    def __init__(self, model, data_loaders, parameter):
        """Wire up the model, data loaders, optimizer/scheduler and logging.

        Args:
            model: the MetaLearner module to train/evaluate.
            data_loaders: [train, dev, test] (+ synthetic test when
                parameter['is_synthetic']).
            parameter: flat dict of run hyper-parameters and paths.
        """
        self.parameter = parameter
        # dir
        self.state_dir = parameter['state_dir']
        # NOTE(review): ckpt_dir is set to state_dir itself, not the
        # <state_dir>/<exp>/checkpoint dir created by initialize_experiment —
        # confirm this is intended.
        self.ckpt_dir = parameter['state_dir']
        self.log_dir = parameter['log_dir']
        self.state_dict_file = ''
        self.idx2rel = load(open('data/' + self.parameter['dataset'] + '/idx2rel.json', 'r'))
        self.ent2ids = load(open('data/' + self.parameter['dataset'] + '/ent2ids', 'r'))
        self.id2ent = {str(idx): ent for ent, idx in self.ent2ids.items()}
        # data loader
        self.train_data_loader = data_loaders[0]
        self.dev_data_loader = data_loaders[1]
        self.test_data_loader = data_loaders[2]
        if parameter['is_synthetic']:
            self.synthetic_test_data_loader = data_loaders[3]
        # triples
        # Entity occurrence counts over all training triples (head + tail ids).
        triplets = list(self.train_data_loader.tasks.values())
        all_entities = np.array([[self.ent2ids[t[0]], self.ent2ids[t[2]]] for rel in triplets for t in rel]).flatten()
        self.c = Counter(all_entities)
        # parameters
        self.few = parameter['few']
        self.num_query = parameter['num_query']
        self.batch_size = parameter['batch_size']
        self.learning_rate = parameter['learning_rate']
        self.early_stopping_patience = parameter['early_stopping_patience']
        # epoch
        self.iterations = parameter['iterations']
        self.print_iter = parameter['print_iter']
        self.eval_iter = parameter['eval_iter']
        self.checkpoint_iter = parameter['checkpoint_iter']
        self.device = parameter['device']
        # tensorboard log writer
        if parameter['step'] == 'train':
            self.writer = SummaryWriter(os.path.join(parameter['log_dir']))
        # model
        self.meta_learner = model
        self.meta_learner.to(self.device)
        # optimizer
        self.optimizer = torch.optim.Adam(self.meta_learner.parameters(), self.learning_rate)
        # NOTE(review): step_size here is a float (true division) while StepLR
        # documents an int step_size — confirm behavior on this torch version.
        self.scheduler = torch.optim.lr_scheduler.StepLR(self.optimizer, step_size=self.parameter['iterations'] / parameter['lr_step'], gamma=parameter['lr_rate'])
        # load state_dict and params
        if parameter['step'] in ['test', 'dev']:
            self.reload()
def reload(self):
if self.parameter['eval_ckpt'] is not None:
state_dict_file = os.path.join(self.ckpt_dir, 'state_dict_' + self.parameter['eval_ckpt'] + '.ckpt')
else:
state_dict_file = os.path.join(self.state_dir, 'state_dict')
self.state_dict_file = state_dict_file
logging.info('Reload state_dict from {}'.format(state_dict_file))
print('reload state_dict from {}'.format(state_dict_file))
state = torch.load(state_dict_file, map_location=self.device)
if os.path.isfile(state_dict_file):
self.meta_learner.load_state_dict(state)
self.meta_learner.fill_bg_with_data()
else:
raise RuntimeError('No state dict in {}!'.format(state_dict_file))
def save_checkpoint(self, iteration):
torch.save(self.meta_learner.state_dict(), os.path.join(self.ckpt_dir, 'state_dict_' + str(iteration) + '.ckpt'))
def del_checkpoint(self, iteration):
path = os.path.join(self.ckpt_dir, 'state_dict_' + str(iteration) + '.ckpt')
if os.path.exists(path):
os.remove(path)
else:
raise RuntimeError('No such checkpoint to delete: {}'.format(path))
def save_best_state_dict(self, best_epoch):
shutil.copy(os.path.join(self.ckpt_dir, 'state_dict_' + str(best_epoch) + '.ckpt'),
os.path.join(self.state_dir, 'state_dict'))
def write_training_log(self, data, iteration):
self.writer.add_scalar('Training_Loss', data['Loss'], iteration)
if self.parameter['dashboard']:
wandb.log({'train_loss': data['Loss']})
def write_validating_log(self, data, iteration):
self.writer.add_scalar('Validating_MRR', data['MRR'], iteration)
self.writer.add_scalar('Validating_Hits_10', data['Hits@10'], iteration)
self.writer.add_scalar('Validating_Hits_5', data['Hits@5'], iteration)
self.writer.add_scalar('Validating_Hits_1', data['Hits@1'], iteration)
if self.parameter['dashboard']:
wandb.log({
'val_mrr': data['MRR'],
'val_hits10': data['Hits@10'],
'val_hits5': data['Hits@5'],
'val_hits1': data['Hits@1']})
def logging_training_data(self, data, iteration):
logging.info("Iter: {}\tMRR: {:.3f}\tHits@10: {:.3f}\tHits@5: {:.3f}\tHits@1: {:.3f}\r".format(
iteration, data['MRR'], data['Hits@10'], data['Hits@5'], data['Hits@1']))
def logging_eval_data(self, data, state_path, isTest=False):
setname = 'dev set'
if isTest:
setname = 'test set'
logging.info("Eval {} on {}".format(state_path, setname))
logging.info("MRR: {:.3f}\tHits@10: {:.3f}\tHits@5: {:.3f}\tHits@1: {:.3f}\r".format(
data['MRR'], data['Hits@10'], data['Hits@5'], data['Hits@1']))
def rank_predict(self, data, x, ranks, tmp=None):
# query_idx is the idx of positive score
query_idx = x.shape[0] - 1
# sort all scores with descending, because more plausible triple has higher score
_, idx = torch.sort(x, descending=True)
rank = list(idx.cpu().numpy()).index(query_idx) + 1
ranks.append(rank)
h10, h5, h1 = 0., 0., 0.
# update data
if rank <= 10:
data['Hits@10'] += 1.
h10 = 1
if rank <= 5:
data['Hits@5'] += 1.
h5 = 1
if rank == 1:
data['Hits@1'] += 1.
h1 = 1
data['MRR'] += 1.0 / rank
mrr = 1. / rank
if tmp:
tmp['mrr'] = mrr
tmp['H10'] = h10
tmp['H5'] = h5
tmp['H1'] = h1
    def do_one_step(self, task, curr_rel=''):
        """Run one forward pass; in training mode also backprop and step the optimizer.

        Args:
            task: batched episode [support, support_neg, query, query_neg].
            curr_rel: relation id(s) of the episode; an empty string skips
                scoring entirely in eval mode.

        Returns:
            (loss, p_score, n_score) — all zeros when nothing was computed.
        """
        loss, p_score, n_score = 0, 0, 0
        if self.meta_learner.training:
            self.optimizer.zero_grad()
            p_score, n_score = self.meta_learner(task, curr_rel)
            y = torch.Tensor([1]).to(self.device)
            loss = self.meta_learner.loss_func(p_score, n_score, y)
            if self.parameter['rmeta_learner'] == 'Modular':
                # Orthogonality regularizer ||P P^T - I|| pushes the prototype
                # vectors towards an orthonormal set.
                prototypes = [p for p in self.meta_learner.relation_meta_learner.prototypes.parameters()][0]
                ortho_regularizer = torch.norm(
                    torch.matmul(prototypes, prototypes.transpose(0, 1)) - torch.eye(self.parameter['num_prototypes']).to(self.device)
                )
                loss += self.parameter['reg_weight'] * ortho_regularizer
            loss.backward()
            self.optimizer.step()
        elif curr_rel != '':
            # Eval path: compute scores and loss without touching the optimizer.
            p_score, n_score = self.meta_learner(task, curr_rel)
            y = torch.Tensor([1]).to(self.device)
            loss = self.meta_learner.loss_func(p_score, n_score, y)
        return loss, p_score, n_score
def train(self):
    """Main training loop with periodic logging, checkpointing, validation
    and early stopping on the configured metric.

    Side effects: writes checkpoints/best state via ``save_checkpoint`` /
    ``save_best_state_dict`` and logs progress.
    """
    # initialization
    best_iter = 0
    best_value = 0
    bad_counts = 0
    # FIX: hoist the metric name out of the eval branch — previously it was
    # only assigned inside `if e % self.eval_iter == 0`, so the final log
    # line raised NameError whenever no validation round ever ran.
    metric = self.parameter['metric']
    tic = time.time()
    # training by iter
    for e in range(self.iterations):
        self.meta_learner.train()
        # sample one batch from data_loader
        train_task, curr_rel = self.train_data_loader.next_batch()
        loss, _, _ = self.do_one_step(train_task, curr_rel=curr_rel)
        # print the loss on specific iter
        if e % self.print_iter == 0:
            loss_num = loss.item()
            self.write_training_log({'Loss': loss_num}, e)
            logging.info("Iter: {}\tLoss: {:.4f}\tTime: {:.4f}\tlr: {:.4f}".format(e, loss_num, time.time() - tic, get_lr(self.optimizer)))
            tic = time.time()
        # save checkpoint on specific iter
        if e % self.checkpoint_iter == 0 and e != 0:
            logging.info('Iter {} has finished, saving...'.format(e))
            self.save_checkpoint(e)
        # do evaluation on specific iter
        if e % self.eval_iter == 0 and e != 0:
            logging.info('Iter {} has finished, validating...'.format(e))
            valid_data = self.eval(isTest=False)
            self.write_validating_log(valid_data, e)
            # early stopping checking
            if valid_data[metric] > best_value:
                best_value = valid_data[metric]
                best_iter = e
                logging.info('\tBest model | {0} of valid set is {1:.3f}'.format(metric, best_value))
                bad_counts = 0
                # save current best
                self.save_checkpoint(best_iter)
                self.save_best_state_dict(best_iter)
            else:
                logging.info('\tBest {0} of valid set is {1:.3f} at {2} | bad count is {3}'.format(
                    metric, best_value, best_iter, bad_counts))
                bad_counts += 1
            if bad_counts >= self.early_stopping_patience:
                logging.info('\tEarly stopping at iteration %d' % e)
                break
        self.scheduler.step()
    logging.info('Training has finished')
    logging.info('\tBest iteration is {0} | {1} of valid set is {2:.3f}'.format(best_iter, metric, best_value))
    self.save_best_state_dict(best_iter)
    logging.info('Finish')
def _diagnostics(self, query_head, correct_tail, rel_id, support, candidates):
"""
For given eval task, take the query head and current relation
and return: the number of candidates for the relation and
the number of occurances of the head entity in the training data.
"""
num_of_candidates = len(candidates)
head_occurance = self.c[query_head]
tail_occurance = self.c[correct_tail]
tail_in_support = 0
head_in_support = 0
support_entities = np.array([[triple[0], triple[2]] for triple in support]).flatten()
support_occurances = 0
for e in support_entities:
support_occurances += self.c[e]
if e == query_head:
head_in_support += 1
if e == correct_tail:
tail_in_support += 1
return num_of_candidates, head_occurance, support_occurances, tail_occurance, head_in_support, tail_in_support, support_entities
def eval(self, isTest=False, save_all=False, synthetic=False):
    """Evaluate the meta-learner on every task of the chosen split.

    Args:
        isTest: use the test loader when True, else the dev loader
            (ignored when ``synthetic`` is True).
        save_all: also collect per-query diagnostics via ``_diagnostics``.
        synthetic: evaluate on the synthetic test loader instead.

    Returns:
        dict of metrics averaged over all tasks:
        {'loss', 'MRR', 'Hits@1', 'Hits@5', 'Hits@10'}.
        When ``isTest`` is True the scores are also written to a text file
        under the log directory.
    """
    self.meta_learner.eval()
    # clear sharing rel_q
    self.meta_learner.rel_q_sharing = dict()
    # pick the data loader; `synthetic` takes precedence over `isTest`
    if not synthetic:
        if isTest:
            data_loader = self.test_data_loader
        else:
            data_loader = self.dev_data_loader
    else:
        print('Using synthetic dataloader...')
        data_loader = self.synthetic_test_data_loader
    # rewind the loader so evaluation always starts from the first task
    data_loader.curr_tri_idx = 0
    # initial return data of validation
    data = {'loss': 0, 'MRR': 0, 'Hits@1': 0, 'Hits@5': 0, 'Hits@10': 0}
    # per-query metrics filled in-place by rank_predict
    tmp = {'mrr': 0, 'H10': 0, 'H5': 0, 'H1': 0}
    ranks = []
    t = 0  # number of tasks evaluated so far
    temp = dict()
    tic = time.time()
    diagnostics = []
    while True:
        # sample all the eval tasks
        eval_task, curr_rel = data_loader.next_one_on_eval()
        # at the end of sample tasks, a symbol 'EOT' will return
        if eval_task == 'EOT':
            break
        t += 1
        # pdb.set_trace()
        l, p_score, n_score = self.do_one_step(eval_task, curr_rel=curr_rel)
        # positive score goes last so rank_predict can locate it
        x = torch.cat([n_score, p_score], 1).squeeze()
        self.rank_predict(data, x, ranks, tmp)
        if save_all:
            if synthetic:
                relation = data_loader.id2rel[str(curr_rel)]
            else:
                relation = self.idx2rel[str(curr_rel)]
            candidates = data_loader.rel2candidates[relation]
            # eval_task[2] holds the query triples; [0][0] is (h, r, t)
            query_head = eval_task[2][0][0][0]
            correct_tail = eval_task[2][0][0][2]
            support = eval_task[0][0]
            candidate_size, query_head_seen, support_entities_seen, correct_tail_seen, head_in_support, tail_in_support, support_entities = self._diagnostics(
                query_head, correct_tail, curr_rel, support, candidates)
            diagnostics.append({
                'MRR': tmp['mrr'],
                'H10': tmp['H10'],
                'H5': tmp['H5'],
                'H1': tmp['H1'],
                'query_head': self.id2ent[str(query_head)],
                'correct_tail': self.id2ent[str(correct_tail)],
                'query_head_seen': query_head_seen,
                'correct_tail_seen': correct_tail_seen,
                'head_seen_in_support': head_in_support,
                'tail_seen_in_support': tail_in_support,
                'support_entities': support_entities,
                'support_entities_seen': support_entities_seen,
                'candidate_size': candidate_size,
                'rel': relation,
                'synthetic_type': relation[:3] if self.parameter['is_synthetic'] else 'not_synthetic',
                'data': self.parameter['dataset'],
                'num_shots': self.parameter['few'],
            })
        data['loss'] += l.item()
        # print current temp data dynamically
        for k in data.keys():
            temp[k] = data[k] / t
        sys.stdout.write("{}\tVal. loss: {:.3f}\tMRR: {:.3f}\tHits@10: {:.3f}\tHits@5: {:.3f}\tHits@1: {:.3f}\r".format(
            t, temp['loss'], temp['MRR'], temp['Hits@10'], temp['Hits@5'], temp['Hits@1']))
        sys.stdout.flush()
    # NOTE(review): the diagnostics file is written even when save_all is
    # False (the list is then empty) — confirm this is intended.
    flag = '_synthetic' if self.parameter['is_synthetic'] else ''
    file = self.parameter['experiment_name'] + flag + '_diagnostics.json'
    print('Saving raw diagnostics under: {}'.format(file))
    torch.save(diagnostics, os.path.join('best_models', 'diagnostics', file))
    toc = time.time()
    # print overall evaluation result and return it
    for k in data.keys():
        data[k] = round(data[k] / t, 3)
    logging.info("{} Val. loss: {:.3f} MRR: {:.3f} Hits@10: {:.3f} Hits@5: {:.3f} Hits@1: {:.3f} Time: {:.3f}\r".format(
        t, data['loss'], data['MRR'], data['Hits@10'], data['Hits@5'], data['Hits@1'], toc - tic))
    if isTest:
        with open(os.path.join(self.parameter['log_dir'], flag + 'test_scores.txt'), "w") as f:
            f.write('MRR | Hits@10 | Hits@5 | Hits@1 : {:.4f} | {:.4f} | {:.4f} | {:.3f}\n'.format(data['MRR'], data['Hits@10'], data['Hits@5'], data['Hits@1']))
    return data
def eval_by_relation(self, isTest=False, iteration=None):
    """Evaluate per relation and micro-average metrics over all tasks.

    Prints per-relation MRR/Hits@k and returns the overall dict
    {'MRR', 'Hits@1', 'Hits@5', 'Hits@10'} averaged over all tasks of
    all relations. Note: ``iteration`` is accepted but unused here.
    """
    self.meta_learner.eval()
    self.meta_learner.rel_q_sharing = dict()
    if isTest:
        data_loader = self.test_data_loader
    else:
        data_loader = self.dev_data_loader
    data_loader.curr_tri_idx = 0
    # running totals across all relations (micro average)
    all_data = {'MRR': 0, 'Hits@1': 0, 'Hits@5': 0, 'Hits@10': 0}
    all_t = 0
    all_ranks = []
    for rel in data_loader.all_rels:
        print("rel: {}, num_cands: {}, num_tasks:{}".format(
            rel, len(data_loader.rel2candidates[rel]), len(data_loader.tasks[rel][self.few:])))
        # per-relation accumulators
        data = {'MRR': 0, 'Hits@1': 0, 'Hits@5': 0, 'Hits@10': 0}
        temp = dict()
        t = 0
        ranks = []
        while True:
            # loader returns 'EOT' when the relation's tasks are exhausted
            eval_task, curr_rel = data_loader.next_one_on_eval_by_relation(rel)
            if eval_task == 'EOT':
                break
            t += 1
            _, p_score, n_score = self.do_one_step(eval_task, curr_rel=rel)
            # positive score goes last, as expected by rank_predict
            x = torch.cat([n_score, p_score], 1).squeeze()
            self.rank_predict(data, x, ranks)
            for k in data.keys():
                temp[k] = data[k] / t
            sys.stdout.write("{}\tMRR: {:.3f}\tHits@10: {:.3f}\tHits@5: {:.3f}\tHits@1: {:.3f}\r".format(
                t, temp['MRR'], temp['Hits@10'], temp['Hits@5'], temp['Hits@1']))
            sys.stdout.flush()
        print("{}\tMRR: {:.3f}\tHits@10: {:.3f}\tHits@5: {:.3f}\tHits@1: {:.3f}\r".format(
            t, temp['MRR'], temp['Hits@10'], temp['Hits@5'], temp['Hits@1']))
        for k in data.keys():
            all_data[k] += data[k]
        all_t += t
        all_ranks.extend(ranks)
    print('Overall')
    for k in all_data.keys():
        all_data[k] = round(all_data[k] / all_t, 3)
    print("{}\tMRR: {:.3f}\tHits@10: {:.3f}\tHits@5: {:.3f}\tHits@1: {:.3f}\r".format(
        all_t, all_data['MRR'], all_data['Hits@10'], all_data['Hits@5'], all_data['Hits@1']))
    return all_data
|
{"/models/relation_meta_learner.py": ["/models/modules.py"], "/models/meta_learner.py": ["/models/relation_meta_learner.py", "/models/rgcn_model.py"], "/trainer.py": ["/utils.py"], "/models/modules.py": ["/models/rgcn_model.py"], "/main.py": ["/data_loader.py", "/trainer.py", "/models/meta_learner.py", "/utils.py"]}
|
8,962
|
dorajam/few-shot-link-prediction
|
refs/heads/main
|
/models/modules.py
|
from collections import OrderedDict
import numpy as np
import torch
from torch import nn
from .rgcn_model import RGCN
class MLPModule(nn.Module):
    """Relation meta-learner head: a three-layer MLP that maps the
    flattened per-shot support embeddings to one scalar per batch element.

    ``parameter`` must provide 'embed_dim', 'few' and 'dropout'.
    """

    def __init__(self, parameter):
        super(MLPModule, self).__init__()
        self.embedding_dim = parameter['embed_dim']
        self.out_dim = 1
        shots = parameter['few']
        dropout_p = parameter['dropout']
        # layer 1: 2*d -> d; batch-norm runs over the shot dimension
        self.rel_fc1 = nn.Sequential(OrderedDict([
            ('fc', nn.Linear(2 * self.embedding_dim, self.embedding_dim)),
            ('bn', nn.BatchNorm1d(shots)),
            ('relu', nn.LeakyReLU()),
            ('drop', nn.Dropout(p=dropout_p)),
        ]))
        # layer 2: d -> d
        self.rel_fc2 = nn.Sequential(OrderedDict([
            ('fc', nn.Linear(self.embedding_dim, self.embedding_dim)),
            ('bn', nn.BatchNorm1d(shots)),
            ('relu', nn.LeakyReLU()),
            ('drop', nn.Dropout(p=dropout_p)),
        ]))
        # output layer: d -> 1, no activation
        self.rel_fc3 = nn.Sequential(OrderedDict([
            ('fc', nn.Linear(self.embedding_dim, self.out_dim)),
            ('bn', nn.BatchNorm1d(shots)),
        ]))
        # xavier-init the linear weights, in construction order
        for block in (self.rel_fc1, self.rel_fc2, self.rel_fc3):
            nn.init.xavier_normal_(block.fc.weight)

    def forward(self, support_set, support_emb, background_graph=None):
        """Return a (batch, 1) tensor; ``support_set`` and
        ``background_graph`` are accepted for interface parity only."""
        batch, shots = support_emb.shape[0], support_emb.shape[1]
        hidden = support_emb.contiguous().view(batch, shots, -1)
        hidden = self.rel_fc3(self.rel_fc2(self.rel_fc1(hidden)))
        # average over the shot dimension, then shape to (batch, out_dim)
        return torch.mean(hidden, 1).view(batch, self.out_dim)
class RGCNModule(nn.Module):
    """Scores a support set by encoding its entities with an RGCN over the
    background graph, mean-pooling the endpoint embeddings, and passing
    the result through a small MLP head with a sigmoid output."""

    def __init__(self, parameter):
        super(RGCNModule, self).__init__()
        self.rgcn_embed_dim = parameter['rgcn_embed_dim']
        self.out_dim = 1  # one scalar score per batch element
        self.device = parameter['device']
        self.rgcn = RGCN(parameter)
        # two-layer MLP head mapping pooled embeddings to the score
        self.mlp = nn.Sequential(
            nn.Linear(in_features=self.rgcn_embed_dim, out_features=self.rgcn_embed_dim),
            nn.ReLU(),
            nn.Linear(in_features=self.rgcn_embed_dim, out_features=self.out_dim),
        )

    def forward(self, support_set, support_emb, background_graph):
        # (head, tail) ids for every triple in every batch element:
        # shape [batch, shots, 2]; support_emb is accepted but unused here
        indices = torch.LongTensor([[[triplet[0], triplet[2]] for triplet in batch_of_triplets] for batch_of_triplets in support_set])
        # unique node ids occurring anywhere in the batch
        batch_nodes = np.array(list(set(indices.flatten().tolist())))
        embeddings, node_ids = self.rgcn(batch_nodes, background_graph)
        # map original node id -> row index in `embeddings`
        # (assumes node_ids enumerates the rows of `embeddings` in order —
        # TODO confirm against the RGCN implementation)
        m = dict()
        for i, n in enumerate(node_ids):
            m[n.item()] = i
        # re-index the (head, tail) ids into embedding rows
        indices = torch.LongTensor([[[m[triplet[0]], m[triplet[2]]] for triplet in batch_of_triplets] for batch_of_triplets in support_set]).to(self.device)
        embeddings_mean = torch.mean(embeddings[indices], dim=(1, 2))  # [bs, shots, 2, embed_size] -> [bs, embed_size]
        out = self.mlp(embeddings_mean)
        out = torch.sigmoid(out)
        return out
# class RGCNModule(nn.Module):
# def __init__(self, parameter, embeddings, dataset):
# super(RGCNModule, self).__init__()
# self.device = parameter['device']
# self.background = dataset['background']
# self.ent2id = dataset['ent2id']
# self.rel2id = dataset['rel2id']
# self.num_shots = parameter['few']
# self.num_entities = parameter['num_entities']
# self.embedding_dim = parameter['embed_dim']
# self.out_dim = 1
# self.embeddings = embeddings
# self.conv1 = RGCNConv(
# in_channels=self.embedding_dim,
# out_channels=self.embedding_dim,
# num_relations=parameter['num_relations'],
# num_bases=parameter['num_bases'],
# )
# self.conv2 = RGCNConv(
# in_channels=self.embedding_dim,
# out_channels=self.embedding_dim,
# num_relations=parameter['num_relations'],
# num_bases=parameter['num_bases'],
# )
# self.mlp = nn.Sequential(
# nn.Linear(in_features=self.embedding_dim * 2 * self.num_shots, out_features=self.embedding_dim),
# nn.ReLU(),
# nn.Linear(in_features=self.embedding_dim, out_features=self.out_dim),
# )
# def forward(self, tasks, iseval):
# support = tasks[0]
# # format support and BG into RGCN format
# edge_index, edge_type = self._collate(support)
# if not iseval:
# x = F.relu(
# self.conv1(self.embeddings.embedding, edge_index, edge_type,
# size=[self.num_entities, self.num_entities])
# )
# # embeddings here is the data, not the embeddings obj
# self.final_embeddings = self.conv2(x, edge_index, edge_type, size=[
# self.num_entities, self.num_entities]) # out dim: [num_entities, embedding_dim]
# else:
# self.final_embeddings = self.final_embeddings.detach()
# support_ids = lookup_ids(self.ent2id, data=tasks[0])
# support = self.final_embeddings[support_ids]
# flat_support = support.reshape(support.shape[0], -1)
# out = self.mlp(flat_support)
# out = torch.sigmoid(out)
# return out
# def _str_to_ids(self, batch):
# batch_ids = torch.tensor(lookup_ids(self.ent2id, self.rel2id, batch)).to(self.device).transpose(0, -1)
# batch_ids = batch_ids.reshape(3, -1) # 3, num_shots * batch_size
# edge_index = batch_ids[[0, 2]]
# edge_type = batch_ids[1]
# return edge_index, edge_type
# def _collate(self, support_batch):
# edge_index, edge_type = self._str_to_ids(support_batch)
# # convert background graph to ids
# background_ids = torch.tensor(lookup_ids(self.ent2id, self.rel2id, self.background)).transpose(0, 1).to(self.device)
# background_edge_index = background_ids[[0, 2]]
# background_edge_type = background_ids[1]
# batch_size = len(support_batch)
# input_edges = torch.zeros(2, batch_size * self.num_shots + background_edge_index.shape[1]).long().to(
# device=self.device)
# input_edges[:, :self.num_shots * batch_size] = edge_index
# input_edges[:, self.num_shots * batch_size:] = background_edge_index
# input_type = torch.zeros(batch_size * self.num_shots + background_edge_type.shape[0]).long().to(
# device=self.device)
# input_type[:self.num_shots * batch_size] = edge_type
# input_type[self.num_shots * batch_size:] = background_edge_type
# return input_edges, input_type
|
{"/models/relation_meta_learner.py": ["/models/modules.py"], "/models/meta_learner.py": ["/models/relation_meta_learner.py", "/models/rgcn_model.py"], "/trainer.py": ["/utils.py"], "/models/modules.py": ["/models/rgcn_model.py"], "/main.py": ["/data_loader.py", "/trainer.py", "/models/meta_learner.py", "/utils.py"]}
|
8,963
|
dorajam/few-shot-link-prediction
|
refs/heads/main
|
/main.py
|
import argparse
import logging
import random
import numpy as np
import torch
import wandb # dashboard
from data_loader import read_dataset, DataLoader, SyntheticDataLoader
from trainer import Trainer
from models.meta_learner import MetaLearner
from utils import initialize_experiment
if __name__ == '__main__':
    # Entry point: parse CLI flags, seed RNGs, build data loaders and the
    # meta-learner, then train or evaluate depending on --step.
    logging.basicConfig(level=logging.INFO)
    args = argparse.ArgumentParser()
    # Experiment setup params
    args.add_argument("-log_dir", "--log_dir", default="log", type=str)
    args.add_argument("-state_dir", "--state_dir", default="state", type=str)
    args.add_argument("-eval_ckpt", "--eval_ckpt", default=None, type=str)
    args.add_argument("-eval_by_rel", "--eval_by_rel", default=False, action='store_true')
    # FIX: default was the typo "deault_name"
    args.add_argument("-exp", "--experiment_name", default="default_name", type=str)
    args.add_argument("-is_synthetic", "--is_synthetic", default=False, action='store_true')
    args.add_argument("-dash", "--dashboard", default=False, action='store_true')
    args.add_argument("-seed", "--seed", default=42, type=int)
    args.add_argument("-abla", "--ablation", default=False, action='store_true')
    args.add_argument("-gpu", "--device", default=0, type=int)
    args.add_argument("-step", "--step", default="train", type=str, choices=['train', 'test', 'dev'])
    args.add_argument("-save_all", "--save_all", default=False, action='store_true')
    # Data processing pipeline params
    args.add_argument("-data", "--dataset", default="Wiki-One", type=str)  # ["NELL-One", "Wiki-One"]
    args.add_argument("-form", "--data_form", default="In-Train", type=str)  # ["Pre-Train", "In-Train", "Discard"]
    args.add_argument("-few", "--few", default=5, type=int)
    args.add_argument("-nq", "--num_query", default=10, type=int)
    args.add_argument("-inverse", "--add_inverse_edges", default=False, action='store_true')
    # Training regime params
    args.add_argument("-iter", "--iterations", default=100000, type=int)
    args.add_argument("-prt_iter", "--print_iter", default=100, type=int)
    args.add_argument("-eval_iter", "--eval_iter", default=1000, type=int)
    args.add_argument("-ckpt_iter", "--checkpoint_iter", default=1000, type=int)
    args.add_argument("-bs", "--batch_size", default=1024, type=int)
    args.add_argument("-lr", "--learning_rate", default=0.001, type=float)
    args.add_argument("-es", "--early_stopping_patience", default=30, type=int)
    args.add_argument("-metric", "--metric", default="Hits@10", choices=["MRR", "Hits@10", "Hits@5", "Hits@1"])
    args.add_argument("-b", "--beta", default=5, type=float)
    args.add_argument("-m", "--margin", default=1, type=float)
    args.add_argument("-reg", "--reg_weight", default=0.01, type=float)
    args.add_argument("-lr_step", "--lr_step", default=10, type=float)
    args.add_argument("-lr_rate", "--lr_rate", default=1., type=float)
    # Model params
    args.add_argument("-rmeta", "--rmeta_learner", default="MetaR", choices=["MetaR", "Modular", "Simple"])
    args.add_argument("-module", "--module_type", default="RGCN", choices=["MLP", "RGCN"])
    args.add_argument('-rgcn', '--use_rgcn', action='store_true', default=False)
    # rgcn
    args.add_argument("-nb", "--num_bases", default=4, type=int)
    args.add_argument("-emb", "--embed_dim", default=50, type=int)
    args.add_argument("-rgcn_emb", "--rgcn_embed_dim", default=20, type=int)
    args.add_argument("-gcn_l", "--num_gcn_layers", default=2, type=int)
    args.add_argument("-e_dp", "--edge_dropout", default=0.0, type=float)
    args.add_argument("-ns", "--neighborhood_sample_rate", default=20, type=int)
    args.add_argument('--has_attn', '-attn', action='store_true', default=False)
    args.add_argument("-agg", "--gnn_agg_type", default="sum", choices=["sum", "mlp", "gru"])
    # prototypes
    args.add_argument("-pd", "--prototype_dim", default=100, type=int)
    args.add_argument("-np", "--num_prototypes", default=4, type=int)
    args.add_argument("-dp", "--dropout", default=0.5, type=float)
    args = args.parse_args()
    # flatten the namespace into a plain dict used throughout the project
    params = {}
    for k, v in vars(args).items():
        params[k] = v
    # control random seed
    if params['seed'] is not None:
        SEED = params['seed']
        torch.manual_seed(SEED)
        torch.cuda.manual_seed(SEED)
        torch.backends.cudnn.deterministic = True
        np.random.seed(SEED)
        random.seed(SEED)
    dataset = read_dataset(params['dataset'], params['data_form'], add_inverse_edges=params['add_inverse_edges'])
    params['num_entities'] = len(dataset['ent2id'].keys())
    # relation ids are assumed contiguous, so max id + 1 is the count
    params['num_relations'] = max(dataset['background'].edata['type']).item() + 1
    initialize_experiment(params)
    if params['dashboard']:
        wandb.init(project="logic-nets", config=params, name=args.experiment_name)
    # map the integer GPU flag to a torch device (negative -> CPU)
    if params['device'] < 0:
        params['device'] = torch.device('cpu')
    else:
        params['device'] = torch.device('cuda:' + str(params['device']))
    # data_loader
    train_data_loader = DataLoader(dataset, params, step='train')
    dev_data_loader = DataLoader(dataset, params, step='dev')
    test_data_loader = DataLoader(dataset, params, step='test')
    if params['is_synthetic']:
        synthetic_test_data_loader = SyntheticDataLoader(dataset, params, step='test')
        data_loaders = [train_data_loader, dev_data_loader, test_data_loader, synthetic_test_data_loader]
    else:
        data_loaders = [train_data_loader, dev_data_loader, test_data_loader]
    # model
    meta_learner = MetaLearner(params, background_graph=dataset['background'])
    model_params = list(meta_learner.parameters())
    logging.info('Total number of parameters: %d' % sum(map(lambda x: x.numel(), model_params)))
    if params['dashboard']:
        wandb.watch(meta_learner, log="all")
    # trainer
    trainer = Trainer(meta_learner, data_loaders, params)
    if params['step'] == 'train':
        trainer.train()
        print("test")
        print(params['experiment_name'])
        # after training, reload the best state and score the test set
        trainer.reload()
        trainer.eval(isTest=True, save_all=params['save_all'])
    elif params['step'] == 'test':
        print(params['experiment_name'])
        if params['eval_by_rel']:
            trainer.eval_by_relation(isTest=True)
        else:
            trainer.eval(isTest=True, save_all=params['save_all'], synthetic=params['is_synthetic'])
    elif params['step'] == 'dev':
        print(params['experiment_name'])
        if params['eval_by_rel']:
            trainer.eval_by_relation(isTest=False)
        else:
            trainer.eval(isTest=False)
|
{"/models/relation_meta_learner.py": ["/models/modules.py"], "/models/meta_learner.py": ["/models/relation_meta_learner.py", "/models/rgcn_model.py"], "/trainer.py": ["/utils.py"], "/models/modules.py": ["/models/rgcn_model.py"], "/main.py": ["/data_loader.py", "/trainer.py", "/models/meta_learner.py", "/utils.py"]}
|
8,975
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/garden/serializers.py
|
from rest_framework import serializers
from .models import Plants, Category, SubCategory, Places
from django.contrib.auth.models import User
class PlantsSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Plants; owner is exposed read-only as the username."""
    owner = serializers.ReadOnlyField(source='owner.username')

    class Meta:
        model = Plants
        fields = '__all__'
class CategorySerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Category; owner is exposed read-only as the username."""
    owner = serializers.ReadOnlyField(source='owner.username')

    class Meta:
        model = Category
        fields = '__all__'
class SubCategorySerializer(serializers.HyperlinkedModelSerializer):
    """Serializes SubCategory; owner is exposed read-only as the username."""
    owner = serializers.ReadOnlyField(source='owner.username')

    class Meta:
        model = SubCategory
        fields = '__all__'
class PlacesSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Places; owner is exposed read-only as the username."""
    owner = serializers.ReadOnlyField(source='owner.username')

    class Meta:
        model = Places
        fields = '__all__'
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a user together with hyperlinks to the plants they own."""
    # reverse relation via Plants.owner's related_name='plants';
    # requires a URL pattern named 'plants-detail'
    plants = serializers.HyperlinkedRelatedField(many=True, view_name='plants-detail', read_only=True)

    class Meta:
        model = User
        fields = ('url', 'id', 'username', 'plants')
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,976
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/plants/models.py
|
from django.db import models
from model_utils.models import TimeStampedModel
from model_utils import Choices
from django.contrib.auth.models import User
class Plants(TimeStampedModel):
    """A plant owned by a user, located in a Places and typed by a Category."""
    name = models.CharField(max_length=255)
    # nullable so a plant can exist without (or survive losing) an owner link
    owner = models.ForeignKey('auth.User', related_name='plants', on_delete=models.CASCADE, blank=True, null=True)
    photo = models.ImageField()
    last_watering_time = models.DateTimeField()
    place = models.ForeignKey('Places', on_delete=models.CASCADE, blank=True, null=True)
    category = models.ForeignKey('Category', on_delete=models.CASCADE, blank=True, null=True)

    class Meta:
        # 'created' is supplied by TimeStampedModel
        ordering = ('created',)

    def __str__(self):
        return self.name
class Places(TimeStampedModel):
    """A location (e.g. a spot in the home/garden) where plants are kept."""
    name = models.CharField(max_length=255)
    owner = models.ForeignKey('auth.User', related_name='places', on_delete=models.CASCADE, blank=True, null=True)

    def __str__(self):
        return self.name
class Category(TimeStampedModel):
    """A care profile shared by plants: watering cadence, light, soil, season."""
    # NOTE(review): label casing is inconsistent ('autumn', 'sand', ... are
    # lowercase) — flagged only, since codes are already stored in the DB.
    SEASON_CHOICES = (
        ('SP', 'Spring',),
        ('SU', 'Summer',),
        ('A', 'autumn',),
        ('W', 'Winter',),
    )
    SUNLIGHT_CHOICES = (
        ('SL', 'Sunlight',),
        ('SH', 'Shadow',),
    )
    SOIL_CHOICES = (
        ('SA', 'sand',),
        ('SI', 'silt',),
        ('C', 'clay',),
    )
    name = models.CharField(max_length=255)
    owner = models.ForeignKey('auth.User', related_name='categories', on_delete=models.CASCADE, blank=True, null=True)
    # NOTE(review): TimeField stores a time of day, not a duration — confirm
    # that 'water_every' is really meant as an interval.
    water_every = models.TimeField()
    sunlight = models.CharField(max_length=2, choices=SUNLIGHT_CHOICES,)
    soil = models.CharField(max_length=2, choices=SOIL_CHOICES,)
    season = models.CharField(max_length=2, choices=SEASON_CHOICES,)

    def __str__(self):
        return self.name
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,977
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/garden/migrations/0001_initial.py
|
# Generated by Django 2.1.5 on 2019-03-12 15:37
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial schema for the garden app. Do not hand-edit the
    # operations below; schema changes belong in new migrations.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('water_every', models.TimeField()),
                ('sunlight', models.CharField(choices=[('SL', 'Sunlight'), ('SH', 'Shadow')], max_length=2)),
                ('soil', models.CharField(choices=[('SA', 'sand'), ('SI', 'silt'), ('C', 'clay')], max_length=2)),
                ('season', models.CharField(choices=[('SP', 'Spring'), ('SU', 'Summer'), ('A', 'autumn'), ('W', 'Winter')], max_length=2)),
                ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='category', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Places',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='places', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Plants',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('name', models.CharField(max_length=255)),
                ('photo', models.ImageField(upload_to='')),
                ('last_watering_time', models.DateTimeField()),
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='garden.Category')),
                ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='plants', to=settings.AUTH_USER_MODEL)),
                ('place', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='garden.Places')),
            ],
            options={
                'ordering': ('created',),
            },
        ),
        migrations.CreateModel(
            name='SubCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='subcategory', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # added after CreateModel to break the Category <-> SubCategory cycle
        migrations.AddField(
            model_name='category',
            name='sub_category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='garden.SubCategory'),
        ),
    ]
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,978
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/accounts/serializers.py
|
from rest_framework import serializers
from django.contrib.auth.models import User
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a user together with hyperlinks to the plants they own."""
    # reverse relation; requires a URL pattern named 'plants-detail'
    plants = serializers.HyperlinkedRelatedField(many=True, view_name='plants-detail', read_only=True)

    class Meta:
        model = User
        fields = ('url', 'id', 'username', 'plants')
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,979
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/plants/urls.py
|
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from plants import views
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'plants', views.PlantsViewSet)
router.register(r'category', views.CategoryViewSet)
# The API URLs are now determined automatically by the router.
# NOTE(review): views.PlacesViewSet exists but is not routed here — confirm intended.
urlpatterns = [
    path('', include(router.urls)),
]
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,980
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/plants/admin.py
|
from django.contrib import admin
# Register your models here.
from .models import Plants, Category, Places
# Expose the plant-care models in the Django admin site.
admin.site.register(Plants)
admin.site.register(Category)
admin.site.register(Places)
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,981
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/accounts/urls.py
|
from django.urls import include, path, re_path
from rest_auth.views import PasswordResetConfirmView
from rest_framework.routers import DefaultRouter
from accounts import views
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'user', views.UserViewSet)
# The API URLs are now determined automatically by the router.
urlpatterns = [
    path('', include(router.urls)),
    # django-rest-auth login/logout/password endpoints
    path('rest-auth/', include('rest_auth.urls')),
    path('rest-auth/registration/', include('rest_auth.registration.urls')),
    # uid/token pattern consumed by PasswordResetConfirmView
    re_path(r'^rest-auth/password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', PasswordResetConfirmView.as_view(),
            name='password_reset_confirm'),
]
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,982
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/garden/urls.py
|
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from garden import views
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'garden', views.PlantsViewSet)
router.register(r'category', views.CategoryViewSet)
router.register(r'users', views.UserViewSet)
# The API URLs are now determined automatically by the router.
# NOTE(review): PlacesViewSet and SubCategoryViewSet exist in views but are
# not routed here — confirm intended.
urlpatterns = [
    path('', include(router.urls)),
]
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,983
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/plants/views.py
|
from plants.models import Plants, Places, Category
from plants.permissions import IsOwnerOrReadOnly
from plants.serializers import PlantsSerializer, PlacesSerializer, CategorySerializer
from django.contrib.auth.models import User
from rest_framework import permissions
from rest_framework import renderers
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
class PlantsViewSet(viewsets.ModelViewSet):
    """
    This viewset automatically provides `list`, `create`, `retrieve`,
    `update` and `destroy` actions.
    """
    queryset = Plants.objects.all()
    serializer_class = PlantsSerializer
    # anonymous users get read-only access; only the owner may modify
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly,)

    def perform_create(self, serializer):
        # stamp the authenticated user as owner on creation
        serializer.save(owner=self.request.user)
class PlacesViewSet(viewsets.ModelViewSet):
    """
    This viewset automatically provides `list` and `detail` actions.
    """
    queryset = Places.objects.all()
    serializer_class = PlacesSerializer
    # anonymous users get read-only access; only the owner may modify
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly,)

    def perform_create(self, serializer):
        # stamp the authenticated user as owner on creation
        serializer.save(owner=self.request.user)
class CategoryViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for plant-care categories."""
    queryset = Category.objects.all()
    serializer_class = CategorySerializer
    # anonymous users get read-only access; only the owner may modify
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly,)

    def perform_create(self, serializer):
        # stamp the authenticated user as owner on creation
        serializer.save(owner=self.request.user)
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,984
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/accounts/views.py
|
from django.contrib.auth.models import User
from rest_framework import viewsets
from accounts.serializers import UserSerializer
class UserViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only list/retrieve endpoints for users."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,985
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/plants/serializers.py
|
from rest_framework import serializers
from .models import Plants, Category, Places
class PlantsSerializer(serializers.HyperlinkedModelSerializer):
owner = serializers.ReadOnlyField(source='owner.username')
class Meta:
model = Plants
fields = '__all__'
class CategorySerializer(serializers.HyperlinkedModelSerializer):
owner = serializers.ReadOnlyField(source='owner.username')
class Meta:
model = Category
fields = '__all__'
class PlacesSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes every ``Places`` field plus a read-only ``owner`` username."""
    # Rendered from the related user's username; never writable via the API.
    owner = serializers.ReadOnlyField(source='owner.username')

    class Meta:
        model = Places
        fields = '__all__'
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,986
|
OmarElraies/green-hub-api
|
refs/heads/master
|
/greenhub_api/garden/views.py
|
from garden.models import Plants, Places, Category, SubCategory
from garden.permissions import IsOwnerOrReadOnly
from garden.serializers import PlantsSerializer, SubCategorySerializer
from garden.serializers import UserSerializer, PlacesSerializer, CategorySerializer
from django.contrib.auth.models import User
from rest_framework import permissions
from rest_framework import renderers
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
class PlantsViewSet(viewsets.ModelViewSet):
    """Full CRUD endpoints (`list`, `create`, `retrieve`, `update`,
    `destroy`) for ``Plants``.

    Anonymous users get read-only access; only the record's owner may
    modify it.
    """

    serializer_class = PlantsSerializer
    queryset = Plants.objects.all()
    permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)

    def perform_create(self, serializer):
        # Record the requesting user as the owner of the new plant.
        serializer.save(owner=self.request.user)
class PlacesViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for ``Places``.

    This viewset automatically provides `list` and `detail` actions.
    Anonymous users get read-only access; only the record's owner may
    modify it.
    """

    serializer_class = PlacesSerializer
    queryset = Places.objects.all()
    permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)

    def perform_create(self, serializer):
        # Record the requesting user as the owner of the new place.
        serializer.save(owner=self.request.user)
class CategoryViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for ``Category`` records.

    Anonymous users get read-only access; only the record's owner may
    modify it.
    """

    serializer_class = CategorySerializer
    queryset = Category.objects.all()
    permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)

    def perform_create(self, serializer):
        # Record the requesting user as the owner of the new category.
        serializer.save(owner=self.request.user)
class SubCategoryViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for ``SubCategory`` records.

    Anonymous users get read-only access; only the record's owner may
    modify it.
    """

    serializer_class = SubCategorySerializer
    queryset = SubCategory.objects.all()
    permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)

    def perform_create(self, serializer):
        # Record the requesting user as the owner of the new subcategory.
        serializer.save(owner=self.request.user)
class UserViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only endpoints (``list``/``retrieve``) for Django users."""

    serializer_class = UserSerializer
    queryset = User.objects.all()
|
{"/greenhub_api/plants/admin.py": ["/greenhub_api/plants/models.py"], "/greenhub_api/plants/serializers.py": ["/greenhub_api/plants/models.py"]}
|
8,990
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/service/test_methods.py
|
from dbus_next.service import ServiceInterface, method
from dbus_next.aio import MessageBus
from dbus_next import Message, MessageType, ErrorType, Variant, SignatureTree, DBusError, MessageFlag
import pytest
class ExampleInterface(ServiceInterface):
    """Synchronous service-method fixture: echo/ping variants plus renamed,
    disabled, and error-raising methods.

    Each handler asserts it is bound to this class before returning, which
    catches decorator mix-ups between the sync and async fixtures.
    """

    def __init__(self, name):
        super().__init__(name)

    @method()
    def echo(self, what: 's') -> 's':
        assert type(self) is ExampleInterface
        return what

    @method()
    def echo_multiple(self, what1: 's', what2: 'ss') -> 'ss':
        assert type(self) is ExampleInterface
        return [what1, what2]

    @method()
    def echo_containers(self, array: 'as', variant: 'v', dict_entries: 'a{sv}',
                        struct: '(s(s(v)))') -> 'asva{sv}(s(s(v)))':
        assert type(self) is ExampleInterface
        return [array, variant, dict_entries, struct]

    @method()
    def ping(self):
        assert type(self) is ExampleInterface
        pass

    # Exposed on the bus under the member name 'renamed'.
    @method(name='renamed')
    def original_name(self):
        assert type(self) is ExampleInterface
        pass

    # Excluded from the exported interface (disabled).
    @method(disabled=True)
    def not_here(self):
        assert type(self) is ExampleInterface
        pass

    @method()
    def throws_unexpected_error(self):
        assert type(self) is ExampleInterface
        raise Exception('oops')

    @method()
    def throws_dbus_error(self):
        assert type(self) is ExampleInterface
        raise DBusError('test.error', 'an error ocurred')
class AsyncInterface(ServiceInterface):
    """Coroutine twin of ``ExampleInterface``: every handler is declared
    ``async`` so the service layer's coroutine support is exercised.

    Each handler asserts it is bound to this class before returning.
    """

    def __init__(self, name):
        super().__init__(name)

    @method()
    async def echo(self, what: 's') -> 's':
        assert type(self) is AsyncInterface
        return what

    @method()
    async def echo_multiple(self, what1: 's', what2: 's') -> 'ss':
        assert type(self) is AsyncInterface
        return [what1, what2]

    @method()
    async def echo_containers(self, array: 'as', variant: 'v', dict_entries: 'a{sv}',
                              struct: '(s(s(v)))') -> 'asva{sv}(s(s(v)))':
        assert type(self) is AsyncInterface
        return [array, variant, dict_entries, struct]

    @method()
    async def ping(self):
        assert type(self) is AsyncInterface
        pass

    # Exposed on the bus under the member name 'renamed'.
    @method(name='renamed')
    async def original_name(self):
        assert type(self) is AsyncInterface
        pass

    # Excluded from the exported interface (disabled).
    @method(disabled=True)
    async def not_here(self):
        assert type(self) is AsyncInterface
        pass

    @method()
    async def throws_unexpected_error(self):
        assert type(self) is AsyncInterface
        raise Exception('oops')

    @method()
    async def throws_dbus_error(self):
        # Consistency fix: this handler was a plain `def` while every other
        # member of this async fixture is `async def`; dbus-next dispatches
        # both, so the observable reply (the error) is unchanged.
        assert type(self) is AsyncInterface
        raise DBusError('test.error', 'an error ocurred')
@pytest.mark.parametrize('interface_class', [ExampleInterface, AsyncInterface])
@pytest.mark.asyncio
async def test_methods(interface_class):
    """Exercise exported service methods end-to-end over two bus connections.

    bus1 exports the interface; bus2 acts as the remote caller.
    """
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()

    interface = interface_class('test.interface')
    export_path = '/test/path'

    async def call(member, signature='', body=[], flags=MessageFlag.NONE):
        # Helper: invoke `member` on the exported interface from bus2.
        return await bus2.call(
            Message(destination=bus1.unique_name,
                    path=export_path,
                    interface=interface.name,
                    member=member,
                    signature=signature,
                    body=body,
                    flags=flags))

    bus1.export(export_path, interface)

    # A single string argument is echoed back unchanged.
    body = ['hello world']
    reply = await call('echo', 's', body)
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body[0]
    assert reply.signature == 's'
    assert reply.body == body

    # Multiple out args come back as a list matching the 'ss' signature.
    body = ['hello', 'world']
    reply = await call('echo_multiple', 'ss', body)
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body[0]
    assert reply.signature == 'ss'
    assert reply.body == body

    # Container types (array, variant, dict, nested struct) round-trip intact.
    body = [['hello', 'world'],
            Variant('v', Variant('(ss)', ['hello', 'world'])), {
                'foo': Variant('t', 100)
            }, ['one', ['two', [Variant('s', 'three')]]]]
    signature = 'asva{sv}(s(s(v)))'
    SignatureTree(signature).verify(body)
    reply = await call('echo_containers', signature, body)
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body[0]
    assert reply.signature == signature
    assert reply.body == body

    # A method with no out args returns an empty body.
    reply = await call('ping')
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body[0]
    assert reply.signature == ''
    assert reply.body == []

    # An unexpected exception surfaces as the generic service error.
    reply = await call('throws_unexpected_error')
    assert reply.message_type == MessageType.ERROR, reply.body[0]
    assert reply.error_name == ErrorType.SERVICE_ERROR.value, reply.body[0]

    # A DBusError keeps its own error name and message body.
    reply = await call('throws_dbus_error')
    assert reply.message_type == MessageType.ERROR, reply.body[0]
    assert reply.error_name == 'test.error', reply.body[0]
    assert reply.body == ['an error ocurred']

    # With NO_REPLY_EXPECTED no reply is produced — even for errors.
    reply = await call('ping', flags=MessageFlag.NO_REPLY_EXPECTED)
    assert reply is None
    reply = await call('throws_unexpected_error', flags=MessageFlag.NO_REPLY_EXPECTED)
    assert reply is None
    reply = await call('throws_dbus_error', flags=MessageFlag.NO_REPLY_EXPECTED)
    assert reply is None
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
8,991
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/glib/proxy_object.py
|
from ..proxy_object import BaseProxyObject, BaseProxyInterface
from ..message_bus import BaseMessageBus
from ..message import Message
from ..errors import DBusError
from ..signature import Variant
from ..constants import ErrorType
from .. import introspection as intr
import xml.etree.ElementTree as ET
from typing import Union, List
# glib is optional
try:
from gi.repository import GLib
except ImportError:
pass
class ProxyInterface(BaseProxyInterface):
    """A class representing a proxy to an interface exported on the bus by
    another client for the GLib :class:`MessageBus <dbus_next.glib.MessageBus>`
    implementation.

    This class is not meant to be constructed directly by the user. Use
    :func:`ProxyObject.get_interface()
    <dbus_next.glib.ProxyObject.get_interface>` on a GLib proxy
    object to get a proxy interface.

    This class exposes methods to call DBus methods, listen to signals, and get
    and set properties on the interface that are created dynamically based on
    the introspection data passed to the proxy object that made this proxy
    interface.

    A *method call* takes this form:

    .. code-block:: python3

        def callback(error: Exception, result: list(Any)):
            pass

        interface.call_[METHOD](*args, callback)
        result = interface.call_[METHOD]_sync(*args)

    Where ``METHOD`` is the name of the method converted to snake case.

    To call a method, provide ``*args`` that correspond to the *in args* of the
    introspection method definition.

    To *asynchronously* call a method, provide a callback that takes an error
    as the first argument and a list as the second argument. If the call
    completed successfully, ``error`` will be :class:`None`. If the service
    returns an error, it will be a :class:`DBusError <dbus_next.DBusError>`
    with information about the error returned from the bus. The result will be
    a list of values that correspond to the *out args* of the introspection
    method definition.

    To *synchronously* call a method, use the ``call_[METHOD]_sync()`` form.
    The ``result`` corresponds to the *out arg* of the introspection method
    definition. If the method has more than one out arg, they are returned
    within a :class:`list`.

    To *listen to a signal* use this form:

    .. code-block:: python3

        interface.on_[SIGNAL](callback)

    To *stop listening to a signal* use this form:

    .. code-block:: python3

        interface.off_[SIGNAL](callback)

    Where ``SIGNAL`` is the name of the signal converted to snake case.

    DBus signals are exposed with an event-callback interface. The provided
    ``callback`` will be called when the signal is emitted with arguments that
    correspond to the *out args* of the interface signal definition.

    To *get or set a property* use this form:

    .. code-block:: python3

        def get_callback(error: Exception, value: Any):
            pass

        def set_callback(error: Exception):
            pass

        interface.get_[PROPERTY](get_callback)
        value: Any = interface.get_[PROPERTY]_sync()

        interface.set_[PROPERTY](set_callback)
        interface.set_[PROPERTY]_sync(value)

    Where ``PROPERTY`` is the name of the property converted to snake case.

    The ``value`` must correspond to the type of the property in the interface
    definition.

    To asynchronously get or set a property, provide a callback that takes an
    :class:`Exception` as the first argument. If the call completed
    successfully, ``error`` will be :class:`None`. If the service returns an
    error, it will be a :class:`DBusError <dbus_next.DBusError>` with
    information about the error returned from the bus.

    If the service returns an error for a synchronous DBus call, a
    :class:`DBusError <dbus_next.DBusError>` will be raised with information
    about the error.
    """

    def _add_method(self, intr_method):
        # Arity and reply-unwrapping info taken from the introspection data.
        in_len = len(intr_method.in_args)
        out_len = len(intr_method.out_args)

        def method_fn(*args):
            # The caller passes the method's in args followed by a callback.
            if len(args) != in_len + 1:
                raise TypeError(
                    f'method {intr_method.name} expects {in_len} arguments and a callback (got {len(args)} args)'
                )

            args = list(args)
            # TODO type check: this callback takes two parameters
            # (MessageBus.check_callback(cb))
            callback = args.pop()

            def call_notify(msg, err):
                if err:
                    callback([], err)
                    return
                try:
                    BaseProxyInterface._check_method_return(msg, intr_method.out_signature)
                except DBusError as e:
                    err = e

                callback(msg.body, err)

            self.bus.call(
                Message(destination=self.bus_name,
                        path=self.path,
                        interface=self.introspection.name,
                        member=intr_method.name,
                        signature=intr_method.in_signature,
                        body=list(args)), call_notify)

        def method_fn_sync(*args):
            # Spin a nested GLib main loop until the async call completes.
            main = GLib.MainLoop()
            call_error = None
            call_body = None

            def callback(body, err):
                nonlocal call_error
                nonlocal call_body
                call_error = err
                call_body = body
                main.quit()

            method_fn(*args, callback)
            main.run()

            if call_error:
                raise call_error

            # Unwrap the reply body: no out args -> None, one -> the bare
            # value, several -> the list.
            if not out_len:
                return None
            elif out_len == 1:
                return call_body[0]
            else:
                return call_body

        method_name = f'call_{BaseProxyInterface._to_snake_case(intr_method.name)}'
        method_name_sync = f'{method_name}_sync'

        setattr(self, method_name, method_fn)
        setattr(self, method_name_sync, method_fn_sync)

    def _add_property(self, intr_property):

        def property_getter(callback):

            def call_notify(msg, err):
                if err:
                    callback(None, err)
                    return
                try:
                    BaseProxyInterface._check_method_return(msg)
                except Exception as e:
                    callback(None, e)
                    return

                variant = msg.body[0]
                if variant.signature != intr_property.signature:
                    # Bug fix: the message was missing the f-string prefix, so
                    # the literal text "{variant.signature}" was reported
                    # instead of the actual signature.
                    err = DBusError(ErrorType.CLIENT_ERROR,
                                    f'property returned unexpected signature "{variant.signature}"',
                                    msg)
                    callback(None, err)
                    return
                callback(variant.value, None)

            self.bus.call(
                Message(destination=self.bus_name,
                        path=self.path,
                        interface='org.freedesktop.DBus.Properties',
                        member='Get',
                        signature='ss',
                        body=[self.introspection.name, intr_property.name]), call_notify)

        def property_getter_sync():
            property_value = None
            reply_error = None

            # Spin a nested GLib main loop until the async getter completes.
            main = GLib.MainLoop()

            def callback(value, err):
                nonlocal property_value
                nonlocal reply_error
                property_value = value
                reply_error = err
                main.quit()

            property_getter(callback)
            main.run()
            if reply_error:
                raise reply_error
            return property_value

        def property_setter(value, callback):

            def call_notify(msg, err):
                if err:
                    callback(None, err)
                    return
                try:
                    BaseProxyInterface._check_method_return(msg)
                except Exception as e:
                    callback(None, e)
                    return

                return callback(None, None)

            # Wrap the value in a variant matching the property's signature.
            variant = Variant(intr_property.signature, value)

            self.bus.call(
                Message(destination=self.bus_name,
                        path=self.path,
                        interface='org.freedesktop.DBus.Properties',
                        member='Set',
                        signature='ssv',
                        body=[self.introspection.name, intr_property.name, variant]), call_notify)

        def property_setter_sync(val):
            reply_error = None

            # Spin a nested GLib main loop until the async setter completes.
            main = GLib.MainLoop()

            def callback(value, err):
                nonlocal reply_error
                reply_error = err
                main.quit()

            property_setter(val, callback)
            main.run()
            if reply_error:
                raise reply_error
            return None

        snake_case = super()._to_snake_case(intr_property.name)
        setattr(self, f'get_{snake_case}', property_getter)
        setattr(self, f'get_{snake_case}_sync', property_getter_sync)
        setattr(self, f'set_{snake_case}', property_setter)
        setattr(self, f'set_{snake_case}_sync', property_setter_sync)
class ProxyObject(BaseProxyObject):
    """The proxy object implementation for the GLib :class:`MessageBus
    <dbus_next.glib.MessageBus>` (the original docstring said "asyncio",
    a copy-paste from the aio implementation).

    For more information, see the :class:`BaseProxyObject <dbus_next.proxy_object.BaseProxyObject>`.
    """

    def __init__(self, bus_name: str, path: str, introspection: Union[intr.Node, str, ET.Element],
                 bus: BaseMessageBus):
        super().__init__(bus_name, path, introspection, bus, ProxyInterface)

    def get_interface(self, name: str) -> ProxyInterface:
        return super().get_interface(name)

    def get_children(self) -> List['ProxyObject']:
        return super().get_children()
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
8,992
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/service/test_properties.py
|
from dbus_next.service import ServiceInterface, dbus_property, method
from dbus_next.aio import MessageBus
from dbus_next import Message, MessageType, PropertyAccess, ErrorType, Variant, DBusError
import pytest
import asyncio
class ExampleInterface(ServiceInterface):
    """Synchronous property fixture: writable, read-only, container, renamed,
    disabled, and error-raising dbus properties."""

    def __init__(self, name):
        super().__init__(name)
        # Backing storage for the properties declared below.
        self._string_prop = 'hi'
        self._readonly_prop = 100
        self._disabled_prop = '1234'
        self._container_prop = [['hello', 'world']]
        self._renamed_prop = '65'

    @dbus_property()
    def string_prop(self) -> 's':
        return self._string_prop

    @string_prop.setter
    def string_prop_setter(self, val: 's'):
        self._string_prop = val

    @dbus_property(PropertyAccess.READ)
    def readonly_prop(self) -> 't':
        return self._readonly_prop

    @dbus_property()
    def container_prop(self) -> 'a(ss)':
        return self._container_prop

    @container_prop.setter
    def container_prop(self, val: 'a(ss)'):
        self._container_prop = val

    # Exposed on the bus under the name 'renamed_prop'.
    @dbus_property(name='renamed_prop')
    def original_name(self) -> 's':
        return self._renamed_prop

    @original_name.setter
    def original_name_setter(self, val: 's'):
        self._renamed_prop = val

    @dbus_property(disabled=True)
    def disabled_prop(self) -> 's':
        return self._disabled_prop

    @disabled_prop.setter
    def disabled_prop(self, val: 's'):
        self._disabled_prop = val

    # Disabled by default; test_property_methods re-enables it to exercise
    # the error paths.
    @dbus_property(disabled=True)
    def throws_error(self) -> 's':
        raise DBusError('test.error', 'told you so')

    @throws_error.setter
    def throws_error(self, val: 's'):
        raise DBusError('test.error', 'told you so')

    # Declares 's' but returns an int to provoke a service error.
    @dbus_property(PropertyAccess.READ, disabled=True)
    def returns_wrong_type(self) -> 's':
        return 5

    @method()
    def do_emit_properties_changed(self):
        # Emits PropertiesChanged with one changed and one invalidated entry.
        changed = {'string_prop': 'asdf'}
        invalidated = ['container_prop']
        self.emit_properties_changed(changed, invalidated)
class AsyncInterface(ServiceInterface):
    """Coroutine twin of the property fixture: the same properties with
    ``async`` getters and setters."""

    def __init__(self, name):
        super().__init__(name)
        # Backing storage for the properties declared below.
        self._string_prop = 'hi'
        self._readonly_prop = 100
        self._disabled_prop = '1234'
        self._container_prop = [['hello', 'world']]
        self._renamed_prop = '65'

    @dbus_property()
    async def string_prop(self) -> 's':
        return self._string_prop

    @string_prop.setter
    async def string_prop_setter(self, val: 's'):
        self._string_prop = val

    @dbus_property(PropertyAccess.READ)
    async def readonly_prop(self) -> 't':
        return self._readonly_prop

    @dbus_property()
    async def container_prop(self) -> 'a(ss)':
        return self._container_prop

    @container_prop.setter
    async def container_prop(self, val: 'a(ss)'):
        self._container_prop = val

    # Exposed on the bus under the name 'renamed_prop'.
    @dbus_property(name='renamed_prop')
    async def original_name(self) -> 's':
        return self._renamed_prop

    @original_name.setter
    async def original_name_setter(self, val: 's'):
        self._renamed_prop = val

    @dbus_property(disabled=True)
    async def disabled_prop(self) -> 's':
        return self._disabled_prop

    @disabled_prop.setter
    async def disabled_prop(self, val: 's'):
        self._disabled_prop = val

    # Disabled by default; test_property_methods re-enables it to exercise
    # the error paths.
    @dbus_property(disabled=True)
    async def throws_error(self) -> 's':
        raise DBusError('test.error', 'told you so')

    @throws_error.setter
    async def throws_error(self, val: 's'):
        raise DBusError('test.error', 'told you so')

    # Declares 's' but returns an int to provoke a service error.
    @dbus_property(PropertyAccess.READ, disabled=True)
    async def returns_wrong_type(self) -> 's':
        return 5

    @method()
    def do_emit_properties_changed(self):
        # Emits PropertiesChanged with one changed and one invalidated entry.
        changed = {'string_prop': 'asdf'}
        invalidated = ['container_prop']
        self.emit_properties_changed(changed, invalidated)
@pytest.mark.parametrize('interface_class', [ExampleInterface, AsyncInterface])
@pytest.mark.asyncio
async def test_property_methods(interface_class):
    """Drive the org.freedesktop.DBus.Properties methods (Get/Set/GetAll)
    against an exported interface, including the error paths."""
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()

    interface = interface_class('test.interface')
    export_path = '/test/path'
    bus1.export(export_path, interface)

    async def call_properties(member, signature, body):
        # Helper: call the standard Properties interface on the exported path.
        return await bus2.call(
            Message(destination=bus1.unique_name,
                    path=export_path,
                    interface='org.freedesktop.DBus.Properties',
                    member=member,
                    signature=signature,
                    body=body))

    # GetAll returns every enabled property; disabled ones are omitted.
    result = await call_properties('GetAll', 's', [interface.name])
    assert result.message_type == MessageType.METHOD_RETURN, result.body[0]
    assert result.signature == 'a{sv}'
    assert result.body == [{
        'string_prop': Variant('s', interface._string_prop),
        'readonly_prop': Variant('t', interface._readonly_prop),
        'container_prop': Variant('a(ss)', interface._container_prop),
        'renamed_prop': Variant('s', interface._renamed_prop)
    }]

    result = await call_properties('Get', 'ss', [interface.name, 'string_prop'])
    assert result.message_type == MessageType.METHOD_RETURN, result.body[0]
    assert result.signature == 'v'
    assert result.body == [Variant('s', 'hi')]

    # Set on a writable property updates the backing field.
    result = await call_properties(
        'Set', 'ssv',
        [interface.name, 'string_prop', Variant('s', 'ho')])
    assert result.message_type == MessageType.METHOD_RETURN, result.body[0]
    assert interface._string_prop == 'ho'
    # NOTE(review): the two asserts below are always truthy — 'ho' is the
    # assert condition and the property access is only the message, so no
    # value is actually compared; confirm whether '==' was intended.
    if interface_class is AsyncInterface:
        assert 'ho', await interface.string_prop()
    else:
        assert 'ho', interface.string_prop

    # Writing a read-only property is rejected.
    result = await call_properties(
        'Set', 'ssv',
        [interface.name, 'readonly_prop', Variant('t', 100)])
    assert result.message_type == MessageType.ERROR, result.body[0]
    assert result.error_name == ErrorType.PROPERTY_READ_ONLY.value, result.body[0]

    # Disabled and unknown properties both report UNKNOWN_PROPERTY.
    result = await call_properties(
        'Set', 'ssv',
        [interface.name, 'disabled_prop', Variant('s', 'asdf')])
    assert result.message_type == MessageType.ERROR, result.body[0]
    assert result.error_name == ErrorType.UNKNOWN_PROPERTY.value

    result = await call_properties(
        'Set', 'ssv',
        [interface.name, 'not_a_prop', Variant('s', 'asdf')])
    assert result.message_type == MessageType.ERROR, result.body[0]
    assert result.error_name == ErrorType.UNKNOWN_PROPERTY.value

    # wrong type
    result = await call_properties('Set', 'ssv', [interface.name, 'string_prop', Variant('t', 100)])
    assert result.message_type == MessageType.ERROR
    assert result.error_name == ErrorType.INVALID_SIGNATURE.value

    # enable the erroring properties so we can test them
    for prop in ServiceInterface._get_properties(interface):
        if prop.name in ['throws_error', 'returns_wrong_type']:
            prop.disabled = False

    # A getter that returns the wrong type surfaces as a service error.
    result = await call_properties('Get', 'ss', [interface.name, 'returns_wrong_type'])
    assert result.message_type == MessageType.ERROR, result.body[0]
    assert result.error_name == ErrorType.SERVICE_ERROR.value

    # A DBusError raised in the setter keeps its own name and body.
    result = await call_properties(
        'Set', 'ssv',
        [interface.name, 'throws_error', Variant('s', 'ho')])
    assert result.message_type == MessageType.ERROR, result.body[0]
    assert result.error_name == 'test.error'
    assert result.body == ['told you so']

    # Same for the getter, via Get and via GetAll.
    result = await call_properties('Get', 'ss', [interface.name, 'throws_error'])
    assert result.message_type == MessageType.ERROR, result.body[0]
    assert result.error_name == 'test.error'
    assert result.body == ['told you so']

    result = await call_properties('GetAll', 's', [interface.name])
    assert result.message_type == MessageType.ERROR, result.body[0]
    assert result.error_name == 'test.error'
    assert result.body == ['told you so']
@pytest.mark.parametrize('interface_class', [ExampleInterface, AsyncInterface])
@pytest.mark.asyncio
async def test_property_changed_signal(interface_class):
    """A service emitting PropertiesChanged is observed by a second connection.

    bus1 exports the service; bus2 subscribes to everything bus1 sends and
    waits for the org.freedesktop.DBus.Properties signal.
    """
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()

    # Ask the daemon to route all messages originating from bus1 to bus2.
    await bus2.call(
        Message(destination='org.freedesktop.DBus',
                path='/org/freedesktop/DBus',
                interface='org.freedesktop.DBus',
                member='AddMatch',
                signature='s',
                body=[f'sender={bus1.unique_name}']))

    interface = interface_class('test.interface')
    export_path = '/test/path'
    bus1.export(export_path, interface)

    async def wait_for_message():
        # TODO timeout
        future = asyncio.get_event_loop().create_future()

        def message_handler(signal):
            # Resolve only on the Properties signal; the handler removes
            # itself so it fires at most once.
            if signal.interface == 'org.freedesktop.DBus.Properties':
                bus2.remove_message_handler(message_handler)
                future.set_result(signal)

        bus2.add_message_handler(message_handler)
        return await future

    # Invoke the service method that triggers the PropertiesChanged emission.
    bus2.send(
        Message(destination=bus1.unique_name,
                interface=interface.name,
                path=export_path,
                member='do_emit_properties_changed'))

    signal = await wait_for_message()
    assert signal.interface == 'org.freedesktop.DBus.Properties'
    assert signal.member == 'PropertiesChanged'
    assert signal.signature == 'sa{sv}as'
    assert signal.body == [
        interface.name, {
            'string_prop': Variant('s', 'asdf')
        }, ['container_prop']
    ]
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
8,993
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/client/test_signals.py
|
from dbus_next.service import ServiceInterface, signal
from dbus_next.aio import MessageBus
from dbus_next import Message
from dbus_next.introspection import Node
from dbus_next.constants import RequestNameReply
import pytest
class ExampleInterface(ServiceInterface):
    """A minimal DBus service interface used to exercise signal emission.

    The string return annotations ('s', 'ss') are DBus type signatures
    consumed by the ``@signal()`` decorator; the values returned by the
    methods become the emitted signal's body.
    """

    def __init__(self):
        super().__init__('test.interface')

    @signal()
    def SomeSignal(self) -> 's':
        # Emits a single string argument.
        return 'hello'

    @signal()
    def SignalMultiple(self) -> 'ss':
        # Emits two string arguments.
        return ['hello', 'world']
@pytest.mark.asyncio
async def test_signals():
    """Signals from an exported interface reach a remote handler, and
    ``on_[signal]``/``off_[signal]`` add and remove match rules and message
    handlers symmetrically (no leaks).
    """
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()

    # Proxy to the daemon's Debug.Stats interface so the test can inspect
    # which match rules each connection currently has installed.
    bus_intr = await bus1.introspect('org.freedesktop.DBus', '/org/freedesktop/DBus')
    bus_obj = bus1.get_proxy_object('org.freedesktop.DBus', '/org/freedesktop/DBus', bus_intr)
    stats = bus_obj.get_interface('org.freedesktop.DBus.Debug.Stats')

    await bus1.request_name('test.signals.name')
    service_interface = ExampleInterface()
    bus1.export('/test/path', service_interface)

    obj = bus2.get_proxy_object('test.signals.name', '/test/path',
                                bus1._introspect_export_path('/test/path'))
    interface = obj.get_interface(service_interface.name)

    async def ping():
        # A round trip to bus1 guarantees previously sent messages (signals
        # included) have been processed before the asserts that follow.
        await bus2.call(
            Message(destination=bus1.unique_name,
                    interface='org.freedesktop.DBus.Peer',
                    path='/test/path',
                    member='Ping'))

    err = None

    single_counter = 0

    def single_handler(value):
        try:
            nonlocal single_counter
            nonlocal err
            assert value == 'hello'
            single_counter += 1
        except Exception as e:
            # Exceptions raised inside a signal handler would otherwise be
            # invisible to pytest; stash them and assert on err later.
            err = e

    multiple_counter = 0

    def multiple_handler(value1, value2):
        nonlocal multiple_counter
        nonlocal err
        try:
            assert value1 == 'hello'
            assert value2 == 'world'
            multiple_counter += 1
        except Exception as e:
            err = e

    await ping()
    match_rules = await stats.call_get_all_match_rules()
    assert bus2.unique_name in match_rules
    bus_match_rules = match_rules[bus2.unique_name]
    # the bus connection itself takes a rule on NameOwnerChange after the high
    # level client is initialized
    assert len(bus_match_rules) == 1
    assert len(bus2._user_message_handlers) == 0

    interface.on_some_signal(single_handler)
    interface.on_signal_multiple(multiple_handler)

    # Interlude: adding a signal handler with `on_[signal]` should add a match rule and
    # message handler. Removing a signal handler with `off_[signal]` should
    # remove the match rule and message handler to avoid memory leaks.
    await ping()
    match_rules = await stats.call_get_all_match_rules()
    assert bus2.unique_name in match_rules
    bus_match_rules = match_rules[bus2.unique_name]
    # test the match rule and user handler has been added
    assert len(bus_match_rules) == 2
    assert "type='signal',interface='test.interface',path='/test/path',sender='test.signals.name'" in bus_match_rules
    assert len(bus2._user_message_handlers) == 1

    service_interface.SomeSignal()
    await ping()
    assert err is None
    assert single_counter == 1

    service_interface.SignalMultiple()
    await ping()
    assert err is None
    assert multiple_counter == 1

    # special case: another bus with the same path and interface but on a
    # different name and connection will trigger the match rule of the first
    # (happens with mpris)
    bus3 = await MessageBus().connect()
    await bus3.request_name('test.signals.name2')
    service_interface2 = ExampleInterface()
    bus3.export('/test/path', service_interface2)

    obj = bus2.get_proxy_object('test.signals.name2', '/test/path',
                                bus3._introspect_export_path('/test/path'))
    # we have to add a dummy handler to add the match rule
    iface2 = obj.get_interface(service_interface2.name)

    def dummy_signal_handler(what):
        pass

    iface2.on_some_signal(dummy_signal_handler)
    await ping()

    service_interface2.SomeSignal()
    await ping()
    # single_counter is not incremented for signals of the second interface
    assert single_counter == 1

    interface.off_some_signal(single_handler)
    interface.off_signal_multiple(multiple_handler)
    iface2.off_some_signal(dummy_signal_handler)

    # After `off_[signal]`, the match rule and user handler should be removed
    await ping()
    match_rules = await stats.call_get_all_match_rules()
    assert bus2.unique_name in match_rules
    bus_match_rules = match_rules[bus2.unique_name]
    assert len(bus_match_rules) == 1
    assert "type='signal',interface='test.interface',path='/test/path',sender='test.signals.name'" not in bus_match_rules
    assert len(bus2._user_message_handlers) == 0

    bus1.disconnect()
    bus2.disconnect()
    bus3.disconnect()
@pytest.mark.asyncio
async def test_signals_with_changing_owners():
    """Signal handlers keyed on a well-known name keep working across owner
    changes.

    A client subscribes before the name is owned; bus2 then acquires the
    name, bus3 queues for it, and the name transfers to bus3 when bus2
    disconnects. Only the *current* owner's signals must be delivered.
    """
    well_known_name = 'test.signals.changing.name'

    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()
    bus3 = await MessageBus().connect()

    async def ping():
        # Round trip to flush pending messages so signal delivery (or the
        # lack of it) is observable before the asserts below.
        await bus1.call(
            Message(destination=bus1.unique_name,
                    interface='org.freedesktop.DBus.Peer',
                    path='/test/path',
                    member='Ping'))

    service_interface = ExampleInterface()
    introspection = Node.default()
    introspection.interfaces.append(service_interface.introspect())

    # get the interface before export
    obj = bus1.get_proxy_object(well_known_name, '/test/path', introspection)
    iface = obj.get_interface('test.interface')

    counter = 0

    def handler(what):
        nonlocal counter
        counter += 1

    iface.on_some_signal(handler)
    await ping()

    # now export and get the name
    bus2.export('/test/path', service_interface)
    result = await bus2.request_name(well_known_name)
    assert result is RequestNameReply.PRIMARY_OWNER

    # the signal should work
    service_interface.SomeSignal()
    await ping()
    assert counter == 1
    counter = 0

    # now queue up a transfer of the name
    service_interface2 = ExampleInterface()
    bus3.export('/test/path', service_interface2)
    result = await bus3.request_name(well_known_name)
    assert result is RequestNameReply.IN_QUEUE

    # if it doesn't own the name, the signal shouldn't work here
    service_interface2.SomeSignal()
    await ping()
    assert counter == 0

    # now transfer over the name and it should work
    bus2.disconnect()
    await ping()

    service_interface2.SomeSignal()
    await ping()
    assert counter == 1
    counter = 0

    bus1.disconnect()
    # NOTE: bus2 was already disconnected above to transfer the name; the
    # second, redundant bus2.disconnect() in the teardown has been removed.
    bus3.disconnect()
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
8,994
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/auth.py
|
from .errors import AuthError
import enum
import os
# The auth interface here is unstable. I would like to eventually open this up
# for people to define their own custom authentication protocols, but I'm not
# familiar with what's needed for that exactly. To work with any message bus
# implementation would require abstracting out all the IO. Async operations
# might be challenging because different IO backends have different ways of
# doing that. I might just end up giving the raw socket and leaving it all up
# to the user, but it would be nice to have a little guidance in the interface
# since a lot of it is strongly specified. If you have a need for this, contact
# the project maintainer to help stabilize this interface.
class _AuthResponse(enum.Enum):
OK = 'OK'
REJECTED = 'REJECTED'
DATA = 'DATA'
ERROR = 'ERROR'
AGREE_UNIX_FD = 'AGREE_UNIX_FD'
@classmethod
def parse(klass, line):
args = line.split(' ')
response = klass(args[0])
return response, args[1:]
# UNSTABLE
class Authenticator:
    """The base class for authenticators for :class:`MessageBus <dbus_next.message_bus.BaseMessageBus>` authentication.

    In the future, the library may allow extending this class for custom authentication protocols.

    :seealso: https://dbus.freedesktop.org/doc/dbus-specification.html#auth-protocol
    """

    def _authentication_start(self, negotiate_unix_fd=False):
        # Subclasses return the first line to send to the server.
        raise NotImplementedError(
            'authentication_start() must be implemented in the inheriting class')

    def _receive_line(self, line):
        # Subclasses parse a server response line and return the reply to send.
        raise NotImplementedError('receive_line() must be implemented in the inheriting class')

    @staticmethod
    def _format_line(line):
        # The auth protocol is line oriented with CRLF terminators.
        return (line + '\r\n').encode()
class AuthExternal(Authenticator):
    """An authenticator class for the external auth protocol for use with the
    :class:`MessageBus <dbus_next.message_bus.BaseMessageBus>`.

    :seealso: https://dbus.freedesktop.org/doc/dbus-specification.html#auth-protocol
    """

    def __init__(self):
        # Whether the caller asked to negotiate unix fd passing, and whether
        # that negotiation is currently in progress.
        self.negotiate_unix_fd = False
        self.negotiating_fds = False

    def _authentication_start(self, negotiate_unix_fd=False) -> str:
        self.negotiate_unix_fd = negotiate_unix_fd
        # EXTERNAL identifies the client by its uid, hex-encoded per the spec.
        hex_uid = str(os.getuid()).encode().hex()
        return f'AUTH EXTERNAL {hex_uid}'

    def _receive_line(self, line: str):
        response, args = _AuthResponse.parse(line)

        if response is _AuthResponse.OK:
            # Authenticated; optionally negotiate fd passing before BEGIN.
            if not self.negotiate_unix_fd:
                return "BEGIN"
            self.negotiating_fds = True
            return "NEGOTIATE_UNIX_FD"

        if response is _AuthResponse.AGREE_UNIX_FD:
            return "BEGIN"

        raise AuthError(f'authentication failed: {response.value}: {args}')
class AuthAnnonymous(Authenticator):
    """An authenticator class for the anonymous auth protocol for use with the
    :class:`MessageBus <dbus_next.message_bus.BaseMessageBus>`.

    :seealso: https://dbus.freedesktop.org/doc/dbus-specification.html#auth-protocol
    """

    def _authentication_start(self, negotiate_unix_fd=False) -> str:
        if negotiate_unix_fd:
            # fd passing is not implemented for this mechanism yet.
            raise AuthError(
                'annonymous authentication does not support negotiating unix fds right now')
        return 'AUTH ANONYMOUS'

    def _receive_line(self, line: str) -> str:
        response, args = _AuthResponse.parse(line)
        if response is not _AuthResponse.OK:
            raise AuthError(f'authentication failed: {response.value}: {args}')
        return 'BEGIN'
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
8,995
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/glib/message_bus.py
|
from .._private.unmarshaller import Unmarshaller
from ..constants import BusType
from ..message import Message
from ..constants import MessageType, MessageFlag, NameFlag, RequestNameReply, ReleaseNameReply
from ..message_bus import BaseMessageBus
from ..errors import AuthError
from .proxy_object import ProxyObject
from .. import introspection as intr
from ..auth import Authenticator, AuthExternal
import io
from typing import Callable, Optional
# glib is optional
_import_error = None
try:
    from gi.repository import GLib
    _GLibSource = GLib.Source
except ImportError as e:
    # Defer the ImportError: it is re-raised when MessageBus is constructed,
    # so merely importing this module works without PyGObject installed.
    _import_error = e

    class _GLibSource:
        # Placeholder base so the source classes below can still be defined.
        pass
class _MessageSource(_GLibSource):
    """GLib event source that reads and dispatches incoming DBus messages.

    ``dispatch`` runs from the GLib main loop when the bus fd is readable
    (the fd is attached with ``add_unix_fd(..., GLib.IO_IN)`` in
    ``MessageBus.connect``).
    """

    def __init__(self, bus):
        # Unmarshaller for the message currently being read; None between
        # messages.
        self.unmarshaller = None
        self.bus = bus

    def prepare(self):
        # (ready, timeout): never ready without polling, no timeout.
        return (False, -1)

    def check(self):
        return False

    def dispatch(self, callback, user_data):
        try:
            while self.bus._stream.readable():
                if not self.unmarshaller:
                    self.unmarshaller = Unmarshaller(self.bus._stream)

                if self.unmarshaller.unmarshall():
                    # A complete message was read; hand it to the bus and
                    # start fresh for the next one.
                    callback(self.unmarshaller.message)
                    self.unmarshaller = None
                else:
                    # Partial message: wait for more data to arrive.
                    break
        except Exception as e:
            # Any read/parse error is fatal to the connection.
            self.bus.disconnect()
            self.bus._finalize(e)
            return GLib.SOURCE_REMOVE

        return GLib.SOURCE_CONTINUE
class _MessageWritableSource(_GLibSource):
    """GLib event source that writes buffered outgoing messages to the bus.

    Marshalled messages are streamed out in small chunks so a non-blocking
    socket that fills up can be resumed on a later dispatch.
    """

    def __init__(self, bus):
        self.bus = bus
        # Chunk pending (re)write after a BlockingIOError; empty otherwise.
        self.buf = b''
        # BytesIO over the marshalled bytes of the message being sent.
        self.message_stream = None
        self.chunk_size = 128

    def prepare(self):
        return (False, -1)

    def check(self):
        return False

    def dispatch(self, callback, user_data):
        try:
            # Retry a chunk left over from a previous BlockingIOError first.
            if self.buf:
                self.bus._stream.write(self.buf)
                self.buf = b''

            if self.message_stream:
                while True:
                    self.buf = self.message_stream.read(self.chunk_size)
                    if self.buf == b'':
                        break
                    self.bus._stream.write(self.buf)
                    if len(self.buf) < self.chunk_size:
                        # Short read: the message stream is exhausted.
                        self.buf = b''
                        break
                    self.buf = b''

            self.bus._stream.flush()

            if not self.bus._buffered_messages:
                # Nothing left to send; detach this source.
                return GLib.SOURCE_REMOVE
            else:
                # Start marshalling the next queued message.
                message = self.bus._buffered_messages.pop(0)
                self.message_stream = io.BytesIO(message._marshall())
                return GLib.SOURCE_CONTINUE
        except BlockingIOError:
            # Socket buffer full; self.buf still holds the unwritten chunk
            # for the next dispatch.
            return GLib.SOURCE_CONTINUE
        except Exception as e:
            self.bus._finalize(e)
            return GLib.SOURCE_REMOVE
class _AuthLineSource(_GLibSource):
    """GLib event source that accumulates CRLF-terminated auth-protocol data.

    Once the buffered data ends in CRLF, the callback is invoked with the
    decoded buffer (terminator stripped); a truthy return detaches the
    source.
    """

    def __init__(self, stream):
        self.stream = stream
        self.buf = b''

    def prepare(self):
        return (False, -1)

    def check(self):
        return False

    def dispatch(self, callback, user_data):
        self.buf += self.stream.read()
        if self.buf.endswith(b'\r\n'):
            if callback(self.buf[:-2].decode()):
                return GLib.SOURCE_REMOVE
        return GLib.SOURCE_CONTINUE
class MessageBus(BaseMessageBus):
    """The message bus implementation for use with the GLib main loop.

    The message bus class is the entry point into all the features of the
    library. It sets up a connection to the DBus daemon and exposes an
    interface to send and receive messages and expose services.

    You must call :func:`connect() <dbus_next.glib.MessageBus.connect>` or
    :func:`connect_sync() <dbus_next.glib.MessageBus.connect_sync>` before
    using this message bus.

    :param bus_type: The type of bus to connect to. Affects the search path for
        the bus address.
    :type bus_type: :class:`BusType <dbus_next.BusType>`
    :param bus_address: A specific bus address to connect to. Should not be
        used under normal circumstances.
    :param auth: The authenticator to use, defaults to an instance of
        :class:`AuthExternal <dbus_next.auth.AuthExternal>`.
    :type auth: :class:`Authenticator <dbus_next.auth.Authenticator>`

    :ivar connected: True if this message bus is expected to be able to send
        and receive messages.
    :vartype connected: bool
    :ivar unique_name: The unique name of the message bus connection. It will
        be :class:`None` until the message bus connects.
    :vartype unique_name: str
    """

    def __init__(self,
                 bus_address: str = None,
                 bus_type: BusType = BusType.SESSION,
                 auth: Authenticator = None):
        # _import_error is set when the gi/GLib import failed at module load;
        # surface it at construction time rather than at first use.
        if _import_error:
            raise _import_error

        super().__init__(bus_address, bus_type, ProxyObject)
        self._main_context = GLib.main_context_default()
        # buffer messages until connect (flushed once Hello returns the
        # unique name; see send() and on_hello below)
        self._buffered_messages = []

        if auth is None:
            self._auth = AuthExternal()
        else:
            self._auth = auth

    def connect(self, connect_notify: Callable[['MessageBus', Optional[Exception]], None] = None):
        """Connect this message bus to the DBus daemon.

        This method or the synchronous version must be called before the
        message bus can be used.

        :param connect_notify: A callback that will be called with this message
            bus. May return an :class:`Exception` on connection errors or
            :class:`AuthError <dbus_next.AuthError>` on authorization errors.
        :type connect_notify: :class:`Callable`
        """
        def authenticate_notify(exc):
            # Called by _authenticate() with None on success or the
            # exception that aborted the handshake.
            if exc is not None:
                if connect_notify is not None:
                    connect_notify(None, exc)
                return
            # Authenticated: start watching the socket for incoming messages.
            self.message_source = _MessageSource(self)
            self.message_source.set_callback(self._on_message)
            self.message_source.attach(self._main_context)

            # Lazily created by _schedule_write() when there is data to send.
            self.writable_source = None

            self.message_source.add_unix_fd(self._fd, GLib.IO_IN)

            def on_hello(reply, err):
                if err:
                    if connect_notify:
                        connect_notify(reply, err)
                    return

                # The reply to Hello carries our unique connection name.
                self.unique_name = reply.body[0]

                # Flush anything queued before the connection was ready.
                for m in self._buffered_messages:
                    self.send(m)

                if connect_notify:
                    connect_notify(self, err)

            hello_msg = Message(destination='org.freedesktop.DBus',
                                path='/org/freedesktop/DBus',
                                interface='org.freedesktop.DBus',
                                member='Hello',
                                serial=self.next_serial())

            self._method_return_handlers[hello_msg.serial] = on_hello
            # Hello is written directly (not via send()) because the bus is
            # not considered connected until its reply arrives.
            self._stream.write(hello_msg._marshall())
            self._stream.flush()

        self._authenticate(authenticate_notify)

    def connect_sync(self) -> 'MessageBus':
        """Connect this message bus to the DBus daemon.

        This method or the asynchronous version must be called before the
        message bus can be used.

        :returns: This message bus for convenience.
        :rtype: :class:`MessageBus <dbus_next.glib.MessageBus>`

        :raises:
            - :class:`AuthError <dbus_next.AuthError>` - If authorization to \
              the DBus daemon failed.
            - :class:`Exception` - If there was a connection error.
        """
        # Spin a private main loop until the async connect() completes.
        main = GLib.MainLoop()
        connection_error = None

        def connect_notify(bus, err):
            nonlocal connection_error
            connection_error = err
            main.quit()

        self.connect(connect_notify)
        main.run()

        if connection_error:
            raise connection_error

        return self

    def call(self,
             msg: Message,
             reply_notify: Callable[[Optional[Message], Optional[Exception]], None] = None):
        """Send a method call and asynchronously wait for a reply from the DBus
        daemon.

        :param msg: The method call message to send.
        :type msg: :class:`Message <dbus_next.Message>`
        :param reply_notify: A callback that will be called with the reply to
            this message. May return an :class:`Exception` on connection errors.
        :type reply_notify: Callable
        """
        self._call(msg, reply_notify)

    def call_sync(self, msg: Message) -> Optional[Message]:
        """Send a method call and synchronously wait for a reply from the DBus
        daemon.

        :param msg: The method call message to send.
        :type msg: :class:`Message <dbus_next.Message>`

        :returns: A message in reply to the message sent. If the message does
            not expect a reply based on the message flags or type, returns
            ``None`` immediately.
        :rtype: :class:`Message <dbus_next.Message>`

        :raises:
            - :class:`DBusError <dbus_next.DBusError>` - If the service threw \
              an error for the method call or returned an invalid result.
            - :class:`Exception` - If a connection error occurred.
        """
        # No reply will come for these; fire and forget.
        if msg.flags & MessageFlag.NO_REPLY_EXPECTED or msg.message_type is not MessageType.METHOD_CALL:
            self.send(msg)
            return None

        if not msg.serial:
            msg.serial = self.next_serial()

        # Block on a private main loop until the reply handler fires.
        main = GLib.MainLoop()
        handler_reply = None
        connection_error = None

        def reply_handler(reply, err):
            nonlocal handler_reply
            nonlocal connection_error

            handler_reply = reply
            connection_error = err

            main.quit()

        self._method_return_handlers[msg.serial] = reply_handler
        self.send(msg)
        main.run()

        if connection_error:
            raise connection_error

        return handler_reply

    def introspect_sync(self, bus_name: str, path: str) -> intr.Node:
        """Get introspection data for the node at the given path from the given
        bus name.

        Calls the standard ``org.freedesktop.DBus.Introspectable.Introspect``
        on the bus for the path.

        :param bus_name: The name to introspect.
        :type bus_name: str
        :param path: The path to introspect.
        :type path: str

        :returns: The introspection data for the name at the path.
        :rtype: :class:`Node <dbus_next.introspection.Node>`

        :raises:
            - :class:`InvalidObjectPathError <dbus_next.InvalidObjectPathError>` \
                  - If the given object path is not valid.
            - :class:`InvalidBusNameError <dbus_next.InvalidBusNameError>` - If \
                  the given bus name is not valid.
            - :class:`DBusError <dbus_next.DBusError>` - If the service threw \
                  an error for the method call or returned an invalid result.
            - :class:`Exception` - If a connection error occurred.
        """
        # Synchronous wrapper over the async introspect() in the base class.
        main = GLib.MainLoop()
        request_result = None
        request_error = None

        def reply_notify(result, err):
            nonlocal request_result
            nonlocal request_error

            request_result = result
            request_error = err

            main.quit()

        super().introspect(bus_name, path, reply_notify)
        main.run()

        if request_error:
            raise request_error

        return request_result

    def request_name_sync(self, name: str, flags: NameFlag = NameFlag.NONE) -> RequestNameReply:
        """Request that this message bus owns the given name.

        :param name: The name to request.
        :type name: str
        :param flags: Name flags that affect the behavior of the name request.
        :type flags: :class:`NameFlag <dbus_next.NameFlag>`

        :returns: The reply to the name request.
        :rtype: :class:`RequestNameReply <dbus_next.RequestNameReply>`

        :raises:
            - :class:`InvalidBusNameError <dbus_next.InvalidBusNameError>` - If \
                  the given bus name is not valid.
            - :class:`DBusError <dbus_next.DBusError>` - If the service threw \
                  an error for the method call or returned an invalid result.
            - :class:`Exception` - If a connection error occurred.
        """
        # Synchronous wrapper over the async request_name() in the base class.
        main = GLib.MainLoop()
        request_result = None
        request_error = None

        def reply_notify(result, err):
            nonlocal request_result
            nonlocal request_error

            request_result = result
            request_error = err

            main.quit()

        super().request_name(name, flags, reply_notify)
        main.run()

        if request_error:
            raise request_error

        return request_result

    def release_name_sync(self, name: str) -> ReleaseNameReply:
        """Request that this message bus release the given name.

        :param name: The name to release.
        :type name: str

        :returns: The reply to the release request.
        :rtype: :class:`ReleaseNameReply <dbus_next.ReleaseNameReply>`

        :raises:
            - :class:`InvalidBusNameError <dbus_next.InvalidBusNameError>` - If \
                  the given bus name is not valid.
            - :class:`DBusError <dbus_next.DBusError>` - If the service threw \
                  an error for the method call or returned an invalid result.
            - :class:`Exception` - If a connection error occurred.
        """
        # Synchronous wrapper over the async release_name() in the base class.
        main = GLib.MainLoop()
        release_result = None
        release_error = None

        def reply_notify(result, err):
            nonlocal release_result
            nonlocal release_error

            release_result = result
            release_error = err

            main.quit()

        super().release_name(name, reply_notify)
        main.run()

        if release_error:
            raise release_error

        return release_result

    def send(self, msg: Message):
        """Queue a message for writing; messages queued before the bus has a
        unique name are buffered until the connection completes."""
        if not msg.serial:
            msg.serial = self.next_serial()

        self._buffered_messages.append(msg)

        # unique_name is only set once the Hello reply arrives, so writes
        # are deferred until the bus is fully connected.
        if self.unique_name:
            self._schedule_write()

    def get_proxy_object(self, bus_name: str, path: str, introspection: intr.Node) -> ProxyObject:
        """Get a GLib :class:`ProxyObject` for the remote object at the given
        bus name and path (see the base class for details)."""
        return super().get_proxy_object(bus_name, path, introspection)

    def _schedule_write(self):
        # Create (or re-create after it fired) a writable source that drains
        # _buffered_messages when the socket is ready for writing.
        if self.writable_source is None or self.writable_source.is_destroyed():
            self.writable_source = _MessageWritableSource(self)
            self.writable_source.attach(self._main_context)
            self.writable_source.add_unix_fd(self._fd, GLib.IO_OUT)

    def _authenticate(self, authenticate_notify):
        """Run the SASL-style auth handshake, then call ``authenticate_notify``
        with ``None`` on success or the exception on failure."""
        # The DBus auth protocol begins with a single null byte on the wire.
        self._stream.write(b'\0')
        first_line = self._auth._authentication_start()
        if first_line is not None:
            if type(first_line) is not str:
                raise AuthError('authenticator gave response not type str')
            self._stream.write(f'{first_line}\r\n'.encode())
            self._stream.flush()

        def line_notify(line):
            # Returning True destroys the line source (handshake finished or
            # failed); no return (None) keeps it reading more lines.
            try:
                resp = self._auth._receive_line(line)
                self._stream.write(Authenticator._format_line(resp))
                self._stream.flush()
                if resp == 'BEGIN':
                    self._readline_source.destroy()
                    authenticate_notify(None)
                    return True
            except Exception as e:
                authenticate_notify(e)
                return True

        readline_source = _AuthLineSource(self._stream)
        readline_source.set_callback(line_notify)
        readline_source.add_unix_fd(self._fd, GLib.IO_IN)
        readline_source.attach(self._main_context)
        # make sure it doesn't get cleaned up
        self._readline_source = readline_source
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
8,996
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/_private/unmarshaller.py
|
from typing import Any, Callable, Dict, List, Optional, Tuple
from ..message import Message
from .constants import (
HeaderField,
LITTLE_ENDIAN,
BIG_ENDIAN,
PROTOCOL_VERSION,
HEADER_NAME_MAP,
)
from ..constants import MessageType, MessageFlag, MESSAGE_FLAG_MAP, MESSAGE_TYPE_MAP
from ..signature import SignatureTree, SignatureType, Variant
from ..errors import InvalidMessageError
import array
import io
import socket
import sys
from struct import Struct
# Maximum number of unix file descriptors accepted in one recvmsg() call's
# ancillary data (sizes the SCM_RIGHTS buffer in Unmarshaller.read_sock).
MAX_UNIX_FDS = 16

# struct format prefix selecting byte order for each DBus endianness flag.
UNPACK_SYMBOL = {LITTLE_ENDIAN: "<", BIG_ENDIAN: ">"}
# Pre-compiled unpackers for the three uint32 header fields (body length,
# serial, header-field-array length) located at byte offsets 4, 8 and 12.
UNPACK_LENGTHS = {BIG_ENDIAN: Struct(">III"), LITTLE_ENDIAN: Struct("<III")}

IS_BIG_ENDIAN = sys.byteorder == "big"
IS_LITTLE_ENDIAN = sys.byteorder == "little"

# Fixed-size DBus type tokens mapped to (struct format char, size in bytes).
DBUS_TO_CTYPE = {
    "y": ("B", 1),  # byte
    "n": ("h", 2),  # int16
    "q": ("H", 2),  # uint16
    "i": ("i", 4),  # int32
    "u": ("I", 4),  # uint32
    "x": ("q", 8),  # int64
    "t": ("Q", 8),  # uint64
    "d": ("d", 8),  # double
    "h": ("I", 4),  # uint32
}

# Size of the fixed-length header prefix: endian flag, message type, flags,
# protocol version (1 byte each) plus the three uint32 length fields.
HEADER_SIGNATURE_SIZE = 16
# Byte offset of the a(yv) header-field array's length field in the header.
HEADER_ARRAY_OF_STRUCT_SIGNATURE_POSITION = 12

# Cached SignatureType for 'u' (uint32), used for lengths and booleans.
UINT32_SIGNATURE = SignatureTree._get("u").types[0]

# Header field names hoisted to module level for fast dict lookups in
# Unmarshaller._read_body.
HEADER_DESTINATION = HeaderField.DESTINATION.name
HEADER_PATH = HeaderField.PATH.name
HEADER_INTERFACE = HeaderField.INTERFACE.name
HEADER_MEMBER = HeaderField.MEMBER.name
HEADER_ERROR_NAME = HeaderField.ERROR_NAME.name
HEADER_REPLY_SERIAL = HeaderField.REPLY_SERIAL.name
HEADER_SENDER = HeaderField.SENDER.name

# Reader-table type: maps a signature token to (complex-type reader, ctype
# format char, size, pre-compiled Struct). Exactly one of the reader or the
# ctype/size/Struct triple is populated per entry.
READER_TYPE = Dict[str, Tuple[Optional[Callable[["Unmarshaller", SignatureType], Any]],
                              Optional[str], Optional[int], Optional[Struct], ], ]
class MarshallerStreamEndError(Exception):
    """Raised when the wire buffer runs out before a full message is read.

    More data is expected on the wire that has not yet been received; the
    caller should invoke unmarshall again once more data is available.
    """
#
# Alignment padding is handled with the following formula below
#
# For any align value, the correct padding formula is:
#
# (align - (offset % align)) % align
#
# However, if align is a power of 2 (always the case here), the slow MOD
# operator can be replaced by a bitwise AND:
#
# (align - (offset & (align - 1))) & (align - 1)
#
# Which can be simplified to:
#
# (-offset) & (align - 1)
#
#
class Unmarshaller:
    """Incrementally decodes a DBus wire-format message from a stream or socket.

    The reader methods advance an absolute ``offset`` cursor into ``buf``.
    When the underlying read cannot supply enough bytes it raises
    :class:`MarshallerStreamEndError`, and :meth:`unmarshall` returns ``None``
    so it can be retried once more data has arrived.
    """
    buf: bytearray
    view: memoryview
    message: Message
    unpack: Dict[str, Struct]
    readers: READER_TYPE

    def __init__(self, stream: io.BufferedRWPair, sock=None):
        # File descriptors received via SCM_RIGHTS (socket mode only).
        self.unix_fds: List[int] = []
        # True when the message endianness matches the host's, allowing
        # zero-copy memoryview casts instead of struct unpacking.
        self.can_cast = False
        self.buf = bytearray()  # Actual buffer
        self.view = None  # Memory view of the buffer
        self.offset = 0
        self.stream = stream
        # When sock is set, reads go through recvmsg() to capture fds.
        self.sock = sock
        self.message = None
        # Endian-specific reader table, selected in _read_header.
        self.readers = None
        self.body_len: int | None = None
        self.serial: int | None = None
        self.header_len: int | None = None
        self.message_type: MessageType | None = None
        self.flag: MessageFlag | None = None

    def read_sock(self, length: int) -> bytes:
        """reads from the socket, storing any fds sent and handling errors
        from the read itself"""
        unix_fd_list = array.array("i")

        try:
            msg, ancdata, *_ = self.sock.recvmsg(
                length, socket.CMSG_LEN(MAX_UNIX_FDS * unix_fd_list.itemsize))
        except BlockingIOError:
            # No data available on the non-blocking socket yet; signal the
            # caller to retry when the socket becomes readable.
            raise MarshallerStreamEndError()

        for level, type_, data in ancdata:
            if not (level == socket.SOL_SOCKET and type_ == socket.SCM_RIGHTS):
                continue
            # Drop any truncated trailing fd bytes before decoding the array.
            unix_fd_list.frombytes(data[:len(data) - (len(data) % unix_fd_list.itemsize)])
            self.unix_fds.extend(list(unix_fd_list))

        return msg

    def read_to_offset(self, offset: int) -> None:
        """
        Read from underlying socket into buffer.

        Raises MarshallerStreamEndError if there is not enough data to be read.

        :arg offset:
            The offset to read to. If not enough bytes are available in the
            buffer, read more from it.

        :returns:
            None
        """
        start_len = len(self.buf)
        # NOTE(review): this assumes self.offset is 0 at call time (both call
        # sites read before any parsing moves the cursor) — verify if reused.
        missing_bytes = offset - (start_len - self.offset)
        if self.sock is None:
            data = self.stream.read(missing_bytes)
        else:
            data = self.read_sock(missing_bytes)
        if data == b"":
            raise EOFError()
        if data is None:
            raise MarshallerStreamEndError()
        self.buf.extend(data)
        # A short read means the rest of the message has not arrived yet.
        if len(data) + start_len != offset:
            raise MarshallerStreamEndError()

    def read_boolean(self, _=None):
        # Booleans are marshalled as uint32 on the wire.
        return bool(self.read_argument(UINT32_SIGNATURE))

    def read_string(self, _=None):
        str_length = self.read_argument(UINT32_SIGNATURE)
        str_start = self.offset
        # read terminating '\0' byte as well (str_length + 1)
        self.offset += str_length + 1
        return self.buf[str_start:str_start + str_length].decode()

    def read_signature(self, _=None):
        # Signatures use a single length byte instead of a uint32.
        signature_len = self.view[self.offset]  # byte
        o = self.offset + 1
        # read terminating '\0' byte as well (signature_len + 1)
        self.offset = o + signature_len + 1
        return self.buf[o:o + signature_len].decode()

    def read_variant(self, _=None):
        tree = SignatureTree._get(self.read_signature())
        # verify in Variant is only useful on construction not unmarshalling
        return Variant(tree, self.read_argument(tree.types[0]), verify=False)

    def read_struct(self, type_: SignatureType):
        self.offset += -self.offset & 7  # align 8
        return [self.read_argument(child_type) for child_type in type_.children]

    def read_dict_entry(self, type_: SignatureType):
        self.offset += -self.offset & 7  # align 8
        # children[0] is the key type, children[1] the value type.
        return self.read_argument(type_.children[0]), self.read_argument(type_.children[1])

    def read_array(self, type_: SignatureType):
        self.offset += -self.offset & 3  # align 4 for the array
        array_length = self.read_argument(UINT32_SIGNATURE)

        child_type = type_.children[0]
        if child_type.token in "xtd{(":
            # the first alignment is not included in the array size
            self.offset += -self.offset & 7  # align 8

        if child_type.token == "y":
            # Byte arrays are returned as a raw slice instead of a list.
            self.offset += array_length
            return self.buf[self.offset - array_length:self.offset]

        beginning_offset = self.offset

        if child_type.token == "{":
            # An array of dict entries is exposed as a Python dict.
            result_dict = {}
            while self.offset - beginning_offset < array_length:
                key, value = self.read_dict_entry(child_type)
                result_dict[key] = value
            return result_dict

        result_list = []
        while self.offset - beginning_offset < array_length:
            result_list.append(self.read_argument(child_type))
        return result_list

    def read_argument(self, type_: SignatureType) -> Any:
        """Dispatch to an argument reader or cast/unpack a C type."""
        token = type_.token
        reader, ctype, size, struct = self.readers[token]
        if reader:  # complex type
            return reader(self, type_)
        # Fixed-size type: skip alignment padding (see the module comment on
        # the (-offset & (align-1)) formula), then read `size` bytes.
        self.offset += size + (-self.offset & (size - 1))  # align
        if self.can_cast:
            # Same endianness as the host: zero-copy cast of the view.
            return self.view[self.offset - size:self.offset].cast(ctype)[0]
        return struct.unpack_from(self.view, self.offset - size)[0]

    def header_fields(self, header_length):
        """Header fields are always a(yv)."""
        beginning_offset = self.offset
        headers = {}
        while self.offset - beginning_offset < header_length:
            # Now read the y (byte) of struct (yv)
            self.offset += (-self.offset & 7) + 1  # align 8 + 1 for 'y' byte
            field_0 = self.view[self.offset - 1]

            # Now read the v (variant) of struct (yv)
            # Variant signature parsing is inlined here (rather than calling
            # read_signature/read_variant) for speed.
            signature_len = self.view[self.offset]  # byte
            o = self.offset + 1
            self.offset += signature_len + 2  # one for the byte, one for the '\0'
            tree = SignatureTree._get(self.buf[o:o + signature_len].decode())
            headers[HEADER_NAME_MAP[field_0]] = self.read_argument(tree.types[0])
        return headers

    def _read_header(self):
        """Read the header of the message."""
        # Signature is of the header is
        # BYTE, BYTE, BYTE, BYTE, UINT32, UINT32, ARRAY of STRUCT of (BYTE,VARIANT)
        self.read_to_offset(HEADER_SIGNATURE_SIZE)
        buffer = self.buf
        endian = buffer[0]
        self.message_type = MESSAGE_TYPE_MAP[buffer[1]]
        self.flag = MESSAGE_FLAG_MAP[buffer[2]]
        protocol_version = buffer[3]

        if endian != LITTLE_ENDIAN and endian != BIG_ENDIAN:
            raise InvalidMessageError(
                f"Expecting endianness as the first byte, got {endian} from {buffer}")
        if protocol_version != PROTOCOL_VERSION:
            raise InvalidMessageError(f"got unknown protocol version: {protocol_version}")

        self.body_len, self.serial, self.header_len = UNPACK_LENGTHS[endian].unpack_from(buffer, 4)
        # The body starts after the header fields padded out to 8 bytes.
        self.msg_len = (self.header_len + (-self.header_len & 7) + self.body_len)  # align 8

        # Enable the zero-copy fast path when wire and host byte order match.
        if IS_BIG_ENDIAN and endian == BIG_ENDIAN:
            self.can_cast = True
        elif IS_LITTLE_ENDIAN and endian == LITTLE_ENDIAN:
            self.can_cast = True
        self.readers = self._readers_by_type[endian]

    def _read_body(self):
        """Read the body of the message."""
        self.read_to_offset(HEADER_SIGNATURE_SIZE + self.msg_len)
        self.view = memoryview(self.buf)
        # Start parsing at the header-field array (its uint32 length field).
        self.offset = HEADER_ARRAY_OF_STRUCT_SIGNATURE_POSITION
        header_fields = self.header_fields(self.header_len)
        self.offset += -self.offset & 7  # align 8
        tree = SignatureTree._get(header_fields.get(HeaderField.SIGNATURE.name, ""))
        self.message = Message(
            destination=header_fields.get(HEADER_DESTINATION),
            path=header_fields.get(HEADER_PATH),
            interface=header_fields.get(HEADER_INTERFACE),
            member=header_fields.get(HEADER_MEMBER),
            message_type=self.message_type,
            flags=self.flag,
            error_name=header_fields.get(HEADER_ERROR_NAME),
            reply_serial=header_fields.get(HEADER_REPLY_SERIAL),
            sender=header_fields.get(HEADER_SENDER),
            unix_fds=self.unix_fds,
            signature=tree.signature,
            body=[self.read_argument(t) for t in tree.types] if self.body_len else [],
            serial=self.serial,
        )

    def unmarshall(self):
        """Unmarshall the message.

        The underlying read function will raise MarshallerStreamEndError
        if there are not enough bytes in the buffer. This allows unmarshall
        to be resumed when more data comes in over the wire.
        """
        try:
            # message_type doubles as a "header already parsed" flag so a
            # resumed call skips straight to reading the body.
            if not self.message_type:
                self._read_header()
            self._read_body()
        except MarshallerStreamEndError:
            return None
        return self.message

    # Tokens handled by a dedicated reader method (variable-size/container
    # types); the ctype/size/Struct slots are unused for these.
    _complex_parsers: Dict[str, Tuple[Callable[["Unmarshaller", SignatureType], Any], None, None,
                                      None]] = {
        "b": (read_boolean, None, None, None),
        "o": (read_string, None, None, None),
        "s": (read_string, None, None, None),
        "g": (read_signature, None, None, None),
        "a": (read_array, None, None, None),
        "(": (read_struct, None, None, None),
        "{": (read_dict_entry, None, None, None),
        "v": (read_variant, None, None, None),
    }

    # Fixed-size tokens with a pre-compiled Struct per endianness; the
    # reader slot is None so read_argument takes the cast/unpack path.
    _ctype_by_endian: Dict[int, Dict[str, Tuple[None, str, int, Struct]]] = {
        endian: {
            dbus_type: (
                None,
                *ctype_size,
                Struct(f"{UNPACK_SYMBOL[endian]}{ctype_size[0]}"),
            )
            for dbus_type, ctype_size in DBUS_TO_CTYPE.items()
        }
        for endian in (BIG_ENDIAN, LITTLE_ENDIAN)
    }

    # Complete dispatch table per endianness used by read_argument.
    _readers_by_type: Dict[int, READER_TYPE] = {
        BIG_ENDIAN: {
            **_ctype_by_endian[BIG_ENDIAN],
            **_complex_parsers
        },
        LITTLE_ENDIAN: {
            **_ctype_by_endian[LITTLE_ENDIAN],
            **_complex_parsers
        },
    }
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
8,997
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/_private/constants.py
|
from enum import Enum
# Wire-protocol version carried in byte 3 of every message header.
PROTOCOL_VERSION = 1

# Endianness flag carried in byte 0 of every message header:
# ASCII 'l' for little endian, ASCII 'B' for big endian.
LITTLE_ENDIAN = ord('l')
BIG_ENDIAN = ord('B')
class HeaderField(Enum):
PATH = 1
INTERFACE = 2
MEMBER = 3
ERROR_NAME = 4
REPLY_SERIAL = 5
DESTINATION = 6
SENDER = 7
SIGNATURE = 8
UNIX_FDS = 9
HEADER_NAME_MAP = {field.value: field.name for field in HeaderField}
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
8,998
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/examples/aio-tcp-notification.py
|
#!/usr/bin/env python3
# In order for this to work a local tcp connection to the DBus a port
# must be opened to forward to the dbus socket file. The easiest way
# to achieve this is using "socat":
# socat TCP-LISTEN:55556,reuseaddr,fork,range=127.0.0.1/32 UNIX-CONNECT:$(echo $DBUS_SESSION_BUS_ADDRESS | sed 's/unix:path=//g')
# For actual DBus transport over network the authentication might
# be a further problem. More information here:
# https://dbus.freedesktop.org/doc/dbus-specification.html#auth-mechanisms
import sys
import os

# Allow running the example directly from a repository checkout.
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/..'))

from dbus_next.aio import MessageBus

import asyncio


async def main():
    """Connect to the session bus over TCP and post a desktop notification."""
    bus = await MessageBus(bus_address="tcp:host=127.0.0.1,port=55556").connect()

    introspection = await bus.introspect('org.freedesktop.Notifications',
                                         '/org/freedesktop/Notifications')

    obj = bus.get_proxy_object('org.freedesktop.Notifications', '/org/freedesktop/Notifications',
                               introspection)

    notification = obj.get_interface('org.freedesktop.Notifications')

    await notification.call_notify("test.py", 0, "", "DBus Test", "Test notification", [""], dict(),
                                   5000)


# asyncio.run() creates and tears down the event loop for us; the old
# get_event_loop()/run_until_complete() pattern is deprecated since Python 3.10.
asyncio.run(main())
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
8,999
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/introspection.py
|
from .constants import PropertyAccess, ArgDirection
from .signature import SignatureTree, SignatureType
from .validators import assert_member_name_valid, assert_interface_name_valid
from .errors import InvalidIntrospectionError
from typing import List, Union
import xml.etree.ElementTree as ET
# https://dbus.freedesktop.org/doc/dbus-specification.html#introspection-format
# TODO annotations
class Arg:
    """A class that represents an input or output argument to a signal or a method.

    :ivar name: The name of this arg.
    :vartype name: str

    :ivar direction: Whether this is an input or an output argument.
    :vartype direction: :class:`ArgDirection <dbus_next.ArgDirection>`

    :ivar type: The parsed signature type of this argument.
    :vartype type: :class:`SignatureType <dbus_next.SignatureType>`

    :ivar signature: The signature string of this argument.
    :vartype signature: str

    :raises:
        - :class:`InvalidMemberNameError <dbus_next.InvalidMemberNameError>` - If the name of the arg is not valid.
        - :class:`InvalidSignatureError <dbus_next.InvalidSignatureError>` - If the signature is not valid.
        - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the signature is not a single complete type.
    """

    def __init__(self,
                 signature: Union[SignatureType, str],
                 direction: ArgDirection = None,
                 name: str = None):
        # `direction` is a single ArgDirection (to_xml reads `.value` on it);
        # the previous List[ArgDirection] annotation was wrong.
        if name is not None:
            assert_member_name_valid(name)

        type_ = None
        if isinstance(signature, SignatureType):
            # Already parsed: keep the type and recover its signature string.
            type_ = signature
            signature = signature.signature
        else:
            # Parse the signature string; an arg must be exactly one complete type.
            tree = SignatureTree._get(signature)
            if len(tree.types) != 1:
                raise InvalidIntrospectionError(
                    f'an argument must have a single complete type. (has {len(tree.types)} types)')
            type_ = tree.types[0]

        self.type = type_
        self.signature = signature
        self.name = name
        self.direction = direction

    @staticmethod
    def from_xml(element: ET.Element, direction: ArgDirection) -> 'Arg':
        """Convert a :class:`xml.etree.ElementTree.Element` into a
        :class:`Arg`.

        The element must be valid DBus introspection XML for an ``arg``.

        :param element: The parsed XML element.
        :type element: :class:`xml.etree.ElementTree.Element`
        :param direction: The direction of this arg. Must be specified because it can default to different values depending on if it's in a method or signal.
        :type direction: :class:`ArgDirection <dbus_next.ArgDirection>`

        :raises:
            - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the XML tree is not valid introspection data.
        """
        name = element.attrib.get('name')
        signature = element.attrib.get('type')

        if not signature:
            raise InvalidIntrospectionError('a method argument must have a "type" attribute')

        return Arg(signature, direction, name)

    def to_xml(self) -> ET.Element:
        """Convert this :class:`Arg` into an :class:`xml.etree.ElementTree.Element`.
        """
        element = ET.Element('arg')

        if self.name:
            element.set('name', self.name)

        if self.direction:
            element.set('direction', self.direction.value)
        element.set('type', self.signature)

        return element
class Signal:
    """A class that represents a signal exposed on an interface.

    :ivar name: The name of this signal
    :vartype name: str

    :ivar args: A list of output arguments for this signal.
    :vartype args: list(Arg)

    :ivar signature: The collected signature of the output arguments.
    :vartype signature: str

    :raises:
        - :class:`InvalidMemberNameError <dbus_next.InvalidMemberNameError>` - If the name of the signal is not a valid member name.
    """

    def __init__(self, name: str, args: List[Arg] = None):
        if name is not None:
            assert_member_name_valid(name)

        self.name = name
        self.args = args or []
        # Signals only have output args; collect their signatures in order.
        self.signature = ''.join(arg.signature for arg in self.args)

    @staticmethod
    def from_xml(element: ET.Element) -> 'Signal':
        """Convert an :class:`xml.etree.ElementTree.Element` to a :class:`Signal`.

        The element must be valid DBus introspection XML for a ``signal``.

        :param element: The parsed XML element.
        :type element: :class:`xml.etree.ElementTree.Element`

        :raises:
            - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the XML tree is not valid introspection data.
        """
        name = element.attrib.get('name')
        if not name:
            raise InvalidIntrospectionError('signals must have a "name" attribute')

        args = []
        for child in element:
            if child.tag == 'arg':
                # Signal args are always OUT per the introspection format.
                args.append(Arg.from_xml(child, ArgDirection.OUT))

        signal = Signal(name, args)

        return signal

    def to_xml(self) -> ET.Element:
        """Convert this :class:`Signal` into an :class:`xml.etree.ElementTree.Element`.
        """
        element = ET.Element('signal')
        element.set('name', self.name)

        for arg in self.args:
            element.append(arg.to_xml())

        return element
class Method:
    """A class that represents a method exposed on an :class:`Interface`.

    :ivar name: The name of this method.
    :vartype name: str

    :ivar in_args: A list of input arguments to this method.
    :vartype in_args: list(Arg)

    :ivar out_args: A list of output arguments to this method.
    :vartype out_args: list(Arg)

    :ivar in_signature: The collected signature string of the input arguments.
    :vartype in_signature: str

    :ivar out_signature: The collected signature string of the output arguments.
    :vartype out_signature: str

    :raises:
        - :class:`InvalidMemberNameError <dbus_next.InvalidMemberNameError>` - If the name of this method is not valid.
    """

    def __init__(self, name: str, in_args: List[Arg] = None, out_args: List[Arg] = None):
        # NOTE: the defaults were previously mutable lists ([]) shared across
        # every instance constructed without args — mutating one method's
        # arg list would leak into all others. Use the None sentinel instead.
        assert_member_name_valid(name)

        in_args = in_args if in_args is not None else []
        out_args = out_args if out_args is not None else []

        self.name = name
        self.in_args = in_args
        self.out_args = out_args
        self.in_signature = ''.join(arg.signature for arg in in_args)
        self.out_signature = ''.join(arg.signature for arg in out_args)

    @staticmethod
    def from_xml(element: ET.Element) -> 'Method':
        """Convert an :class:`xml.etree.ElementTree.Element` to a :class:`Method`.

        The element must be valid DBus introspection XML for a ``method``.

        :param element: The parsed XML element.
        :type element: :class:`xml.etree.ElementTree.Element`

        :raises:
            - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the XML tree is not valid introspection data.
        """
        name = element.attrib.get('name')
        if not name:
            # Was previously the copy-pasted 'interfaces must have ...' message.
            raise InvalidIntrospectionError('methods must have a "name" attribute')

        in_args = []
        out_args = []

        for child in element:
            if child.tag == 'arg':
                # Method args default to IN when no direction is given.
                direction = ArgDirection(child.attrib.get('direction', 'in'))
                arg = Arg.from_xml(child, direction)
                if direction == ArgDirection.IN:
                    in_args.append(arg)
                elif direction == ArgDirection.OUT:
                    out_args.append(arg)

        return Method(name, in_args, out_args)

    def to_xml(self) -> ET.Element:
        """Convert this :class:`Method` into an :class:`xml.etree.ElementTree.Element`.
        """
        element = ET.Element('method')
        element.set('name', self.name)

        for arg in self.in_args:
            element.append(arg.to_xml())
        for arg in self.out_args:
            element.append(arg.to_xml())

        return element
class Property:
    """A class that represents a DBus property exposed on an
    :class:`Interface`.

    :ivar name: The name of this property.
    :vartype name: str

    :ivar signature: The signature string for this property. Must be a single complete type.
    :vartype signature: str

    :ivar access: Whether this property is readable and writable.
    :vartype access: :class:`PropertyAccess <dbus_next.PropertyAccess>`

    :ivar type: The parsed type of this property.
    :vartype type: :class:`SignatureType <dbus_next.SignatureType>`

    :raises:
        - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the property is not a single complete type.
        - :class:`InvalidSignatureError <dbus_next.InvalidSignatureError>` - If the given signature is not valid.
        - :class:`InvalidMemberNameError <dbus_next.InvalidMemberNameError>` - If the member name is not valid.
    """

    def __init__(self,
                 name: str,
                 signature: str,
                 access: PropertyAccess = PropertyAccess.READWRITE):
        assert_member_name_valid(name)

        # A property's signature must describe exactly one complete type.
        tree = SignatureTree._get(signature)
        if len(tree.types) != 1:
            raise InvalidIntrospectionError(
                f'properties must have a single complete type. (has {len(tree.types)} types)')

        self.name = name
        self.signature = signature
        self.access = access
        self.type = tree.types[0]

    @staticmethod
    def from_xml(element: ET.Element) -> 'Property':
        """Convert an :class:`xml.etree.ElementTree.Element` to a :class:`Property`.

        The element must be valid DBus introspection XML for a ``property``.

        :param element: The parsed XML element.
        :type element: :class:`xml.etree.ElementTree.Element`

        :raises:
            - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the XML tree is not valid introspection data.
        """
        name = element.attrib.get('name')
        signature = element.attrib.get('type')
        # Properties default to readwrite access per the introspection format.
        access = PropertyAccess(element.attrib.get('access', 'readwrite'))

        if not name:
            raise InvalidIntrospectionError('properties must have a "name" attribute')
        if not signature:
            raise InvalidIntrospectionError('properties must have a "type" attribute')

        return Property(name, signature, access)

    def to_xml(self) -> ET.Element:
        """Convert this :class:`Property` into an :class:`xml.etree.ElementTree.Element`.
        """
        element = ET.Element('property')
        element.set('name', self.name)
        element.set('type', self.signature)
        element.set('access', self.access.value)

        return element
class Interface:
    """A class that represents a DBus interface exported on on object path.

    Contains information about the methods, signals, and properties exposed on
    this interface.

    :ivar name: The name of this interface.
    :vartype name: str

    :ivar methods: A list of methods exposed on this interface.
    :vartype methods: list(:class:`Method`)

    :ivar signals: A list of signals exposed on this interface.
    :vartype signals: list(:class:`Signal`)

    :ivar properties: A list of properties exposed on this interface.
    :vartype properties: list(:class:`Property`)

    :raises:
        - :class:`InvalidInterfaceNameError <dbus_next.InvalidInterfaceNameError>` - If the name is not a valid interface name.
    """

    def __init__(self,
                 name: str,
                 methods: List[Method] = None,
                 signals: List[Signal] = None,
                 properties: List[Property] = None):
        assert_interface_name_valid(name)

        self.name = name
        # Never store a shared default; each interface owns its member lists.
        self.methods = [] if methods is None else methods
        self.signals = [] if signals is None else signals
        self.properties = [] if properties is None else properties

    @staticmethod
    def from_xml(element: ET.Element) -> 'Interface':
        """Convert a :class:`xml.etree.ElementTree.Element` into a
        :class:`Interface`.

        The element must be valid DBus introspection XML for an ``interface``.

        :param element: The parsed XML element.
        :type element: :class:`xml.etree.ElementTree.Element`

        :raises:
            - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the XML tree is not valid introspection data.
        """
        name = element.attrib.get('name')
        if not name:
            raise InvalidIntrospectionError('interfaces must have a "name" attribute')

        interface = Interface(name)

        # Dispatch each child tag to the matching parser and member list;
        # unknown tags are silently ignored, as before.
        members = {
            'method': (interface.methods, Method.from_xml),
            'signal': (interface.signals, Signal.from_xml),
            'property': (interface.properties, Property.from_xml),
        }
        for child in element:
            if child.tag in members:
                collection, parse = members[child.tag]
                collection.append(parse(child))

        return interface

    def to_xml(self) -> ET.Element:
        """Convert this :class:`Interface` into an :class:`xml.etree.ElementTree.Element`.
        """
        element = ET.Element('interface')
        element.set('name', self.name)

        # Emit methods first, then signals, then properties.
        for member in self.methods + self.signals + self.properties:
            element.append(member.to_xml())

        return element
class Node:
    """A class that represents a node in an object path in introspection data.

    A node contains information about interfaces exported on this path and
    child nodes. A node can be converted to and from introspection XML exposed
    through the ``org.freedesktop.DBus.Introspectable`` standard DBus
    interface.

    This class is an essential building block for a high-level DBus interface.
    This is the underlying data structure for the :class:`ProxyObject
    <dbus_next.proxy_object.BaseProxyInterface>`. A :class:`ServiceInterface
    <dbus_next.service.ServiceInterface>` definition is converted to this class
    to expose XML on the introspectable interface.

    :ivar interfaces: A list of interfaces exposed on this node.
    :vartype interfaces: list(:class:`Interface <dbus_next.introspection.Interface>`)

    :ivar nodes: A list of child nodes.
    :vartype nodes: list(:class:`Node`)

    :ivar name: The object path of this node.
    :vartype name: str

    :ivar is_root: Whether this is the root node. False if it is a child node.
    :vartype is_root: bool

    :raises:
        - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the name is not a valid node name.
    """

    def __init__(self, name: str = None, interfaces: List[Interface] = None, is_root: bool = True):
        # Only the root node may be anonymous; children are keyed by name.
        if not is_root and not name:
            raise InvalidIntrospectionError('child nodes must have a "name" attribute')

        self.interfaces = interfaces if interfaces is not None else []
        self.nodes = []
        self.name = name
        self.is_root = is_root

    @staticmethod
    def from_xml(element: ET.Element, is_root: bool = False):
        """Convert an :class:`xml.etree.ElementTree.Element` to a :class:`Node`.

        The element must be valid DBus introspection XML for a ``node``.

        :param element: The parsed XML element.
        :type element: :class:`xml.etree.ElementTree.Element`
        :param is_root: Whether this is the root node
        :type is_root: bool

        :raises:
            - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the XML tree is not valid introspection data.
        """
        node = Node(element.attrib.get('name'), is_root=is_root)

        # Recurse into child <node> elements; other tags are ignored.
        for child in element:
            if child.tag == 'interface':
                node.interfaces.append(Interface.from_xml(child))
            elif child.tag == 'node':
                node.nodes.append(Node.from_xml(child))

        return node

    @staticmethod
    def parse(data: str) -> 'Node':
        """Parse XML data as a string into a :class:`Node`.

        The string must be valid DBus introspection XML.

        :param data: The XMl string.
        :type data: str

        :raises:
            - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the string is not valid introspection data.
        """
        element = ET.fromstring(data)
        if element.tag != 'node':
            raise InvalidIntrospectionError(
                'introspection data must have a "node" for the root element')

        return Node.from_xml(element, is_root=True)

    def to_xml(self) -> ET.Element:
        """Convert this :class:`Node` into an :class:`xml.etree.ElementTree.Element`.
        """
        element = ET.Element('node')

        if self.name:
            element.set('name', self.name)

        for interface in self.interfaces:
            element.append(interface.to_xml())
        for node in self.nodes:
            element.append(node.to_xml())

        return element

    def tostring(self) -> str:
        """Convert this :class:`Node` into a DBus introspection XML string.
        """
        header = '<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN"\n"http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">\n'

        def indent(elem, level=0):
            # Pretty-print helper: rewrites .text/.tail whitespace in place so
            # ET.tostring emits one element per line at increasing depth.
            i = "\n" + level * " "
            if len(elem):
                if not elem.text or not elem.text.strip():
                    elem.text = i + " "
                if not elem.tail or not elem.tail.strip():
                    elem.tail = i
                # NOTE: the loop variable deliberately shadows `elem`; after
                # the loop `elem` is the LAST child, whose tail is fixed below.
                for elem in elem:
                    indent(elem, level + 1)
                if not elem.tail or not elem.tail.strip():
                    elem.tail = i
            else:
                if level and (not elem.tail or not elem.tail.strip()):
                    elem.tail = i

        xml = self.to_xml()
        indent(xml)

        return header + ET.tostring(xml, encoding='unicode').rstrip()

    @staticmethod
    def default(name: str = None) -> 'Node':
        """Create a :class:`Node` with the default interfaces supported by this library.

        The default interfaces include:

        * ``org.freedesktop.DBus.Introspectable``
        * ``org.freedesktop.DBus.Peer``
        * ``org.freedesktop.DBus.Properties``
        * ``org.freedesktop.DBus.ObjectManager``
        """
        return Node(name,
                    is_root=True,
                    interfaces=[
                        Interface('org.freedesktop.DBus.Introspectable',
                                  methods=[
                                      Method('Introspect',
                                             out_args=[Arg('s', ArgDirection.OUT, 'data')])
                                  ]),
                        Interface('org.freedesktop.DBus.Peer',
                                  methods=[
                                      Method('GetMachineId',
                                             out_args=[Arg('s', ArgDirection.OUT, 'machine_uuid')]),
                                      Method('Ping')
                                  ]),
                        Interface('org.freedesktop.DBus.Properties',
                                  methods=[
                                      Method('Get',
                                             in_args=[
                                                 Arg('s', ArgDirection.IN, 'interface_name'),
                                                 Arg('s', ArgDirection.IN, 'property_name')
                                             ],
                                             out_args=[Arg('v', ArgDirection.OUT, 'value')]),
                                      Method('Set',
                                             in_args=[
                                                 Arg('s', ArgDirection.IN, 'interface_name'),
                                                 Arg('s', ArgDirection.IN, 'property_name'),
                                                 Arg('v', ArgDirection.IN, 'value')
                                             ]),
                                      Method('GetAll',
                                             in_args=[Arg('s', ArgDirection.IN, 'interface_name')],
                                             out_args=[Arg('a{sv}', ArgDirection.OUT, 'props')])
                                  ],
                                  signals=[
                                      Signal('PropertiesChanged',
                                             args=[
                                                 Arg('s', ArgDirection.OUT, 'interface_name'),
                                                 Arg('a{sv}', ArgDirection.OUT,
                                                     'changed_properties'),
                                                 Arg('as', ArgDirection.OUT,
                                                     'invalidated_properties')
                                             ])
                                  ]),
                        Interface('org.freedesktop.DBus.ObjectManager',
                                  methods=[
                                      Method('GetManagedObjects',
                                             out_args=[
                                                 Arg('a{oa{sa{sv}}}', ArgDirection.OUT,
                                                     'objpath_interfaces_and_properties')
                                             ]),
                                  ],
                                  signals=[
                                      Signal('InterfacesAdded',
                                             args=[
                                                 Arg('o', ArgDirection.OUT, 'object_path'),
                                                 Arg('a{sa{sv}}', ArgDirection.OUT,
                                                     'interfaces_and_properties'),
                                             ]),
                                      Signal('InterfacesRemoved',
                                             args=[
                                                 Arg('o', ArgDirection.OUT, 'object_path'),
                                                 Arg('as', ArgDirection.OUT, 'interfaces'),
                                             ])
                                  ]),
                    ])
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,000
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/aio/__init__.py
|
from .message_bus import MessageBus
from .proxy_object import ProxyObject, ProxyInterface
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,001
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/test_glib_low_level.py
|
from dbus_next.glib import MessageBus
from dbus_next import Message, MessageType, MessageFlag
from test.util import check_gi_repository, skip_reason_no_gi
import pytest
has_gi = check_gi_repository()
if has_gi:
from gi.repository import GLib
@pytest.mark.skipif(not has_gi, reason=skip_reason_no_gi)
def test_standard_interfaces():
    """Call standard org.freedesktop.DBus members over the GLib sync API and
    validate each reply's type, serial, and signature."""
    bus = MessageBus().connect_sync()

    call = Message(destination='org.freedesktop.DBus',
                   path='/org/freedesktop/DBus',
                   interface='org.freedesktop.DBus',
                   member='ListNames',
                   serial=bus.next_serial())

    # ListNames returns an array of strings that includes our unique name.
    response = bus.call_sync(call)
    assert response.message_type == MessageType.METHOD_RETURN
    assert response.reply_serial == call.serial
    assert response.signature == 'as'
    assert bus.unique_name in response.body[0]

    # Introspect on the Introspectable interface returns XML as a string.
    call.interface = 'org.freedesktop.DBus.Introspectable'
    call.member = 'Introspect'
    call.serial = bus.next_serial()
    response = bus.call_sync(call)
    assert response.message_type == MessageType.METHOD_RETURN
    assert response.reply_serial == call.serial
    assert response.signature == 's'
    assert type(response.body[0]) is str

    # A nonexistent member produces an ERROR reply with a string body.
    call.member = 'MemberDoesNotExist'
    call.serial = bus.next_serial()
    response = bus.call_sync(call)
    assert response.message_type == MessageType.ERROR
    assert response.reply_serial == call.serial
    assert response.error_name
    assert response.signature == 's'
    assert type(response.body[0]) is str
@pytest.mark.skipif(not has_gi, reason=skip_reason_no_gi)
def test_sending_messages_between_buses():
    # Open two independent connections to the same bus; bus2 calls a method
    # on bus1's unique name and bus1 answers through a raw message handler
    # (no high-level service objects involved).
    bus1 = MessageBus().connect_sync()
    bus2 = MessageBus().connect_sync()
    msg = Message(destination=bus1.unique_name,
                  path='/org/test/path',
                  interface='org.test.iface',
                  member='SomeMember',
                  serial=bus2.next_serial())

    def message_handler(sent):
        # React only to the call sent above (matched by sender and serial),
        # reply with a method return, then unregister (one-shot handler).
        if sent.sender == bus2.unique_name and sent.serial == msg.serial:
            assert sent.path == msg.path
            assert sent.serial == msg.serial
            assert sent.interface == msg.interface
            assert sent.member == msg.member
            bus1.send(Message.new_method_return(sent, 's', ['got it']))
            bus1.remove_message_handler(message_handler)
            return True

    bus1.add_message_handler(message_handler)

    reply = bus2.call_sync(msg)
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body[0]
    assert reply.sender == bus1.unique_name
    assert reply.signature == 's'
    assert reply.body == ['got it']
    assert reply.reply_serial == msg.serial

    def message_handler_error(sent):
        # Same pattern as above, but reply with a DBus error instead.
        if sent.sender == bus2.unique_name and sent.serial == msg.serial:
            assert sent.path == msg.path
            assert sent.serial == msg.serial
            assert sent.interface == msg.interface
            assert sent.member == msg.member
            bus1.send(Message.new_error(sent, 'org.test.Error', 'throwing an error'))
            bus1.remove_message_handler(message_handler_error)
            return True

    bus1.add_message_handler(message_handler_error)

    # Re-send the same call with a fresh serial; this time it should error.
    msg.serial = bus2.next_serial()
    reply = bus2.call_sync(msg)
    assert reply.message_type == MessageType.ERROR
    assert reply.sender == bus1.unique_name
    assert reply.reply_serial == msg.serial
    assert reply.error_name == 'org.test.Error'
    assert reply.signature == 's'
    assert reply.body == ['throwing an error']

    # With NO_REPLY_EXPECTED the synchronous call returns None immediately.
    msg.serial = bus2.next_serial()
    msg.flags = MessageFlag.NO_REPLY_EXPECTED
    reply = bus2.call_sync(msg)
    assert reply is None
@pytest.mark.skipif(not has_gi, reason=skip_reason_no_gi)
def test_sending_signals_between_buses():
    bus1 = MessageBus().connect_sync()
    bus2 = MessageBus().connect_sync()

    # Ask the daemon to route bus2's messages to bus1: signals are not
    # delivered to other connections unless a match rule is added.
    add_match_msg = Message(destination='org.freedesktop.DBus',
                            path='/org/freedesktop/DBus',
                            interface='org.freedesktop.DBus',
                            member='AddMatch',
                            signature='s',
                            body=[f'sender={bus2.unique_name}'])
    bus1.call_sync(add_match_msg)

    main = GLib.MainLoop()

    def wait_for_message():
        # Spin the GLib main loop until a message from bus2 arrives on bus1,
        # then return it (the handler unregisters itself and quits the loop).
        ret = None

        def message_handler(signal):
            nonlocal ret
            if signal.sender == bus2.unique_name:
                ret = signal
                bus1.remove_message_handler(message_handler)
                main.quit()

        bus1.add_message_handler(message_handler)
        main.run()
        return ret

    bus2.send(
        Message.new_signal('/org/test/path', 'org.test.interface', 'SomeSignal', 's', ['a signal']))

    signal = wait_for_message()
    assert signal.message_type == MessageType.SIGNAL
    assert signal.path == '/org/test/path'
    assert signal.interface == 'org.test.interface'
    assert signal.member == 'SomeSignal'
    assert signal.signature == 's'
    assert signal.body == ['a signal']
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,002
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/proxy_object.py
|
from .validators import assert_object_path_valid, assert_bus_name_valid
from . import message_bus
from .message import Message
from .constants import MessageType, ErrorType
from . import introspection as intr
from .errors import DBusError, InterfaceNotFoundError
from ._private.util import replace_idx_with_fds
from typing import Type, Union, List, Coroutine
import logging
import xml.etree.ElementTree as ET
import inspect
import re
import asyncio
class BaseProxyInterface:
    """An abstract class representing a proxy to an interface exported on the bus by another client.

    Implementations of this class are not meant to be constructed directly by
    users. Use :func:`BaseProxyObject.get_interface` to get a proxy interface.
    Each message bus implementation provides its own proxy interface
    implementation that will be returned by that method.

    Proxy interfaces can be used to call methods, get properties, and listen to
    signals on the interface. Proxy interfaces are created dynamically with a
    family of methods for each of these operations based on what members the
    interface exposes. Each proxy interface implementation exposes these
    members in a different way depending on the features of the backend. See
    the documentation of the proxy interface implementation you use for more
    details.

    :ivar bus_name: The name of the bus this interface is exported on.
    :vartype bus_name: str
    :ivar path: The object path exported on the client that owns the bus name.
    :vartype path: str
    :ivar introspection: Parsed introspection data for the proxy interface.
    :vartype introspection: :class:`Node <dbus_next.introspection.Interface>`
    :ivar bus: The message bus this proxy interface is connected to.
    :vartype bus: :class:`BaseMessageBus <dbus_next.message_bus.BaseMessageBus>`
    """
    def __init__(self, bus_name, path, introspection, bus):
        self.bus_name = bus_name
        self.path = path
        self.introspection = introspection
        self.bus = bus
        # member name -> list of user callbacks registered via on_[signal]
        self._signal_handlers = {}
        # Match rule added to the bus while at least one handler is
        # registered (see _add_signal) so the daemon routes signals here.
        self._signal_match_rule = f"type='signal',sender={bus_name},interface={introspection.name},path={path}"

    _underscorer1 = re.compile(r'(.)([A-Z][a-z]+)')
    _underscorer2 = re.compile(r'([a-z0-9])([A-Z])')

    @staticmethod
    def _to_snake_case(member):
        # Convert a CamelCase DBus member name into snake_case for the
        # generated Python member names.
        subbed = BaseProxyInterface._underscorer1.sub(r'\1_\2', member)
        return BaseProxyInterface._underscorer2.sub(r'\1_\2', subbed).lower()

    @staticmethod
    def _check_method_return(msg, signature=None):
        # Raise a DBusError unless msg is a METHOD_RETURN carrying the
        # expected signature (when one is given).
        if msg.message_type == MessageType.ERROR:
            raise DBusError._from_message(msg)
        elif msg.message_type != MessageType.METHOD_RETURN:
            raise DBusError(ErrorType.CLIENT_ERROR, 'method call didnt return a method return', msg)
        elif signature is not None and msg.signature != signature:
            raise DBusError(ErrorType.CLIENT_ERROR,
                            f'method call returned unexpected signature: "{msg.signature}"', msg)

    def _add_method(self, intr_method):
        raise NotImplementedError('this must be implemented in the inheriting class')

    def _add_property(self, intr_property):
        raise NotImplementedError('this must be implemented in the inheriting class')

    def _message_handler(self, msg):
        # Dispatch an incoming signal message to the registered callbacks.
        if not msg._matches(message_type=MessageType.SIGNAL,
                            interface=self.introspection.name,
                            path=self.path) or msg.member not in self._signal_handlers:
            return

        if msg.sender != self.bus_name and self.bus._name_owners.get(self.bus_name,
                                                                     '') != msg.sender:
            # The sender is always a unique name, but the bus name given might
            # be a well known name. If the sender isn't an exact match, check
            # to see if it owns the bus_name we were given from the cache kept
            # on the bus for this purpose.
            return

        match = [s for s in self.introspection.signals if s.name == msg.member]
        if not len(match):
            return
        intr_signal = match[0]
        if intr_signal.signature != msg.signature:
            logging.warning(
                f'got signal "{self.introspection.name}.{msg.member}" with unexpected signature "{msg.signature}"'
            )
            return

        body = replace_idx_with_fds(msg.signature, msg.body, msg.unix_fds)
        for handler in self._signal_handlers[msg.member]:
            cb_result = handler(*body)
            if isinstance(cb_result, Coroutine):
                # Async handlers return a coroutine; schedule it on the
                # running event loop rather than awaiting inline.
                asyncio.create_task(cb_result)

    def _add_signal(self, intr_signal, interface):
        # Attach on_[signal]/off_[signal] registration functions for one
        # introspected signal onto the given proxy interface.
        def on_signal_fn(fn):
            # Check callability *before* introspecting: inspect.signature()
            # raises a generic TypeError on non-callables, which would mask
            # the intended error message below.
            if not callable(fn):
                raise TypeError(
                    f'the signal handler must be a function with {len(intr_signal.args)} parameters')
            fn_signature = inspect.signature(fn)
            if len(fn_signature.parameters) != len(intr_signal.args):
                raise TypeError(
                    f'the signal handler must be a function with {len(intr_signal.args)} parameters')

            if not self._signal_handlers:
                # First handler for this interface: start listening.
                self.bus._add_match_rule(self._signal_match_rule)
                self.bus.add_message_handler(self._message_handler)

            if intr_signal.name not in self._signal_handlers:
                self._signal_handlers[intr_signal.name] = []

            self._signal_handlers[intr_signal.name].append(fn)

        def off_signal_fn(fn):
            try:
                i = self._signal_handlers[intr_signal.name].index(fn)
                del self._signal_handlers[intr_signal.name][i]
                if not self._signal_handlers[intr_signal.name]:
                    del self._signal_handlers[intr_signal.name]
            except (KeyError, ValueError):
                # Removing a handler that was never added is a no-op.
                return

            if not self._signal_handlers:
                # Last handler removed: stop listening on the bus.
                self.bus._remove_match_rule(self._signal_match_rule)
                self.bus.remove_message_handler(self._message_handler)

        snake_case = BaseProxyInterface._to_snake_case(intr_signal.name)
        setattr(interface, f'on_{snake_case}', on_signal_fn)
        setattr(interface, f'off_{snake_case}', off_signal_fn)
class BaseProxyObject:
    """An abstract class representing a proxy to an object exported on the bus by another client.

    Implementations of this class are not meant to be constructed directly. Use
    :func:`BaseMessageBus.get_proxy_object()
    <dbus_next.message_bus.BaseMessageBus.get_proxy_object>` to get a proxy
    object. Each message bus implementation provides its own proxy object
    implementation that will be returned by that method.

    The primary use of the proxy object is to select a proxy interface to act
    on. Information on what interfaces are available is provided by
    introspection data provided to this class. This introspection data can
    either be included in your project as an XML file (recommended) or
    retrieved from the ``org.freedesktop.DBus.Introspectable`` interface at
    runtime.

    :ivar bus_name: The name of the bus this object is exported on.
    :vartype bus_name: str
    :ivar path: The object path exported on the client that owns the bus name.
    :vartype path: str
    :ivar introspection: Parsed introspection data for the proxy object.
    :vartype introspection: :class:`Node <dbus_next.introspection.Node>`
    :ivar bus: The message bus this proxy object is connected to.
    :vartype bus: :class:`BaseMessageBus <dbus_next.message_bus.BaseMessageBus>`
    :ivar ~.ProxyInterface: The proxy interface class this proxy object uses.
    :vartype ~.ProxyInterface: Type[:class:`BaseProxyInterface <dbus_next.proxy_object.BaseProxyInterface>`]
    :ivar child_paths: A list of absolute object paths of the children of this object.
    :vartype child_paths: list(str)

    :raises:
        - :class:`InvalidBusNameError <dbus_next.InvalidBusNameError>` - If the given bus name is not valid.
        - :class:`InvalidObjectPathError <dbus_next.InvalidObjectPathError>` - If the given object path is not valid.
        - :class:`InvalidIntrospectionError <dbus_next.InvalidIntrospectionError>` - If the introspection data for the node is not valid.
    """
    def __init__(self, bus_name: str, path: str, introspection: Union[intr.Node, str, ET.Element],
                 bus: 'message_bus.BaseMessageBus', ProxyInterface: Type[BaseProxyInterface]):
        assert_object_path_valid(path)
        assert_bus_name_valid(bus_name)

        if not isinstance(bus, message_bus.BaseMessageBus):
            raise TypeError('bus must be an instance of BaseMessageBus')
        if not issubclass(ProxyInterface, BaseProxyInterface):
            raise TypeError('ProxyInterface must be an instance of BaseProxyInterface')

        # Accept parsed introspection, raw XML text, or an ElementTree node.
        if type(introspection) is intr.Node:
            self.introspection = introspection
        elif type(introspection) is str:
            self.introspection = intr.Node.parse(introspection)
        elif type(introspection) is ET.Element:
            self.introspection = intr.Node.from_xml(introspection)
        else:
            raise TypeError(
                'introspection must be xml node introspection or introspection.Node class')

        self.bus_name = bus_name
        self.path = path
        self.bus = bus
        self.ProxyInterface = ProxyInterface
        self.child_paths = [f'{path}/{n.name}' for n in self.introspection.nodes]

        # interface name -> connected proxy interface (built lazily)
        self._interfaces = {}

        # lazy loaded by get_children()
        self._children = None

    def get_interface(self, name: str) -> BaseProxyInterface:
        """Get an interface exported on this proxy object and connect it to the bus.

        :param name: The name of the interface to retrieve.
        :type name: str

        :returns: The connected proxy interface (cached across calls).
        :rtype: :class:`BaseProxyInterface`

        :raises:
            - :class:`InterfaceNotFoundError <dbus_next.InterfaceNotFoundError>` - If there is no interface by this name exported on the bus.
        """
        if name in self._interfaces:
            return self._interfaces[name]

        try:
            intr_interface = next(i for i in self.introspection.interfaces if i.name == name)
        except StopIteration:
            # Suppress the StopIteration context: it is an implementation
            # detail of the search above, not useful to the caller.
            raise InterfaceNotFoundError(f'interface not found on this object: {name}') from None

        interface = self.ProxyInterface(self.bus_name, self.path, intr_interface, self.bus)

        for intr_method in intr_interface.methods:
            interface._add_method(intr_method)
        for intr_property in intr_interface.properties:
            interface._add_property(intr_property)
        for intr_signal in intr_interface.signals:
            interface._add_signal(intr_signal, interface)

        def get_owner_notify(msg, err):
            # Callback for the async GetNameOwner call below: caches the
            # unique name that owns our (well-known) bus name, used for
            # signal sender matching in BaseProxyInterface._message_handler.
            if err:
                logging.error(f'getting name owner for "{name}" failed, {err}')
                return
            if msg.message_type == MessageType.ERROR:
                if msg.error_name != ErrorType.NAME_HAS_NO_OWNER.value:
                    logging.error(f'getting name owner for "{name}" failed, {msg.body[0]}')
                return

            self.bus._name_owners[self.bus_name] = msg.body[0]

        if self.bus_name[0] != ':' and not self.bus._name_owners.get(self.bus_name, ''):
            # Only well-known names need owner resolution; a unique name
            # (starting with ':') already identifies the sender exactly.
            self.bus._call(
                Message(destination='org.freedesktop.DBus',
                        interface='org.freedesktop.DBus',
                        path='/org/freedesktop/DBus',
                        member='GetNameOwner',
                        signature='s',
                        body=[self.bus_name]), get_owner_notify)

        self._interfaces[name] = interface
        return interface

    def get_children(self) -> List['BaseProxyObject']:
        """Get the child nodes of this proxy object according to the introspection data."""
        if self._children is None:
            self._children = [
                self.__class__(self.bus_name, self.path, child, self.bus)
                for child in self.introspection.nodes
            ]

        return self._children
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,003
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/service.py
|
from .constants import PropertyAccess
from .signature import SignatureTree, SignatureBodyMismatchError, Variant
from . import introspection as intr
from .errors import SignalDisabledError
from ._private.util import signature_contains_type, replace_fds_with_idx, replace_idx_with_fds, parse_annotation
from functools import wraps
import inspect
from typing import no_type_check_decorator, Dict, List, Any
import copy
import asyncio
class _Method:
    """Internal descriptor for a DBus service method.

    Derives the IN/OUT signatures and introspection data from the decorated
    function's parameter and return annotations, which must be DBus type
    strings.
    """
    def __init__(self, fn, name, disabled=False):
        sig = inspect.signature(fn)
        # Skip the implicit "self" parameter; every remaining parameter must
        # be annotated with a DBus type string.
        params = list(sig.parameters.values())[1:]
        in_args = []
        in_types = []
        for param in params:
            dbus_type = parse_annotation(param.annotation)
            if not dbus_type:
                raise ValueError(
                    'method parameters must specify the dbus type string as an annotation')
            in_args.append(intr.Arg(dbus_type, intr.ArgDirection.IN, param.name))
            in_types.append(dbus_type)
        in_signature = ''.join(in_types)
        # The return annotation (if present) describes the OUT arguments.
        out_signature = parse_annotation(sig.return_annotation)
        out_args = []
        if out_signature:
            out_args = [
                intr.Arg(type_, intr.ArgDirection.OUT)
                for type_ in SignatureTree._get(out_signature).types
            ]
        self.name = name
        self.fn = fn
        self.disabled = disabled
        self.introspection = intr.Method(name, in_args, out_args)
        self.in_signature = in_signature
        self.out_signature = out_signature
        self.in_signature_tree = SignatureTree._get(in_signature)
        self.out_signature_tree = SignatureTree._get(out_signature)
def method(name: str = None, disabled: bool = False):
    """A decorator to mark a class method of a :class:`ServiceInterface` to be a DBus service method.

    The parameters and return value must each be annotated with a signature
    string of a single complete DBus type.

    This class method will be called when a client calls the method on the DBus
    interface. The parameters given to the function come from the calling
    client and will conform to the dbus-next type system. The parameters
    returned will be returned to the calling client and must conform to the
    dbus-next type system. If multiple parameters are returned, they must be
    contained within a :class:`list`.

    The decorated method may raise a :class:`DBusError <dbus_next.DBusError>`
    to return an error to the client.

    :param name: The member name that DBus clients will use to call this method. Defaults to the name of the class method.
    :type name: str
    :param disabled: If set to true, the method will not be visible to clients.
    :type disabled: bool

    :example:

    ::

        @method()
        def echo(self, val: 's') -> 's':
            return val

        @method()
        def echo_two(self, val1: 's', val2: 'u') -> 'su':
            return [val1, val2]
    """
    if name is not None and type(name) is not str:
        raise TypeError('name must be a string')
    if type(disabled) is not bool:
        raise TypeError('disabled must be a bool')

    @no_type_check_decorator
    def decorator(fn):
        @wraps(fn)
        def wrapped(*args, **kwargs):
            # Propagate the return value so calling the decorated method
            # directly in Python behaves like the undecorated function.
            # (Previously the result was silently discarded; the bus itself
            # dispatches to the stored fn, so bus behavior is unchanged.)
            return fn(*args, **kwargs)

        fn_name = name if name else fn.__name__
        # Marker consumed by ServiceInterface.__init__ during member discovery.
        wrapped.__dict__['__DBUS_METHOD'] = _Method(fn, fn_name, disabled=disabled)
        return wrapped

    return decorator
class _Signal:
    """Internal descriptor for a DBus signal.

    The decorated function's return annotation (a DBus type string) supplies
    the signal's OUT argument signature; no annotation means no arguments.
    """
    def __init__(self, fn, name, disabled=False):
        return_annotation = parse_annotation(inspect.signature(fn).return_annotation)
        args = []
        if return_annotation:
            signature = return_annotation
            signature_tree = SignatureTree._get(signature)
            args = [intr.Arg(type_, intr.ArgDirection.OUT) for type_ in signature_tree.types]
        else:
            # No return annotation: the signal carries no arguments.
            signature = ''
            signature_tree = SignatureTree._get('')
        self.signature = signature
        self.signature_tree = signature_tree
        self.name = name
        self.disabled = disabled
        self.introspection = intr.Signal(self.name, args)
def signal(name: str = None, disabled: bool = False):
    """A decorator to mark a class method of a :class:`ServiceInterface` to be a DBus signal.

    The signal is broadcast on the bus when the decorated class method is
    called by the user.

    If the signal has an out argument, the class method must have a return type
    annotation with a signature string of a single complete DBus type and the
    return value of the class method must conform to the dbus-next type system.
    If the signal has multiple out arguments, they must be returned within a
    ``list``.

    :param name: The member name that will be used for this signal. Defaults to
        the name of the class method.
    :type name: str
    :param disabled: If set to true, the signal will not be visible to clients.
    :type disabled: bool

    :example:

    ::

        @signal()
        def string_signal(self, val) -> 's':
            return val

        @signal()
        def two_strings_signal(self, val1, val2) -> 'ss':
            return [val1, val2]
    """
    if name is not None and type(name) is not str:
        raise TypeError('name must be a string')
    if type(disabled) is not bool:
        raise TypeError('disabled must be a bool')

    @no_type_check_decorator
    def decorator(fn):
        descriptor = _Signal(fn, name if name else fn.__name__, disabled)

        @wraps(fn)
        def wrapped(self, *args, **kwargs):
            if descriptor.disabled:
                raise SignalDisabledError('Tried to call a disabled signal')
            # The user function computes the signal body; broadcast it on
            # every bus the interface is exported on.
            body = fn(self, *args, **kwargs)
            ServiceInterface._handle_signal(self, descriptor, body)
            return body

        # Marker consumed by ServiceInterface.__init__ during member discovery.
        wrapped.__dict__['__DBUS_SIGNAL'] = descriptor
        return wrapped

    return decorator
class _Property(property):
    """Internal descriptor for a DBus service property.

    Subclasses the builtin ``property`` so normal Python attribute access
    keeps working, while also carrying the DBus name, type signature, access
    mode, and introspection data derived from the decorated getter.
    """
    def set_options(self, options):
        # Merge new options into any previously stored ones; setter() may
        # recreate the descriptor, so this can run more than once.
        self.options = getattr(self, 'options', {})
        for k, v in options.items():
            self.options[k] = v
        if 'name' in options and options['name'] is not None:
            self.name = options['name']
        else:
            # Default the DBus member name to the getter's Python name.
            self.name = self.prop_getter.__name__
        if 'access' in options:
            self.access = PropertyAccess(options['access'])
        else:
            self.access = PropertyAccess.READWRITE
        if 'disabled' in options:
            self.disabled = options['disabled']
        else:
            self.disabled = False
        self.introspection = intr.Property(self.name, self.signature, self.access)
        # Marker used by ServiceInterface.__init__ to discover properties.
        self.__dict__['__DBUS_PROPERTY'] = True
    def __init__(self, fn, *args, **kwargs):
        # fn is the getter; the setter (if any) is attached later via setter().
        self.prop_getter = fn
        self.prop_setter = None
        inspection = inspect.signature(fn)
        if len(inspection.parameters) != 1:
            raise ValueError('the property must only have the "self" input parameter')
        # The getter's return annotation supplies the DBus type signature.
        return_annotation = parse_annotation(inspection.return_annotation)
        if not return_annotation:
            raise ValueError(
                'the property must specify the dbus type string as a return annotation string')
        self.signature = return_annotation
        tree = SignatureTree._get(return_annotation)
        if len(tree.types) != 1:
            raise ValueError('the property signature must be a single complete type')
        self.type = tree.types[0]
        if 'options' in kwargs:
            options = kwargs['options']
            self.set_options(options)
            del kwargs['options']
        super().__init__(fn, *args, **kwargs)
    def setter(self, fn, **kwargs):
        # XXX The setter decorator seems to be recreating the class in the list
        # of class members and clobbering the options so we need to reset them.
        # Why does it do that?
        result = super().setter(fn, **kwargs)
        result.prop_setter = fn
        result.set_options(self.options)
        return result
def dbus_property(access: PropertyAccess = PropertyAccess.READWRITE,
                  name: str = None,
                  disabled: bool = False):
    """A decorator to mark a class method of a :class:`ServiceInterface` to be a DBus property.

    The class method must be a Python getter method with a return annotation
    that is a signature string of a single complete DBus type. When a client
    gets the property through the ``org.freedesktop.DBus.Properties``
    interface, the getter will be called and the resulting value will be
    returned to the client.

    If the property is writable, it must have a setter method that takes a
    single parameter that is annotated with the same signature. When a client
    sets the property through the ``org.freedesktop.DBus.Properties``
    interface, the setter will be called with the value from the calling
    client.

    The parameters of the getter and the setter must conform to the dbus-next
    type system. The getter or the setter may raise a :class:`DBusError
    <dbus_next.DBusError>` to return an error to the client.

    :param name: The name that DBus clients will use to interact with this
        property on the bus.
    :type name: str
    :param disabled: If set to true, the property will not be visible to
        clients.
    :type disabled: bool

    :example:

    ::

        @dbus_property()
        def string_prop(self) -> 's':
            return self._string_prop

        @string_prop.setter
        def string_prop(self, val: 's'):
            self._string_prop = val
    """
    if type(access) is not PropertyAccess:
        raise TypeError('access must be a PropertyAccess class')
    if name is not None and type(name) is not str:
        raise TypeError('name must be a string')
    if type(disabled) is not bool:
        raise TypeError('disabled must be a bool')

    @no_type_check_decorator
    def decorator(fn):
        # All validation and signature parsing happens in _Property.__init__.
        return _Property(fn, options=dict(name=name, access=access, disabled=disabled))

    return decorator
class ServiceInterface:
    """An abstract class that can be extended by the user to define DBus services.
    Instances of :class:`ServiceInterface` can be exported on a path of the bus
    with the :class:`export <dbus_next.message_bus.BaseMessageBus.export>`
    method of a :class:`MessageBus <dbus_next.message_bus.BaseMessageBus>`.
    Use the :func:`@method <dbus_next.service.method>`, :func:`@dbus_property
    <dbus_next.service.dbus_property>`, and :func:`@signal
    <dbus_next.service.signal>` decorators to mark class methods as DBus
    methods, properties, and signals respectively.
    :ivar name: The name of this interface as it appears to clients. Must be a
        valid interface name.
    :vartype name: str
    """
    def __init__(self, name: str):
        # TODO cannot be overridden by a dbus member
        self.name = name
        # Registries of decorated members, filled below by inspecting the
        # subclass. Name-mangled so DBus members cannot clobber them.
        self.__methods = []
        self.__properties = []
        self.__signals = []
        # Buses this interface is currently exported on (see _add_bus).
        self.__buses = set()
        for name, member in inspect.getmembers(type(self)):
            member_dict = getattr(member, '__dict__', {})
            if type(member) is _Property:
                # XXX The getter and the setter may show up as different
                # members if they have different names. But if they have the
                # same name, they will be the same member. So we try to merge
                # them together here. I wish we could make this cleaner.
                found = False
                for prop in self.__properties:
                    if prop.prop_getter is member.prop_getter:
                        found = True
                        if member.prop_setter is not None:
                            prop.prop_setter = member.prop_setter
                if not found:
                    self.__properties.append(member)
            elif '__DBUS_METHOD' in member_dict:
                method = member_dict['__DBUS_METHOD']
                assert type(method) is _Method
                self.__methods.append(method)
            elif '__DBUS_SIGNAL' in member_dict:
                signal = member_dict['__DBUS_SIGNAL']
                assert type(signal) is _Signal
                self.__signals.append(signal)
        # validate that writable properties have a setter
        for prop in self.__properties:
            if prop.access.writable() and prop.prop_setter is None:
                raise ValueError(f'property "{prop.name}" is writable but does not have a setter')
    def emit_properties_changed(self,
                                changed_properties: Dict[str, Any],
                                invalidated_properties: List[str] = []):
        """Emit the ``org.freedesktop.DBus.Properties.PropertiesChanged`` signal.
        This signal is intended to be used to alert clients when a property of
        the interface has changed.
        :param changed_properties: The keys must be the names of properties exposed by this bus. The values must be valid for the signature of those properties.
        :type changed_properties: dict(str, Any)
        :param invalidated_properties: A list of names of properties that are now invalid (presumably for clients who cache the value).
        :type invalidated_properties: list(str)
        """
        # TODO cannot be overridden by a dbus member
        # NOTE(review): the [] default is only read here, never mutated, so
        # sharing it across calls is harmless — but mutable defaults are a
        # known pitfall worth revisiting.
        variant_dict = {}
        # Wrap each changed value in a Variant typed with the property's
        # declared signature, as the Properties interface requires.
        for prop in ServiceInterface._get_properties(self):
            if prop.name in changed_properties:
                variant_dict[prop.name] = Variant(prop.signature, changed_properties[prop.name])
        body = [self.name, variant_dict, invalidated_properties]
        for bus in ServiceInterface._get_buses(self):
            bus._interface_signal_notify(self, 'org.freedesktop.DBus.Properties',
                                         'PropertiesChanged', 'sa{sv}as', body)
    def introspect(self) -> intr.Interface:
        """Get introspection information for this interface.
        This might be useful for creating clients for the interface or examining the introspection output of an interface.
        :returns: The introspection data for the interface.
        :rtype: :class:`dbus_next.introspection.Interface`
        """
        # TODO cannot be overridden by a dbus member
        # Disabled members are omitted so clients never see them.
        return intr.Interface(self.name,
                              methods=[
                                  method.introspection
                                  for method in ServiceInterface._get_methods(self)
                                  if not method.disabled
                              ],
                              signals=[
                                  signal.introspection
                                  for signal in ServiceInterface._get_signals(self)
                                  if not signal.disabled
                              ],
                              properties=[
                                  prop.introspection
                                  for prop in ServiceInterface._get_properties(self)
                                  if not prop.disabled
                              ])
    # The static accessors below let other modules of the library reach the
    # name-mangled private registries of an arbitrary interface instance.
    @staticmethod
    def _get_properties(interface):
        return interface.__properties
    @staticmethod
    def _get_methods(interface):
        return interface.__methods
    @staticmethod
    def _get_signals(interface):
        return interface.__signals
    @staticmethod
    def _get_buses(interface):
        return interface.__buses
    @staticmethod
    def _add_bus(interface, bus):
        # Record that this interface is exported on the given bus.
        interface.__buses.add(bus)
    @staticmethod
    def _remove_bus(interface, bus):
        # Forget a bus the interface is no longer exported on.
        interface.__buses.remove(bus)
    @staticmethod
    def _msg_body_to_args(msg):
        # Convert a message body into handler arguments, replacing unix fd
        # indexes (type 'h') with the actual fds carried by the message.
        if signature_contains_type(msg.signature_tree, msg.body, 'h'):
            # XXX: This deep copy could be expensive if messages are very
            # large. We could optimize this by only copying what we change
            # here.
            return replace_idx_with_fds(msg.signature_tree, copy.deepcopy(msg.body), msg.unix_fds)
        else:
            return msg.body
    @staticmethod
    def _fn_result_to_body(result, signature_tree):
        '''The high level interfaces may return single values which may be
        wrapped in a list to be a message body. Also they may return fds
        directly for type 'h' which need to be put into an external list.'''
        out_len = len(signature_tree.types)
        if result is None:
            result = []
        else:
            if out_len == 1:
                # A single out argument may be returned bare; wrap it.
                result = [result]
            else:
                if type(result) is not list:
                    raise SignatureBodyMismatchError(
                        'Expected signal to return a list of arguments')
                if out_len != len(result):
                    raise SignatureBodyMismatchError(
                        f"Signature and function return mismatch, expected {len(signature_tree.types)} arguments but got {len(result)}"
                    )
        # Returns (body, unix_fds): raw fds in the body are moved into the
        # external fd list and replaced by their indexes.
        return replace_fds_with_idx(signature_tree, result)
    @staticmethod
    def _handle_signal(interface, signal, result):
        # Broadcast a signal emission on every bus the interface is exported on.
        body, fds = ServiceInterface._fn_result_to_body(result, signal.signature_tree)
        for bus in ServiceInterface._get_buses(interface):
            bus._interface_signal_notify(interface, interface.name, signal.name, signal.signature,
                                         body, fds)
    @staticmethod
    def _get_property_value(interface, prop, callback):
        # Fetch a property value, supporting both plain and coroutine getters.
        # Invokes callback(interface, prop, value, error) exactly once.
        # XXX MUST CHECK TYPE RETURNED BY GETTER
        try:
            if asyncio.iscoroutinefunction(prop.prop_getter):
                task = asyncio.ensure_future(prop.prop_getter(interface))
                def get_property_callback(task):
                    try:
                        result = task.result()
                    except Exception as e:
                        callback(interface, prop, None, e)
                        return
                    callback(interface, prop, result, None)
                task.add_done_callback(get_property_callback)
                return
            # Synchronous getter: go through normal attribute access so the
            # property descriptor runs.
            callback(interface, prop, getattr(interface, prop.prop_getter.__name__), None)
        except Exception as e:
            callback(interface, prop, None, e)
    @staticmethod
    def _set_property_value(interface, prop, value, callback):
        # Set a property value, supporting both plain and coroutine setters.
        # Invokes callback(interface, prop, error) exactly once.
        # XXX MUST CHECK TYPE TO SET
        try:
            if asyncio.iscoroutinefunction(prop.prop_setter):
                task = asyncio.ensure_future(prop.prop_setter(interface, value))
                def set_property_callback(task):
                    try:
                        task.result()
                    except Exception as e:
                        callback(interface, prop, e)
                        return
                    callback(interface, prop, None)
                task.add_done_callback(set_property_callback)
                return
            setattr(interface, prop.prop_setter.__name__, value)
            callback(interface, prop, None)
        except Exception as e:
            callback(interface, prop, e)
    @staticmethod
    def _get_all_property_values(interface, callback, user_data=None):
        # Collect the values of every enabled, readable property, then invoke
        # callback(interface, result, user_data, error) once all have resolved
        # (getters may be asynchronous). A property whose getter errors is
        # dropped from the result and the last error is reported.
        result = {}
        result_error = None
        for prop in ServiceInterface._get_properties(interface):
            if prop.disabled or not prop.access.readable():
                continue
            # None marks a property whose value has not arrived yet.
            result[prop.name] = None
        if not result:
            callback(interface, result, user_data, None)
            return
        def get_property_callback(interface, prop, value, e):
            nonlocal result_error
            if e is not None:
                result_error = e
                del result[prop.name]
            else:
                try:
                    result[prop.name] = Variant(prop.signature, value)
                except SignatureBodyMismatchError as e:
                    result_error = e
                    del result[prop.name]
            # Only fire the final callback after every pending property has
            # either resolved or been removed due to an error.
            if any(v is None for v in result.values()):
                return
            callback(interface, result, user_data, result_error)
        for prop in ServiceInterface._get_properties(interface):
            if prop.disabled or not prop.access.readable():
                continue
            ServiceInterface._get_property_value(interface, prop, get_property_callback)
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,004
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/service/test_export.py
|
from dbus_next.service import ServiceInterface, method
from dbus_next.aio import MessageBus
from dbus_next import Message, MessageType, introspection as intr
import pytest
# Number of interfaces every exported node carries by default (the standard
# org.freedesktop.DBus.* interfaces), used as a baseline in assertions below.
standard_interfaces_count = len(intr.Node.default().interfaces)
class ExampleInterface(ServiceInterface):
    """Minimal service interface exposing one method that records its calls."""
    def __init__(self, name):
        super().__init__(name)
        # Flipped to True by some_method() so tests can assert it was invoked.
        self._method_called = False

    @method()
    def some_method(self):
        self._method_called = True
@pytest.mark.asyncio
async def test_export_unexport():
    # Exercises the bookkeeping of export/unexport: the bus's path table,
    # the interface's bus set, and introspection of exported paths.
    interface = ExampleInterface('test.interface')
    interface2 = ExampleInterface('test.interface2')
    export_path = '/test/path'
    export_path2 = '/test/path/child'
    bus = await MessageBus().connect()
    bus.export(export_path, interface)
    # Exporting registers the interface in the bus's path table and records
    # the bus on the interface.
    assert export_path in bus._path_exports
    assert len(bus._path_exports[export_path]) == 1
    assert bus._path_exports[export_path][0] is interface
    assert len(ServiceInterface._get_buses(interface)) == 1
    bus.export(export_path2, interface2)
    # The parent path shows the standard interfaces plus the exported one,
    # and the exported child path appears as a sub-node.
    node = bus._introspect_export_path(export_path)
    assert len(node.interfaces) == standard_interfaces_count + 1
    assert len(node.nodes) == 1
    # relative path
    assert node.nodes[0].name == 'child'
    bus.unexport(export_path, interface)
    assert export_path not in bus._path_exports
    assert len(ServiceInterface._get_buses(interface)) == 0
    # Two interfaces may share a path.
    bus.export(export_path2, interface)
    assert len(bus._path_exports[export_path2]) == 2
    # test unexporting the whole path
    bus.unexport(export_path2)
    assert not bus._path_exports
    assert not ServiceInterface._get_buses(interface)
    assert not ServiceInterface._get_buses(interface2)
    # test unexporting by name
    bus.export(export_path, interface)
    bus.unexport(export_path, interface.name)
    assert not bus._path_exports
    assert not ServiceInterface._get_buses(interface)
    # Introspecting an unknown path yields an empty node rather than raising.
    node = bus._introspect_export_path('/path/doesnt/exist')
    assert type(node) is intr.Node
    assert not node.interfaces
    assert not node.nodes
@pytest.mark.asyncio
async def test_export_alias():
    """The same interface exported on two paths must be callable on both."""
    bus = await MessageBus().connect()
    interface = ExampleInterface('test.interface')
    parent_path = '/test/path'
    child_path = '/test/path/child'
    bus.export(parent_path, interface)
    bus.export(child_path, interface)

    async def call_some_method(path):
        # Call some_method on this bus's own connection at the given path.
        return await bus.call(
            Message(destination=bus.unique_name,
                    path=path,
                    interface='test.interface',
                    member='some_method'))

    for path in (parent_path, child_path):
        interface._method_called = False
        result = await call_some_method(path)
        assert result.message_type is MessageType.METHOD_RETURN, result.body[0]
        assert interface._method_called
@pytest.mark.asyncio
async def test_export_introspection():
    """The root path introspection collapses exported paths into one subtree."""
    bus = await MessageBus().connect()
    exports = {
        '/test/path': ExampleInterface('test.interface'),
        '/test/path/child': ExampleInterface('test.interface2'),
    }
    for path, iface in exports.items():
        bus.export(path, iface)
    root = bus._introspect_export_path('/')
    # Both exports live under the single '/test' child of the root.
    assert len(root.nodes) == 1
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,005
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/test_fd_passing.py
|
"""This tests the ability to send and receive file descriptors in dbus messages"""
from dbus_next.service import ServiceInterface, method, signal, dbus_property
from dbus_next.signature import SignatureTree, Variant
from dbus_next.aio import MessageBus
from dbus_next import Message, MessageType
import os
import pytest
def open_file():
    """Open the null device read-only and return its raw file descriptor."""
    null_fd = os.open(os.devnull, os.O_RDONLY)
    return null_fd
class ExampleInterface(ServiceInterface):
    """Service interface exercising fd passing through methods, signals,
    and properties.

    Every fd opened or received is tracked in ``self.fds`` so tests can
    inspect and clean them up.
    """
    def __init__(self, name):
        super().__init__(name)
        # All fds this interface has opened or received, in order.
        self.fds = []
    @method()
    def ReturnsFd(self) -> 'h':
        # Opens a fresh fd and returns it to the caller ('h' is the DBus
        # unix fd type).
        fd = open_file()
        self.fds.append(fd)
        return fd
    @method()
    def AcceptsFd(self, fd: 'h'):
        # The bus delivers a duplicated fd; presumably never 0 in these
        # tests since stdin is already open.
        assert fd != 0
        self.fds.append(fd)
    def get_last_fd(self):
        # Most recently opened or received fd.
        return self.fds[-1]
    def cleanup(self):
        # Close and forget every tracked fd.
        for fd in self.fds:
            os.close(fd)
        self.fds.clear()
    @signal()
    def SignalFd(self) -> 'h':
        # Emits a freshly opened fd as the signal body.
        fd = open_file()
        self.fds.append(fd)
        return fd
    @dbus_property()
    def PropFd(self) -> 'h':
        # Lazily open an fd the first time the property is read.
        if not self.fds:
            fd = open_file()
            self.fds.append(fd)
        return self.fds[-1]
    @PropFd.setter
    def PropFd(self, fd: 'h'):
        assert fd
        self.fds.append(fd)
def assert_fds_equal(fd1, fd2):
    """Assert that two descriptors refer to the same underlying file."""
    assert fd1
    assert fd2
    left = os.fstat(fd1)
    right = os.fstat(fd2)
    # Same device, inode and raw device id => same open file.
    for field in ('st_dev', 'st_ino', 'st_rdev'):
        assert getattr(left, field) == getattr(right, field)
@pytest.mark.asyncio
async def test_sending_file_descriptor_low_level():
    """Pass a unix fd in a raw Message and verify the receiver gets a copy."""
    # Both sides must opt in to fd support during the auth handshake.
    bus1 = await MessageBus(negotiate_unix_fd=True).connect()
    bus2 = await MessageBus(negotiate_unix_fd=True).connect()
    fd_before = open_file()
    fd_after = None
    # body=[0] is an index into unix_fds; the descriptor itself travels out
    # of band as ancillary socket data.
    msg = Message(destination=bus1.unique_name,
                  path='/org/test/path',
                  interface='org.test.iface',
                  member='SomeMember',
                  body=[0],
                  signature='h',
                  unix_fds=[fd_before])
    def message_handler(sent):
        nonlocal fd_after
        if sent.sender == bus2.unique_name and sent.serial == msg.serial:
            assert sent.path == msg.path
            assert sent.serial == msg.serial
            assert sent.interface == msg.interface
            assert sent.member == msg.member
            assert sent.body == [0]
            assert len(sent.unix_fds) == 1
            fd_after = sent.unix_fds[0]
            bus1.send(Message.new_method_return(sent, 's', ['got it']))
            # One-shot handler: detach once the expected call was seen.
            bus1.remove_message_handler(message_handler)
            return True
    bus1.add_message_handler(message_handler)
    reply = await bus2.call(msg)
    assert reply.body == ['got it']
    assert fd_after is not None
    # The received fd is a distinct descriptor for the same open file.
    assert_fds_equal(fd_before, fd_after)
    for fd in [fd_before, fd_after]:
        os.close(fd)
    for bus in [bus1, bus2]:
        bus.disconnect()
@pytest.mark.asyncio
async def test_high_level_service_fd_passing(event_loop):
    """Round-trip fds through service methods, signals and properties."""
    bus1 = await MessageBus(negotiate_unix_fd=True).connect()
    bus2 = await MessageBus(negotiate_unix_fd=True).connect()
    interface_name = 'test.interface'
    interface = ExampleInterface(interface_name)
    export_path = '/test/path'
    # Helper: bus2 (client) calls `member` on the object exported by bus1.
    # NOTE(review): the mutable defaults ([]) are safe only because the
    # helper never mutates them — confirm before reusing this pattern.
    async def call(member, signature='', body=[], unix_fds=[], iface=interface.name):
        return await bus2.call(
            Message(destination=bus1.unique_name,
                    path=export_path,
                    interface=iface,
                    member=member,
                    signature=signature,
                    body=body,
                    unix_fds=unix_fds))
    bus1.export(export_path, interface)
    # test that an fd can be returned by the service
    reply = await call('ReturnsFd')
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body
    assert reply.signature == 'h'
    assert len(reply.unix_fds) == 1
    assert_fds_equal(interface.get_last_fd(), reply.unix_fds[0])
    interface.cleanup()
    os.close(reply.unix_fds[0])
    # test that an fd can be sent to the service
    fd = open_file()
    reply = await call('AcceptsFd', signature='h', body=[0], unix_fds=[fd])
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body
    assert_fds_equal(interface.get_last_fd(), fd)
    interface.cleanup()
    os.close(fd)
    # signals
    fut = event_loop.create_future()
    def fd_listener(msg):
        if msg.sender == bus1.unique_name and msg.message_type == MessageType.SIGNAL:
            fut.set_result(msg)
    # Subscribe to everything bus1 emits before triggering the signal.
    reply = await bus2.call(
        Message(destination='org.freedesktop.DBus',
                path='/org/freedesktop/DBus',
                member='AddMatch',
                signature='s',
                body=[f"sender='{bus1.unique_name}'"]))
    assert reply.message_type == MessageType.METHOD_RETURN
    bus2.add_message_handler(fd_listener)
    interface.SignalFd()
    reply = await fut
    assert len(reply.unix_fds) == 1
    # The body carries the index (0) into unix_fds, not the raw fd number.
    assert reply.body == [0]
    assert_fds_equal(reply.unix_fds[0], interface.get_last_fd())
    interface.cleanup()
    os.close(reply.unix_fds[0])
    # properties
    reply = await call('Get',
                       'ss', [interface_name, 'PropFd'],
                       iface='org.freedesktop.DBus.Properties')
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body
    assert reply.body[0].signature == 'h'
    assert reply.body[0].value == 0
    assert len(reply.unix_fds) == 1
    assert_fds_equal(interface.get_last_fd(), reply.unix_fds[0])
    interface.cleanup()
    os.close(reply.unix_fds[0])
    fd = open_file()
    reply = await call('Set',
                       'ssv', [interface_name, 'PropFd', Variant('h', 0)],
                       iface='org.freedesktop.DBus.Properties',
                       unix_fds=[fd])
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body
    assert_fds_equal(interface.get_last_fd(), fd)
    interface.cleanup()
    os.close(fd)
    reply = await call('GetAll', 's', [interface_name], iface='org.freedesktop.DBus.Properties')
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body
    assert reply.body[0]['PropFd'].signature == 'h'
    assert reply.body[0]['PropFd'].value == 0
    assert len(reply.unix_fds) == 1
    assert_fds_equal(interface.get_last_fd(), reply.unix_fds[0])
    interface.cleanup()
    os.close(reply.unix_fds[0])
    for bus in [bus1, bus2]:
        bus.disconnect()
@pytest.mark.asyncio
async def test_sending_file_descriptor_with_proxy(event_loop):
    """Fds must be transparently replaced in every high level proxy API:
    method calls, property get/set and signal callbacks."""
    name = 'dbus.next.test.service'
    path = '/test/path'
    interface_name = 'test.interface'
    bus = await MessageBus(negotiate_unix_fd=True).connect()
    interface = ExampleInterface(interface_name)
    bus.export(path, interface)
    await bus.request_name(name)
    intr = await bus.introspect(name, path)
    proxy = bus.get_proxy_object(name, path, intr)
    proxy_interface = proxy.get_interface(interface_name)
    # test fds are replaced correctly in all high level interfaces
    fd = await proxy_interface.call_returns_fd()
    assert_fds_equal(interface.get_last_fd(), fd)
    interface.cleanup()
    os.close(fd)
    fd = open_file()
    await proxy_interface.call_accepts_fd(fd)
    assert_fds_equal(interface.get_last_fd(), fd)
    interface.cleanup()
    os.close(fd)
    fd = await proxy_interface.get_prop_fd()
    assert_fds_equal(interface.get_last_fd(), fd)
    interface.cleanup()
    os.close(fd)
    fd = open_file()
    await proxy_interface.set_prop_fd(fd)
    assert_fds_equal(interface.get_last_fd(), fd)
    interface.cleanup()
    os.close(fd)
    fut = event_loop.create_future()
    def on_signal_fd(fd):
        # Resolve with the first emission, then detach so a second signal
        # cannot call set_result() twice.
        fut.set_result(fd)
        proxy_interface.off_signal_fd(on_signal_fd)
    proxy_interface.on_signal_fd(on_signal_fd)
    interface.SignalFd()
    fd = await fut
    assert_fds_equal(interface.get_last_fd(), fd)
    interface.cleanup()
    os.close(fd)
    # Fix: every other test in this module disconnects its buses; without
    # this the connection (and its socket) leaked into subsequent tests.
    bus.disconnect()
# Table of (python return value, out signature, expected (body, unix_fds)).
# Raw fd numbers in the result are replaced by their index into the
# collected unix_fds list, recursively through containers and Variants.
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "result, out_signature, expected",
    [
        pytest.param(5, 'h', ([0], [5]), id='Signature: "h"'),
        pytest.param([5, "foo"], 'hs', ([0, "foo"], [5]), id='Signature: "hs"'),
        pytest.param([5, 7], 'hh', ([0, 1], [5, 7]), id='Signature: "hh"'),
        pytest.param([5, 7], 'ah', ([[0, 1]], [5, 7]), id='Signature: "ah"'),
        pytest.param([9], 'ah', ([[0]], [9]), id='Signature: "ah"'),
        pytest.param([3], '(h)', ([[0]], [3]), id='Signature: "(h)"'),
        pytest.param([3, "foo"], '(hs)', ([[0, "foo"]], [3]), id='Signature: "(hs)"'),
        pytest.param([[7, "foo"], [8, "bar"]],
                     'a(hs)', ([[[0, "foo"], [1, "bar"]]], [7, 8]),
                     id='Signature: "a(hs)"'),
        pytest.param({"foo": 3}, 'a{sh}', ([{
            "foo": 0
        }], [3]), id='Signature: "a{sh}"'),
        pytest.param({
            "foo": 3,
            "bar": 6
        },
                     'a{sh}', ([{
                         "foo": 0,
                         "bar": 1
                     }], [3, 6]),
                     id='Signature: "a{sh}"'),
        pytest.param(
            {"foo": [3, 8]}, 'a{sah}', ([{
                "foo": [0, 1]
            }], [3, 8]), id='Signature: "a{sah}"'),
        pytest.param({'foo': Variant('t', 100)},
                     'a{sv}', ([{
                         'foo': Variant('t', 100)
                     }], []),
                     id='Signature: "a{sv}"'),
        pytest.param(['one', ['two', [Variant('s', 'three')]]],
                     '(s(s(v)))', ([['one', ['two', [Variant('s', 'three')]]]], []),
                     id='Signature: "(s(s(v)))"'),
        pytest.param(Variant('h', 2), 'v', ([Variant('h', 0)], [2]), id='Variant with: "h"'),
        pytest.param(Variant('(hh)', [2, 8]),
                     'v', ([Variant('(hh)', [0, 1])], [2, 8]),
                     id='Variant with: "(hh)"'),
        pytest.param(
            Variant('ah', [2, 4]), 'v', ([Variant('ah', [0, 1])], [2, 4]), id='Variant with: "ah"'),
        pytest.param(Variant('(ss)', ['hello', 'world']),
                     'v', ([Variant('(ss)', ['hello', 'world'])], []),
                     id='Variant with: "(ss)"'),
        pytest.param(Variant('v', Variant('t', 100)),
                     'v', ([Variant('v', Variant('t', 100))], []),
                     id='Variant with: "v"'),
        pytest.param([
            Variant('v', Variant('(ss)', ['hello', 'world'])), {
                'foo': Variant('t', 100)
            }, ['one', ['two', [Variant('s', 'three')]]]
        ],
                     'va{sv}(s(s(v)))', ([
                         Variant('v', Variant('(ss)', ['hello', 'world'])), {
                             'foo': Variant('t', 100)
                         }, ['one', ['two', [Variant('s', 'three')]]]
                     ], []),
                     id='Variant with: "va{sv}(s(s(v)))"'),
    ],
)
async def test_fn_result_to_body(result, out_signature, expected):
    """Unit-test the fd-index substitution done by _fn_result_to_body."""
    out_signature_tree = SignatureTree(out_signature)
    assert ServiceInterface._fn_result_to_body(result, out_signature_tree) == expected
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,006
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/client/test_methods.py
|
from dbus_next.message import MessageFlag
from dbus_next.service import ServiceInterface, method
import dbus_next.introspection as intr
from dbus_next import aio, glib, DBusError
from test.util import check_gi_repository, skip_reason_no_gi
import pytest
has_gi = check_gi_repository()
class ExampleInterface(ServiceInterface):
    """Service side of the proxy-object tests.

    The method names and their 'x'/'s' annotations define the DBus
    signatures that the generated client proxies (call_ping,
    call_echo_string, ...) are asserted against below.
    """
    def __init__(self):
        super().__init__('test.interface')
    @method()
    def Ping(self):
        # Empty in/out signature round trip.
        pass
    @method()
    def EchoInt64(self, what: 'x') -> 'x':
        return what
    @method()
    def EchoString(self, what: 's') -> 's':
        return what
    @method()
    def ConcatStrings(self, what1: 's', what2: 's') -> 's':
        return what1 + what2
    @method()
    def EchoThree(self, what1: 's', what2: 's', what3: 's') -> 'sss':
        # Multiple out values are returned as a list.
        return [what1, what2, what3]
    @method()
    def ThrowsError(self):
        # Clients should observe this as a DBusError with matching type/text.
        raise DBusError('test.error', 'something went wrong')
@pytest.mark.asyncio
async def test_aio_proxy_object():
    """Exercise the asyncio proxy: introspection, child nodes, calls, errors."""
    bus_name = 'aio.client.test.methods'
    bus = await aio.MessageBus().connect()
    bus2 = await aio.MessageBus().connect()
    await bus.request_name(bus_name)
    service_interface = ExampleInterface()
    bus.export('/test/path', service_interface)
    # add some more to test nodes
    bus.export('/test/path/child1', ExampleInterface())
    bus.export('/test/path/child2', ExampleInterface())
    introspection = await bus2.introspect(bus_name, '/test/path')
    assert type(introspection) is intr.Node
    obj = bus2.get_proxy_object(bus_name, '/test/path', introspection)
    interface = obj.get_interface(service_interface.name)
    children = obj.get_children()
    assert len(children) == 2
    for child in obj.get_children():
        assert type(child) is aio.ProxyObject
    result = await interface.call_ping()
    assert result is None
    result = await interface.call_echo_string('hello')
    assert result == 'hello'
    result = await interface.call_concat_strings('hello ', 'world')
    assert result == 'hello world'
    result = await interface.call_echo_three('hello', 'there', 'world')
    assert result == ['hello', 'there', 'world']
    result = await interface.call_echo_int64(-10000)
    assert result == -10000
    # NO_REPLY_EXPECTED suppresses the reply, so the call yields None.
    result = await interface.call_echo_string('no reply', flags=MessageFlag.NO_REPLY_EXPECTED)
    assert result is None
    # A DBusError raised by the service surfaces as DBusError on the client,
    # with the original error type and message preserved.
    with pytest.raises(DBusError):
        try:
            await interface.call_throws_error()
        except DBusError as e:
            assert e.reply is not None
            assert e.type == 'test.error'
            assert e.text == 'something went wrong'
            raise e
    bus.disconnect()
    bus2.disconnect()
@pytest.mark.skipif(not has_gi, reason=skip_reason_no_gi)
def test_glib_proxy_object():
    """Same round trip as the aio test, via the blocking GLib *_sync API."""
    bus_name = 'glib.client.test.methods'
    bus = glib.MessageBus().connect_sync()
    bus.request_name_sync(bus_name)
    service_interface = ExampleInterface()
    bus.export('/test/path', service_interface)
    bus2 = glib.MessageBus().connect_sync()
    introspection = bus2.introspect_sync(bus_name, '/test/path')
    assert type(introspection) is intr.Node
    # NOTE(review): the proxy is built on `bus` (the exporting connection)
    # even though introspection used `bus2`; the aio twin above uses the
    # second bus — confirm whether this self-call is intentional.
    obj = bus.get_proxy_object(bus_name, '/test/path', introspection)
    interface = obj.get_interface(service_interface.name)
    result = interface.call_ping_sync()
    assert result is None
    result = interface.call_echo_string_sync('hello')
    assert result == 'hello'
    result = interface.call_concat_strings_sync('hello ', 'world')
    assert result == 'hello world'
    result = interface.call_echo_three_sync('hello', 'there', 'world')
    assert result == ['hello', 'there', 'world']
    # Service-side DBusError must surface with its type and text intact.
    with pytest.raises(DBusError):
        try:
            result = interface.call_throws_error_sync()
            assert False, result
        except DBusError as e:
            assert e.reply is not None
            assert e.type == 'test.error'
            assert e.text == 'something went wrong'
            raise e
    bus.disconnect()
    bus2.disconnect()
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,007
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/service/test_standard_interfaces.py
|
from dbus_next.service import ServiceInterface, dbus_property, PropertyAccess
from dbus_next.signature import Variant
from dbus_next.aio import MessageBus
from dbus_next import Message, MessageType, introspection as intr
from dbus_next.constants import ErrorType
import pytest
# Number of interfaces every exported object carries by default (presumably
# the org.freedesktop.DBus standard set); assertions below count relative
# to it rather than hard-coding the total.
standard_interfaces_count = len(intr.Node.default().interfaces)
class ExampleInterface(ServiceInterface):
    """Empty service interface; used where only the interface name matters."""
    def __init__(self, name):
        super().__init__(name)
class ExampleComplexInterface(ServiceInterface):
    """Interface with read-only properties, including an async getter."""
    def __init__(self, name):
        # Backing fields for the properties below.
        self._foo = 42
        self._bar = 'str'
        self._async_prop = 'async'
        super().__init__(name)
    @dbus_property(access=PropertyAccess.READ)
    def Foo(self) -> 'y':
        return self._foo
    @dbus_property(access=PropertyAccess.READ)
    def Bar(self) -> 's':
        return self._bar
    @dbus_property(access=PropertyAccess.READ)
    async def AsyncProp(self) -> 's':
        # Property getters may be coroutines; the bus awaits them.
        return self._async_prop
@pytest.mark.asyncio
async def test_introspectable_interface():
    """Introspect() lists standard + exported interfaces, on any path."""
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()
    interface = ExampleInterface('test.interface')
    interface2 = ExampleInterface('test.interface2')
    export_path = '/test/path'
    bus1.export(export_path, interface)
    bus1.export(export_path, interface2)
    reply = await bus2.call(
        Message(destination=bus1.unique_name,
                path=export_path,
                interface='org.freedesktop.DBus.Introspectable',
                member='Introspect'))
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body[0]
    assert reply.signature == 's'
    node = intr.Node.parse(reply.body[0])
    # Standard interfaces plus the two just exported, in export order.
    assert len(node.interfaces) == standard_interfaces_count + 2
    assert node.interfaces[-1].name == 'test.interface2'
    assert node.interfaces[-2].name == 'test.interface'
    assert not node.nodes
    # introspect works on every path
    reply = await bus2.call(
        Message(destination=bus1.unique_name,
                path='/path/doesnt/exist',
                interface='org.freedesktop.DBus.Introspectable',
                member='Introspect'))
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body[0]
    assert reply.signature == 's'
    node = intr.Node.parse(reply.body[0])
    # Unexported paths return an empty node rather than an error.
    assert not node.interfaces
    assert not node.nodes
@pytest.mark.asyncio
async def test_peer_interface():
    """org.freedesktop.DBus.Peer answers Ping/GetMachineId on any path."""
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()
    reply = await bus2.call(
        Message(destination=bus1.unique_name,
                path='/path/doesnt/exist',
                interface='org.freedesktop.DBus.Peer',
                member='Ping'))
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body[0]
    assert reply.signature == ''
    reply = await bus2.call(
        Message(destination=bus1.unique_name,
                path='/path/doesnt/exist',
                interface='org.freedesktop.DBus.Peer',
                member='GetMachineId',
                signature=''))
    assert reply.message_type == MessageType.METHOD_RETURN, reply.body[0]
    # GetMachineId returns a single string.
    assert reply.signature == 's'
    # NOTE(review): bus1/bus2 are never disconnected here, unlike the other
    # tests in this module — confirm whether that is intentional.
@pytest.mark.asyncio
async def test_object_manager():
    """GetManagedObjects lists only objects strictly below the queried path."""
    # Objects under /test/path (just the /deeper child).
    expected_reply = {
        '/test/path/deeper': {
            'test.interface2': {
                'Bar': Variant('s', 'str'),
                'Foo': Variant('y', 42),
                'AsyncProp': Variant('s', 'async'),
            }
        }
    }
    # /test/path itself; merged into expected_reply for the root query below.
    reply_ext = {
        '/test/path': {
            'test.interface1': {},
            'test.interface2': {
                'Bar': Variant('s', 'str'),
                'Foo': Variant('y', 42),
                'AsyncProp': Variant('s', 'async'),
            }
        }
    }
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()
    interface = ExampleInterface('test.interface1')
    interface2 = ExampleComplexInterface('test.interface2')
    export_path = '/test/path'
    bus1.export(export_path, interface)
    bus1.export(export_path, interface2)
    bus1.export(export_path + '/deeper', interface2)
    reply_root = await bus2.call(
        Message(destination=bus1.unique_name,
                path='/',
                interface='org.freedesktop.DBus.ObjectManager',
                member='GetManagedObjects'))
    reply_level1 = await bus2.call(
        Message(destination=bus1.unique_name,
                path=export_path,
                interface='org.freedesktop.DBus.ObjectManager',
                member='GetManagedObjects'))
    reply_level2 = await bus2.call(
        Message(destination=bus1.unique_name,
                path=export_path + '/deeper',
                interface='org.freedesktop.DBus.ObjectManager',
                member='GetManagedObjects'))
    assert reply_root.signature == 'a{oa{sa{sv}}}'
    assert reply_level1.signature == 'a{oa{sa{sv}}}'
    assert reply_level2.signature == 'a{oa{sa{sv}}}'
    # The deepest object has nothing below it.
    assert reply_level2.body == [{}]
    assert reply_level1.body == [expected_reply]
    # The root sees both /test/path and /test/path/deeper.
    expected_reply.update(reply_ext)
    assert reply_root.body == [expected_reply]
@pytest.mark.asyncio
async def test_standard_interface_properties():
    # standard interfaces have no properties, but should still behave correctly
    # when you try to call the methods anyway (#49)
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()
    interface = ExampleInterface('test.interface1')
    export_path = '/test/path'
    bus1.export(export_path, interface)
    for iface in [
            'org.freedesktop.DBus.Properties', 'org.freedesktop.DBus.Introspectable',
            'org.freedesktop.DBus.Peer', 'org.freedesktop.DBus.ObjectManager'
    ]:
        # Get on a standard interface must fail cleanly with UNKNOWN_PROPERTY.
        result = await bus2.call(
            Message(destination=bus1.unique_name,
                    path=export_path,
                    interface='org.freedesktop.DBus.Properties',
                    member='Get',
                    signature='ss',
                    body=[iface, 'anything']))
        assert result.message_type is MessageType.ERROR
        assert result.error_name == ErrorType.UNKNOWN_PROPERTY.value
        # Set likewise errors instead of crashing the service.
        result = await bus2.call(
            Message(destination=bus1.unique_name,
                    path=export_path,
                    interface='org.freedesktop.DBus.Properties',
                    member='Set',
                    signature='ssv',
                    body=[iface, 'anything', Variant('s', 'new thing')]))
        assert result.message_type is MessageType.ERROR
        assert result.error_name == ErrorType.UNKNOWN_PROPERTY.value
        # GetAll succeeds and returns an empty property dict.
        result = await bus2.call(
            Message(destination=bus1.unique_name,
                    path=export_path,
                    interface='org.freedesktop.DBus.Properties',
                    member='GetAll',
                    signature='s',
                    body=[iface]))
        assert result.message_type is MessageType.METHOD_RETURN
        assert result.body == [{}]
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,008
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/test_validators.py
|
from dbus_next import (is_bus_name_valid, is_object_path_valid, is_interface_name_valid,
is_member_name_valid)
def test_object_path_validator():
    """Check is_object_path_valid() against known-good and known-bad paths."""
    accepted = ['/', '/foo', '/foo/bar', '/foo/bar/bat']
    rejected = [
        None, '', 'foo', 'foo/bar', '/foo/bar/', '/$/foo/bar', '/foo//bar', '/foo$bar/baz'
    ]
    for candidate in accepted:
        assert is_object_path_valid(candidate), f'path should be valid: "{candidate}"'
    for candidate in rejected:
        assert not is_object_path_valid(candidate), f'path should be invalid: "{candidate}"'
def test_bus_name_validator():
    """Check is_bus_name_valid() against known-good and known-bad names."""
    accepted = [
        'foo.bar', 'foo.bar.bat', '_foo._bar', 'foo.bar69', 'foo.bar-69',
        'org.mpris.MediaPlayer2.google-play-desktop-player'
    ]
    rejected = [
        None, '', '5foo.bar', 'foo.6bar', '.foo.bar', 'bar..baz', '$foo.bar', 'foo$.ba$r'
    ]
    for candidate in accepted:
        assert is_bus_name_valid(candidate), f'bus name should be valid: "{candidate}"'
    for candidate in rejected:
        assert not is_bus_name_valid(candidate), f'bus name should be invalid: "{candidate}"'
def test_interface_name_validator():
    """Check is_interface_name_valid(); unlike bus names, dashes are rejected."""
    accepted = ['foo.bar', 'foo.bar.bat', '_foo._bar', 'foo.bar69']
    rejected = [
        None, '', '5foo.bar', 'foo.6bar', '.foo.bar', 'bar..baz', '$foo.bar', 'foo$.ba$r',
        'org.mpris.MediaPlayer2.google-play-desktop-player'
    ]
    for candidate in accepted:
        assert is_interface_name_valid(candidate), f'interface name should be valid: "{candidate}"'
    for candidate in rejected:
        assert not is_interface_name_valid(candidate), f'interface name should be invalid: "{candidate}"'
def test_member_name_validator():
    """Check is_member_name_valid() against known-good and known-bad members."""
    accepted = ['foo', 'FooBar', 'Bat_Baz69', 'foo-bar']
    rejected = [None, '', 'foo.bar', '5foo', 'foo$bar']
    for candidate in accepted:
        assert is_member_name_valid(candidate), f'member name should be valid: "{candidate}"'
    for candidate in rejected:
        assert not is_member_name_valid(candidate), f'member name should be invalid: "{candidate}"'
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,009
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/test_disconnect.py
|
from dbus_next.aio import MessageBus
from dbus_next import Message
import os
import pytest
import functools
@pytest.mark.asyncio
async def test_bus_disconnect_before_reply(event_loop):
    """The bus disconnects before the reply arrives; the caller must receive
    an error reply instead of hanging forever."""
    bus = MessageBus()
    assert not bus.connected
    await bus.connect()
    assert bus.connected

    ping_fields = dict(destination='org.freedesktop.DBus',
                       path='/org/freedesktop/DBus',
                       interface='org.freedesktop.DBus',
                       member='Ping')
    pending = bus.call(Message(**ping_fields))

    # Schedule the disconnect so it lands while the Ping call is in flight.
    event_loop.call_soon(bus.disconnect)

    with pytest.raises((EOFError, BrokenPipeError)):
        await pending

    assert bus._disconnected
    assert not bus.connected
    # An expected disconnect resolves wait_for_disconnect() with None.
    assert (await bus.wait_for_disconnect()) is None
@pytest.mark.asyncio
async def test_unexpected_disconnect(event_loop):
    """Killing the socket out from under the bus surfaces OSError to both the
    in-flight call and wait_for_disconnect()."""
    bus = MessageBus()
    assert not bus.connected
    await bus.connect()
    assert bus.connected

    pending = bus.call(
        Message(destination='org.freedesktop.DBus',
                path='/org/freedesktop/DBus',
                interface='org.freedesktop.DBus',
                member='Ping'))

    # call_soon binds its args eagerly, so the fd is captured here, exactly
    # like the functools.partial form.
    event_loop.call_soon(os.close, bus._fd)

    with pytest.raises(OSError):
        await pending

    assert bus._disconnected
    assert not bus.connected
    with pytest.raises(OSError):
        await bus.wait_for_disconnect()
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,010
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/test_request_name.py
|
from dbus_next import aio, glib, Message, MessageType, NameFlag, RequestNameReply, ReleaseNameReply
from test.util import check_gi_repository, skip_reason_no_gi
import pytest
has_gi = check_gi_repository()
@pytest.mark.asyncio
async def test_name_requests():
    """Exercise RequestName/ReleaseName queueing semantics with two buses."""
    test_name = 'aio.test.request.name'
    bus1 = await aio.MessageBus().connect()
    bus2 = await aio.MessageBus().connect()

    async def get_name_owner(name):
        # Ask the daemon directly which connection currently owns *name*.
        reply = await bus1.call(
            Message(destination='org.freedesktop.DBus',
                    path='/org/freedesktop/DBus',
                    interface='org.freedesktop.DBus',
                    member='GetNameOwner',
                    signature='s',
                    body=[name]))
        assert reply.message_type == MessageType.METHOD_RETURN
        return reply.body[0]

    # First request wins the name; repeating it is a no-op.
    assert await bus1.request_name(test_name) == RequestNameReply.PRIMARY_OWNER
    assert await bus1.request_name(test_name) == RequestNameReply.ALREADY_OWNER

    # A second bus queues behind the current owner.
    assert await bus2.request_name(
        test_name, NameFlag.ALLOW_REPLACEMENT) == RequestNameReply.IN_QUEUE

    # Releasing hands the name to the queued connection.
    assert await bus1.release_name(test_name) == ReleaseNameReply.RELEASED
    assert await bus1.release_name('name.doesnt.exist') == ReleaseNameReply.NON_EXISTENT
    assert await bus1.release_name(test_name) == ReleaseNameReply.NOT_OWNER

    assert await get_name_owner(test_name) == bus2.unique_name

    assert await bus1.request_name(test_name, NameFlag.DO_NOT_QUEUE) == RequestNameReply.EXISTS
    assert await bus1.request_name(
        test_name,
        NameFlag.DO_NOT_QUEUE | NameFlag.REPLACE_EXISTING) == RequestNameReply.PRIMARY_OWNER

    bus1.disconnect()
    bus2.disconnect()
@pytest.mark.skipif(not has_gi, reason=skip_reason_no_gi)
def test_request_name_glib():
    """Same request/release round trip through the blocking GLib bus API."""
    test_name = 'glib.test.request.name'
    bus = glib.MessageBus().connect_sync()

    assert bus.request_name_sync(test_name) == RequestNameReply.PRIMARY_OWNER
    assert bus.release_name_sync(test_name) == ReleaseNameReply.RELEASED

    bus.disconnect()
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,011
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/examples/mpris.py
|
#!/usr/bin/env python3
import sys
import os
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/..'))
from dbus_next.aio import MessageBus
import asyncio
async def main():
    """Connect to the session bus and remote-control the VLC MPRIS player.

    Demonstrates proxy method calls, property access, and signal handlers,
    then blocks until the bus disconnects.
    """
    bus = await MessageBus().connect()

    # the introspection xml would normally be included in your project, but
    # this is convenient for development
    introspection = await bus.introspect('org.mpris.MediaPlayer2.vlc', '/org/mpris/MediaPlayer2')

    obj = bus.get_proxy_object('org.mpris.MediaPlayer2.vlc', '/org/mpris/MediaPlayer2',
                               introspection)
    player = obj.get_interface('org.mpris.MediaPlayer2.Player')
    properties = obj.get_interface('org.freedesktop.DBus.Properties')

    # call methods on the interface (this causes the media player to play)
    await player.call_play()

    volume = await player.get_volume()
    print(f'current volume: {volume}, setting to 0.5')
    await player.set_volume(0.5)

    # listen to signals
    def on_properties_changed(interface_name, changed_properties, invalidated_properties):
        for changed, variant in changed_properties.items():
            print(f'property changed: {changed} - {variant.value}')

    properties.on_properties_changed(on_properties_changed)

    await bus.wait_for_disconnect()

# asyncio.run() creates and closes the event loop itself; the old
# get_event_loop()/run_until_complete() pattern is deprecated since 3.10.
asyncio.run(main())
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,012
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/dbus_next/signature.py
|
from .validators import is_object_path_valid
from .errors import InvalidSignatureError, SignatureBodyMismatchError
from functools import lru_cache
from typing import Any, List, Union
class SignatureType:
"""A class that represents a single complete type within a signature.
This class is not meant to be constructed directly. Use the :class:`SignatureTree`
class to parse signatures.
:ivar ~.signature: The signature of this complete type.
:vartype ~.signature: str
:ivar children: A list of child types if this is a container type. Arrays \
have one child type, dict entries have two child types (key and value), and \
structs have child types equal to the number of struct members.
:vartype children: list(:class:`SignatureType`)
"""
_tokens = 'ybnqiuxtdsogavh({'
    def __init__(self, token: str) -> None:
        """Create a type node for a single signature token.

        :param token: one character from ``SignatureType._tokens`` — 'a',
            '(', or '{' for container types, otherwise a basic type code.
        """
        self.token = token
        # Child complete types for container tokens; empty for basic types.
        self.children: List[SignatureType] = []
        # Memoized collapsed signature string; filled in lazily by the
        # `signature` property.
        self._signature = None
def __eq__(self, other):
if type(other) is SignatureType:
return self.signature == other.signature
else:
return super().__eq__(other)
def _collapse(self):
if self.token not in 'a({':
return self.token
signature = [self.token]
for child in self.children:
signature.append(child._collapse())
if self.token == '(':
signature.append(')')
elif self.token == '{':
signature.append('}')
return ''.join(signature)
@property
def signature(self) -> str:
if self._signature is not None:
return self._signature
self._signature = self._collapse()
return self._signature
    @staticmethod
    def _parse_next(signature):
        """Parse one complete type off the front of *signature*.

        Returns a tuple ``(SignatureType, remainder)`` where *remainder* is
        the unconsumed tail of the signature string, or ``(None, '')`` when
        the input is empty.

        :raises InvalidSignatureError: on an unknown token or an
            unterminated/invalid container type.
        """
        if not signature:
            return (None, '')
        token = signature[0]
        if token not in SignatureType._tokens:
            raise InvalidSignatureError(f'got unexpected token: "{token}"')
        # container types
        if token == 'a':
            # An array has exactly one child: its element type.
            self = SignatureType('a')
            (child, signature) = SignatureType._parse_next(signature[1:])
            if not child:
                raise InvalidSignatureError('missing type for array')
            self.children.append(child)
            return (self, signature)
        elif token == '(':
            # A struct has one child per member, parsed until ')'.
            self = SignatureType('(')
            signature = signature[1:]
            while True:
                (child, signature) = SignatureType._parse_next(signature)
                if not signature:
                    raise InvalidSignatureError('missing closing ")" for struct')
                self.children.append(child)
                if signature[0] == ')':
                    return (self, signature[1:])
        elif token == '{':
            # A dict entry has exactly two children: the key and the value.
            self = SignatureType('{')
            signature = signature[1:]
            (key_child, signature) = SignatureType._parse_next(signature)
            # The key must be a basic (non-container) type, i.e. no children.
            if not key_child or len(key_child.children):
                raise InvalidSignatureError('expected a simple type for dict entry key')
            self.children.append(key_child)
            (value_child, signature) = SignatureType._parse_next(signature)
            if not value_child:
                raise InvalidSignatureError('expected a value for dict entry')
            if not signature or signature[0] != '}':
                raise InvalidSignatureError('missing closing "}" for dict entry')
            self.children.append(value_child)
            return (self, signature[1:])
        # basic type
        return (SignatureType(token), signature[1:])
def _verify_byte(self, body):
BYTE_MIN = 0x00
BYTE_MAX = 0xff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus BYTE type "y" must be Python type "int", got {type(body)}')
if body < BYTE_MIN or body > BYTE_MAX:
raise SignatureBodyMismatchError(
f'DBus BYTE type must be between {BYTE_MIN} and {BYTE_MAX}')
def _verify_boolean(self, body):
if not isinstance(body, bool):
raise SignatureBodyMismatchError(
f'DBus BOOLEAN type "b" must be Python type "bool", got {type(body)}')
def _verify_int16(self, body):
INT16_MIN = -0x7fff - 1
INT16_MAX = 0x7fff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus INT16 type "n" must be Python type "int", got {type(body)}')
elif body > INT16_MAX or body < INT16_MIN:
raise SignatureBodyMismatchError(
f'DBus INT16 type "n" must be between {INT16_MIN} and {INT16_MAX}')
def _verify_uint16(self, body):
UINT16_MIN = 0
UINT16_MAX = 0xffff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus UINT16 type "q" must be Python type "int", got {type(body)}')
elif body > UINT16_MAX or body < UINT16_MIN:
raise SignatureBodyMismatchError(
f'DBus UINT16 type "q" must be between {UINT16_MIN} and {UINT16_MAX}')
def _verify_int32(self, body):
INT32_MIN = -0x7fffffff - 1
INT32_MAX = 0x7fffffff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus INT32 type "i" must be Python type "int", got {type(body)}')
elif body > INT32_MAX or body < INT32_MIN:
raise SignatureBodyMismatchError(
f'DBus INT32 type "i" must be between {INT32_MIN} and {INT32_MAX}')
def _verify_uint32(self, body):
UINT32_MIN = 0
UINT32_MAX = 0xffffffff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus UINT32 type "u" must be Python type "int", got {type(body)}')
elif body > UINT32_MAX or body < UINT32_MIN:
raise SignatureBodyMismatchError(
f'DBus UINT32 type "u" must be between {UINT32_MIN} and {UINT32_MAX}')
def _verify_int64(self, body):
INT64_MAX = 9223372036854775807
INT64_MIN = -INT64_MAX - 1
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus INT64 type "x" must be Python type "int", got {type(body)}')
elif body > INT64_MAX or body < INT64_MIN:
raise SignatureBodyMismatchError(
f'DBus INT64 type "x" must be between {INT64_MIN} and {INT64_MAX}')
def _verify_uint64(self, body):
UINT64_MIN = 0
UINT64_MAX = 18446744073709551615
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus UINT64 type "t" must be Python type "int", got {type(body)}')
elif body > UINT64_MAX or body < UINT64_MIN:
raise SignatureBodyMismatchError(
f'DBus UINT64 type "t" must be between {UINT64_MIN} and {UINT64_MAX}')
def _verify_double(self, body):
if not isinstance(body, float) and not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus DOUBLE type "d" must be Python type "float" or "int", got {type(body)}')
    def _verify_unix_fd(self, body):
        """A UNIX_FD is sent on the wire as an index, so the body must be a
        valid UINT32; the UINT32 mismatch is re-raised with an fd-specific
        message (keeping the original as the implicit exception context)."""
        try:
            self._verify_uint32(body)
        except SignatureBodyMismatchError:
            raise SignatureBodyMismatchError('DBus UNIX_FD type "h" must be a valid UINT32')
def _verify_object_path(self, body):
if not is_object_path_valid(body):
raise SignatureBodyMismatchError(
'DBus OBJECT_PATH type "o" must be a valid object path')
def _verify_string(self, body):
if not isinstance(body, str):
raise SignatureBodyMismatchError(
f'DBus STRING type "s" must be Python type "str", got {type(body)}')
def _verify_signature(self, body):
# I guess we could run it through the SignatureTree parser instead
if not isinstance(body, str):
raise SignatureBodyMismatchError(
f'DBus SIGNATURE type "g" must be Python type "str", got {type(body)}')
if len(body.encode()) > 0xff:
raise SignatureBodyMismatchError('DBus SIGNATURE type "g" must be less than 256 bytes')
def _verify_array(self, body):
child_type = self.children[0]
if child_type.token == '{':
if not isinstance(body, dict):
raise SignatureBodyMismatchError(
f'DBus ARRAY type "a" with DICT_ENTRY child must be Python type "dict", got {type(body)}'
)
for key, value in body.items():
child_type.children[0].verify(key)
child_type.children[1].verify(value)
elif child_type.token == 'y':
if not isinstance(body, (bytearray, bytes)):
raise SignatureBodyMismatchError(
f'DBus ARRAY type "a" with BYTE child must be Python type "bytes", got {type(body)}'
)
# no need to verify children
else:
if not isinstance(body, list):
raise SignatureBodyMismatchError(
f'DBus ARRAY type "a" must be Python type "list", got {type(body)}')
for member in body:
child_type.verify(member)
def _verify_struct(self, body):
# TODO allow tuples
if not isinstance(body, list):
raise SignatureBodyMismatchError(
f'DBus STRUCT type "(" must be Python type "list", got {type(body)}')
if len(body) != len(self.children):
raise SignatureBodyMismatchError(
'DBus STRUCT type "(" must have Python list members equal to the number of struct type members'
)
for i, member in enumerate(body):
self.children[i].verify(member)
def _verify_variant(self, body):
# a variant signature and value is valid by construction
if not isinstance(body, Variant):
raise SignatureBodyMismatchError(
f'DBus VARIANT type "v" must be Python type "Variant", got {type(body)}')
def verify(self, body: Any) -> bool:
"""Verify that the body matches this type.
:returns: True if the body matches this type.
:raises:
:class:`SignatureBodyMismatchError` if the body does not match this type.
"""
if body is None:
raise SignatureBodyMismatchError('Cannot serialize Python type "None"')
validator = self.validators.get(self.token)
if validator:
validator(self, body)
else:
raise Exception(f'cannot verify type with token {self.token}')
return True
validators = {
"y": _verify_byte,
"b": _verify_boolean,
"n": _verify_int16,
"q": _verify_uint16,
"i": _verify_int32,
"u": _verify_uint32,
"x": _verify_int64,
"t": _verify_uint64,
"d": _verify_double,
"h": _verify_uint32,
"o": _verify_string,
"s": _verify_string,
"g": _verify_signature,
"a": _verify_array,
"(": _verify_struct,
"v": _verify_variant,
}
class SignatureTree:
    """Tree representation of a DBus signature for convenient inspection.

    Parses a signature string into its list of complete types. Not normally
    used directly by library users.

    :ivar types: The parsed complete types, in order.
    :vartype types: list(:class:`SignatureType`)

    :ivar ~.signature: The signature string this tree was parsed from.
    :vartype ~.signature: str

    :raises:
        :class:`InvalidSignatureError` if the given signature is not valid.
    """

    @staticmethod
    @lru_cache(maxsize=None)
    def _get(signature: str = '') -> "SignatureTree":
        # Memoized factory: repeated lookups of the same signature string
        # share a single parsed tree.
        return SignatureTree(signature)

    def __init__(self, signature: str = ''):
        self.signature = signature
        self.types: List[SignatureType] = []
        if len(signature) > 0xff:
            raise InvalidSignatureError('A signature must be less than 256 characters')
        remaining = signature
        while remaining:
            (parsed, remaining) = SignatureType._parse_next(remaining)
            self.types.append(parsed)

    def __eq__(self, other):
        """Two trees are equal when their signature strings match."""
        if type(other) is not SignatureTree:
            return super().__eq__(other)
        return self.signature == other.signature

    def verify(self, body: List[Any]):
        """Verifies that the given body matches this signature tree.

        :param body: the body to verify for this tree
        :type body: list(Any)

        :returns: True if the signature matches the body.
        :raises:
            :class:`SignatureBodyMismatchError` if the signature does not match the body.
        """
        if not isinstance(body, list):
            raise SignatureBodyMismatchError(f'The body must be a list (got {type(body)})')
        if len(body) != len(self.types):
            raise SignatureBodyMismatchError(
                f'The body has the wrong number of types (got {len(body)}, expected {len(self.types)})'
            )
        for type_, member in zip(self.types, body):
            type_.verify(member)
        return True
class Variant:
    """A class to represent a DBus variant (type "v").

    Used in message bodies wherever a value of type "v" appears; users may
    also construct one directly for bodies they send over the bus.

    :ivar signature: The signature for this variant. Must be a single complete type.
    :vartype signature: str

    :ivar signature_type: The parsed signature of this variant.
    :vartype signature_type: :class:`SignatureType`

    :ivar value: The value of this variant. Must correspond to the signature.
    :vartype value: Any

    :raises:
        :class:`InvalidSignatureError` if the signature is not valid.
        :class:`SignatureBodyMismatchError` if the signature does not match the body.
    """

    def __init__(self,
                 signature: Union[str, SignatureTree, SignatureType],
                 value: Any,
                 verify: bool = True):
        tree = None
        variant_type = None
        signature_str = ''
        # Accept exactly a SignatureTree, a SignatureType, or a raw string.
        if type(signature) is SignatureTree:
            tree = signature
        elif type(signature) is SignatureType:
            variant_type = signature
            signature_str = signature.signature
        elif type(signature) is str:
            tree = SignatureTree._get(signature)
        else:
            raise TypeError('signature must be a SignatureTree, SignatureType, or a string')
        if tree is not None:
            # A variant wraps exactly one complete type.
            if verify and len(tree.types) != 1:
                raise ValueError('variants must have a signature for a single complete type')
            signature_str = tree.signature
            variant_type = tree.types[0]
        if verify:
            variant_type.verify(value)
        self.type = variant_type
        self.signature = signature_str
        self.value = value

    def __eq__(self, other):
        """Two variants are equal when both their signature and value match."""
        if type(other) is not Variant:
            return super().__eq__(other)
        return self.signature == other.signature and self.value == other.value

    def __repr__(self):
        return f"<dbus_next.signature.Variant ('{self.type.signature}', {self.value})>"
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
9,013
|
altdesktop/python-dbus-next
|
refs/heads/master
|
/test/service/test_signals.py
|
from dbus_next.service import ServiceInterface, signal, SignalDisabledError, dbus_property
from dbus_next.aio import MessageBus
from dbus_next import Message, MessageType
from dbus_next.constants import PropertyAccess
from dbus_next.signature import Variant
import pytest
import asyncio
class ExampleInterface(ServiceInterface):
    """Service interface exercising every signal declaration style: empty,
    single-argument, multi-argument, renamed, and disabled signals, plus one
    readable property (so ObjectManager signals have something to report)."""
    def __init__(self, name):
        super().__init__(name)
    @signal()
    def signal_empty(self):
        # No return annotation: the emitted signal carries an empty body.
        assert type(self) is ExampleInterface
    @signal()
    def signal_simple(self) -> 's':
        # Annotation 's' declares a single-STRING signal body.
        assert type(self) is ExampleInterface
        return 'hello'
    @signal()
    def signal_multiple(self) -> 'ss':
        # Annotation 'ss' declares two STRING arguments, returned as a list.
        assert type(self) is ExampleInterface
        return ['hello', 'world']
    @signal(name='renamed')
    def original_name(self):
        # Emitted on the bus under the member name 'renamed', not the Python name.
        assert type(self) is ExampleInterface
    @signal(disabled=True)
    def signal_disabled(self):
        # Disabled signals are expected to raise SignalDisabledError when called.
        assert type(self) is ExampleInterface
    @dbus_property(access=PropertyAccess.READ)
    def test_prop(self) -> 'i':
        # Read-only INT32 property.
        return 42
class SecondExampleInterface(ServiceInterface):
    """Second interface with two read-only properties, used to exercise the
    ObjectManager InterfacesAdded/InterfacesRemoved signals."""
    def __init__(self, name):
        super().__init__(name)
    @dbus_property(access=PropertyAccess.READ)
    def str_prop(self) -> 's':
        # Read-only STRING property.
        return "abc"
    @dbus_property(access=PropertyAccess.READ)
    def list_prop(self) -> 'ai':
        # Read-only ARRAY-of-INT32 property.
        return [1, 2, 3]
class ExpectMessage:
    """Async context manager yielding a future that resolves with the next
    message *bus1* sends on *interface_name*, as observed by *bus2*; the
    future fails with TimeoutError if nothing arrives within *timeout* seconds."""

    def __init__(self, bus1, bus2, interface_name, timeout=1):
        self.future = asyncio.get_event_loop().create_future()
        self.bus1 = bus1
        self.bus2 = bus2
        self.interface_name = interface_name
        self.timeout = timeout
        self.timeout_task = None

    def message_handler(self, msg):
        # Ignore traffic from other senders or interfaces.
        if msg.sender != self.bus1.unique_name:
            return
        if msg.interface != self.interface_name:
            return
        self.timeout_task.cancel()
        self.future.set_result(msg)
        return True

    def timeout_cb(self):
        self.future.set_exception(TimeoutError)

    async def __aenter__(self):
        self.bus2.add_message_handler(self.message_handler)
        self.timeout_task = asyncio.get_event_loop().call_later(self.timeout, self.timeout_cb)
        return self.future

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        self.bus2.remove_message_handler(self.message_handler)
def assert_signal_ok(signal, export_path, member, signature, body):
    """Assert that *signal* is a SIGNAL message with the expected path,
    member, signature, and body."""
    expected = {
        'message_type': MessageType.SIGNAL,
        'path': export_path,
        'member': member,
        'signature': signature,
        'body': body,
    }
    for attr, want in expected.items():
        assert getattr(signal, attr) == want
@pytest.mark.asyncio
async def test_signals():
    """End-to-end check that signals emitted by an exported interface on one
    bus connection arrive on a second connection with the expected member
    name, signature, and body."""
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()
    interface = ExampleInterface('test.interface')
    export_path = '/test/path'
    bus1.export(export_path, interface)
    # Subscribe bus2 to all messages emitted by bus1.
    await bus2.call(
        Message(destination='org.freedesktop.DBus',
                path='/org/freedesktop/DBus',
                interface='org.freedesktop.DBus',
                member='AddMatch',
                signature='s',
                body=[f'sender={bus1.unique_name}']))
    # Signal with no arguments: empty signature and body.
    async with ExpectMessage(bus1, bus2, interface.name) as expected_signal:
        interface.signal_empty()
        assert_signal_ok(signal=await expected_signal,
                         export_path=export_path,
                         member='signal_empty',
                         signature='',
                         body=[])
    # @signal(name='renamed') emits under the renamed member.
    async with ExpectMessage(bus1, bus2, interface.name) as expected_signal:
        interface.original_name()
        assert_signal_ok(signal=await expected_signal,
                         export_path=export_path,
                         member='renamed',
                         signature='',
                         body=[])
    # Single-argument signal ('s').
    async with ExpectMessage(bus1, bus2, interface.name) as expected_signal:
        interface.signal_simple()
        assert_signal_ok(signal=await expected_signal,
                         export_path=export_path,
                         member='signal_simple',
                         signature='s',
                         body=['hello'])
    # Multi-argument signal ('ss').
    async with ExpectMessage(bus1, bus2, interface.name) as expected_signal:
        interface.signal_multiple()
        assert_signal_ok(signal=await expected_signal,
                         export_path=export_path,
                         member='signal_multiple',
                         signature='ss',
                         body=['hello', 'world'])
    # Disabled signals must refuse to emit.
    with pytest.raises(SignalDisabledError):
        interface.signal_disabled()
@pytest.mark.asyncio
async def test_interface_add_remove_signal():
    """Check that exporting and unexporting interfaces emits the
    ObjectManager InterfacesAdded/InterfacesRemoved signals with the
    interface names and property values in the body."""
    bus1 = await MessageBus().connect()
    bus2 = await MessageBus().connect()
    # Subscribe bus2 to all messages emitted by bus1.
    await bus2.call(
        Message(destination='org.freedesktop.DBus',
                path='/org/freedesktop/DBus',
                interface='org.freedesktop.DBus',
                member='AddMatch',
                signature='s',
                body=[f'sender={bus1.unique_name}']))
    first_interface = ExampleInterface('test.interface.first')
    second_interface = SecondExampleInterface('test.interface.second')
    export_path = '/test/path'
    # add first interface
    async with ExpectMessage(bus1, bus2, 'org.freedesktop.DBus.ObjectManager') as expected_signal:
        bus1.export(export_path, first_interface)
        assert_signal_ok(
            signal=await expected_signal,
            export_path=export_path,
            member='InterfacesAdded',
            signature='oa{sa{sv}}',
            body=[export_path, {
                'test.interface.first': {
                    'test_prop': Variant('i', 42)
                }
            }])
    # add second interface
    async with ExpectMessage(bus1, bus2, 'org.freedesktop.DBus.ObjectManager') as expected_signal:
        bus1.export(export_path, second_interface)
        assert_signal_ok(signal=await expected_signal,
                         export_path=export_path,
                         member='InterfacesAdded',
                         signature='oa{sa{sv}}',
                         body=[
                             export_path, {
                                 'test.interface.second': {
                                     'str_prop': Variant('s', "abc"),
                                     'list_prop': Variant('ai', [1, 2, 3])
                                 }
                             }
                         ])
    # remove single interface
    async with ExpectMessage(bus1, bus2, 'org.freedesktop.DBus.ObjectManager') as expected_signal:
        bus1.unexport(export_path, second_interface)
        assert_signal_ok(signal=await expected_signal,
                         export_path=export_path,
                         member='InterfacesRemoved',
                         signature='oas',
                         body=[export_path, ['test.interface.second']])
    # add second interface again
    async with ExpectMessage(bus1, bus2, 'org.freedesktop.DBus.ObjectManager') as expected_signal:
        bus1.export(export_path, second_interface)
        # Only wait for delivery here; the body was already checked above.
        await expected_signal
    # remove multiple interfaces: unexporting the path drops both at once
    async with ExpectMessage(bus1, bus2, 'org.freedesktop.DBus.ObjectManager') as expected_signal:
        bus1.unexport(export_path)
        assert_signal_ok(signal=await expected_signal,
                         export_path=export_path,
                         member='InterfacesRemoved',
                         signature='oas',
                         body=[export_path, ['test.interface.first', 'test.interface.second']])
|
{"/test/service/test_methods.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/glib/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/constants.py", "/dbus_next/__init__.py"], "/test/service/test_properties.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/introspection.py", "/dbus_next/constants.py"], "/dbus_next/auth.py": ["/dbus_next/errors.py"], "/dbus_next/glib/message_bus.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/constants.py", "/dbus_next/message.py", "/dbus_next/message_bus.py", "/dbus_next/errors.py", "/dbus_next/glib/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/dbus_next/_private/unmarshaller.py": ["/dbus_next/message.py", "/dbus_next/_private/constants.py", "/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/errors.py"], "/examples/aio-tcp-notification.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/introspection.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/validators.py", "/dbus_next/errors.py"], "/dbus_next/aio/__init__.py": ["/dbus_next/aio/message_bus.py", "/dbus_next/aio/proxy_object.py"], "/test/test_glib_low_level.py": ["/dbus_next/__init__.py", "/test/util.py"], "/dbus_next/proxy_object.py": ["/dbus_next/validators.py", "/dbus_next/__init__.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/dbus_next/service.py": ["/dbus_next/constants.py", "/dbus_next/signature.py", "/dbus_next/__init__.py", "/dbus_next/errors.py", "/dbus_next/_private/util.py"], "/test/service/test_export.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], 
"/test/test_fd_passing.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/client/test_methods.py": ["/dbus_next/message.py", "/dbus_next/service.py", "/dbus_next/introspection.py", "/dbus_next/__init__.py", "/test/util.py"], "/test/service/test_standard_interfaces.py": ["/dbus_next/service.py", "/dbus_next/signature.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py"], "/test/test_validators.py": ["/dbus_next/__init__.py"], "/test/test_disconnect.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/test/test_request_name.py": ["/dbus_next/__init__.py", "/test/util.py"], "/examples/mpris.py": ["/dbus_next/aio/__init__.py"], "/dbus_next/signature.py": ["/dbus_next/validators.py", "/dbus_next/errors.py"], "/test/service/test_signals.py": ["/dbus_next/service.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/constants.py", "/dbus_next/signature.py"], "/dbus_next/__init__.py": ["/dbus_next/constants.py", "/dbus_next/errors.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/validators.py"], "/dbus_next/validators.py": ["/dbus_next/errors.py"], "/test/test_introspection.py": ["/dbus_next/__init__.py"], "/examples/example-service.py": ["/dbus_next/service.py", "/dbus_next/aio/message_bus.py", "/dbus_next/__init__.py"], "/dbus_next/aio/proxy_object.py": ["/dbus_next/proxy_object.py", "/dbus_next/message_bus.py", "/dbus_next/message.py", "/dbus_next/signature.py", "/dbus_next/errors.py", "/dbus_next/constants.py", "/dbus_next/_private/util.py", "/dbus_next/__init__.py"], "/test/test_aio_low_level.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/_private/util.py": ["/dbus_next/signature.py"], "/examples/aio-list-names.py": ["/dbus_next/__init__.py", "/dbus_next/aio/__init__.py"], "/test/service/test_decorators.py": ["/dbus_next/__init__.py", "/dbus_next/service.py"], 
"/examples/dbus-next-send.py": ["/dbus_next/validators.py", "/dbus_next/aio/__init__.py", "/dbus_next/__init__.py"], "/dbus_next/errors.py": ["/dbus_next/message.py", "/dbus_next/validators.py", "/dbus_next/constants.py"], "/dbus_next/message.py": ["/dbus_next/_private/marshaller.py", "/dbus_next/constants.py", "/dbus_next/_private/constants.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py"], "/test/test_marshaller.py": ["/dbus_next/_private/unmarshaller.py", "/dbus_next/__init__.py"], "/examples/glib-list-names.py": ["/dbus_next/__init__.py"], "/dbus_next/message_bus.py": ["/dbus_next/_private/address.py", "/dbus_next/_private/util.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/validators.py", "/dbus_next/errors.py", "/dbus_next/signature.py", "/dbus_next/proxy_object.py", "/dbus_next/__init__.py"], "/test/test_big_message.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/aio/message_bus.py": ["/dbus_next/message_bus.py", "/dbus_next/_private/unmarshaller.py", "/dbus_next/message.py", "/dbus_next/constants.py", "/dbus_next/service.py", "/dbus_next/errors.py", "/dbus_next/aio/proxy_object.py", "/dbus_next/__init__.py", "/dbus_next/auth.py"], "/test/client/test_properties.py": ["/dbus_next/__init__.py", "/dbus_next/service.py", "/test/util.py"], "/dbus_next/_private/marshaller.py": ["/dbus_next/signature.py"], "/test/test_address_parser.py": ["/dbus_next/_private/address.py"], "/test/test_tcp_address.py": ["/dbus_next/aio/__init__.py", "/dbus_next/__init__.py", "/dbus_next/_private/address.py"], "/bench/unmarshall.py": ["/dbus_next/_private/unmarshaller.py"], "/test/test_signature.py": ["/dbus_next/__init__.py", "/dbus_next/_private/util.py"], "/dbus_next/_private/address.py": ["/dbus_next/constants.py", "/dbus_next/errors.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.